1
0
Fork 0

Optimize and cleanup

- fix really bad facelist generation.
- add a face area list in superpoly - each triangle area was calculated twice
- cleanup construct_bucket - calls a function for each step.
  TODO : move groups of operations into their own files
         i.e. load, clip, elevation, shared edges, and output
This commit is contained in:
Peter Sadrozinski 2012-07-26 21:32:53 -04:00 committed by Christian Schmitt
parent 30573aa811
commit 7e55efbb03
7 changed files with 346 additions and 555 deletions

View file

@ -83,11 +83,21 @@ TGConstruct::TGConstruct():
// Destructor
TGConstruct::~TGConstruct() { }
// Destructor.
// Releases the per-tile working state so a TGConstruct instance tears down
// cleanly after a bucket has been built: closes the elevation Array and
// clears the polygon and node containers.
// NOTE(review): the displayed span contained stale diff residue (the old
// load_array() comment and signature spliced into the body); removed here.
TGConstruct::~TGConstruct() {
    array.close();

    // land class polygons
    polys_in.clear();
    polys_clipped.clear();

    // All Nodes
    nodes.clear();
}
// STEP 1
// Load elevation data from an Array file (a regular grid of elevation data)
// and return list of fitted nodes.
void TGConstruct::LoadElevationArray( void ) {
string base = bucket.gen_base_path();
int i;
@ -104,7 +114,15 @@ bool TGConstruct::load_array() {
array.parse( bucket );
array.remove_voids( );
return true;
point_list corner_list = array.get_corner_list();
for (unsigned int i=0; i<corner_list.size(); i++) {
nodes.unique_add(corner_list[i]);
}
point_list fit_list = array.get_fitted_list();
for (unsigned int i=0; i<fit_list.size(); i++) {
nodes.unique_add(fit_list[i]);
}
}
// Add a polygon to the clipper.
@ -343,7 +361,7 @@ bool TGConstruct::load_osgb36_poly(const string& path) {
// load all 2d polygons from the specified load disk directories and
// clip against each other to resolve any overlaps
int TGConstruct::load_polys( ) {
int TGConstruct::LoadLandclassPolys( void ) {
int i;
string base = bucket.gen_base_path();
@ -574,7 +592,7 @@ int TGConstruct::load_landcover()
return count;
}
void TGConstruct::add_intermediate_nodes( ) {
void TGConstruct::FixTJunctions( void ) {
int before, after;
// traverse each poly, and add intermediate nodes
@ -587,7 +605,7 @@ void TGConstruct::add_intermediate_nodes( ) {
after = current.total_size();
if (before != after) {
SG_LOG( SG_CLIPPER, SG_INFO, "Fixed t-junctions in " << get_area_name( (AreaType)i ) << ":" << j+1 << " of " << (int)polys_clipped.superpolys[i].size() << " nodes increased from " << before << " to " << after );
SG_LOG( SG_CLIPPER, SG_INFO, "Fixed T-Junctions in " << get_area_name( (AreaType)i ) << ":" << j+1 << " of " << (int)polys_clipped.superpolys[i].size() << " nodes increased from " << before << " to " << after );
}
/* Save it back */
@ -624,7 +642,7 @@ double TGConstruct::distanceSphere( const Point3D p1, const Point3D p2 ) {
// fix the elevations of the geodetic nodes
// This should be done in the nodes class itself, except for the need for the triangle type
// hopefully, this will get better when we have the area lookup via superpoly...
void TGConstruct::fix_point_heights()
void TGConstruct::CalcElevations( void )
{
//TGPolygon tri_poly;
TGPolyNodes tri_nodes;
@ -650,26 +668,18 @@ void TGConstruct::fix_point_heights()
for (int j = 0; j < (int)polys_clipped.superpolys[i].size(); ++j ) {
SG_LOG( SG_CLIPPER, SG_INFO, "Flattening " << get_area_name( (AreaType)i ) << ":" << j+1 << " of " << (int)polys_clipped.superpolys[i].size() );
// tri_poly = polys_clipped.superpolys[i][j].get_tris();
tri_nodes = polys_clipped.superpolys[i][j].get_tri_idxs();
// for (int k=0; k< tri_poly.contours(); k++) {
for (int k=0; k< tri_nodes.contours(); k++) {
// if (tri_poly.contour_size(k) != 3) {
if (tri_nodes.contour_size(k) != 3) {
SG_LOG(SG_GENERAL, SG_ALERT, "triangle doesnt have 3 nodes" << tri_nodes.contour_size(k) );
exit(0);
}
// n1 = nodes.find( tri_poly.get_pt( k, 0 ) );
n1 = tri_nodes.get_pt( k, 0 );
e1 = nodes.get_node(n1).GetPosition().z();
// n2 = nodes.find( tri_poly.get_pt( k, 1 ) );
n2 = tri_nodes.get_pt( k, 1 );
e2 = nodes.get_node(n2).GetPosition().z();
// n3 = nodes.find( tri_poly.get_pt( k, 2 ) );
n3 = tri_nodes.get_pt( k, 2 );
e3 = nodes.get_node(n3).GetPosition().z();
@ -677,12 +687,6 @@ void TGConstruct::fix_point_heights()
if ( e2 < min ) { min = e2; }
if ( e3 < min ) { min = e3; }
SG_LOG(SG_GENERAL, SG_ALERT, "FLATTEN LAKE: original elevations are " <<
nodes.get_node(n1).GetPosition().z() << "(" << e1 << "), " <<
nodes.get_node(n2).GetPosition().z() << "(" << e2 << "), " <<
nodes.get_node(n3).GetPosition().z() << "(" << e3 << ")" <<
" new elevation is " << min );
nodes.SetElevation( n1, min );
nodes.SetElevation( n2, min );
nodes.SetElevation( n3, min );
@ -694,12 +698,9 @@ void TGConstruct::fix_point_heights()
for (int j = 0; j < (int)polys_clipped.superpolys[i].size(); ++j ) {
SG_LOG( SG_CLIPPER, SG_INFO, "Flattening " << get_area_name( (AreaType)i ) << ":" << j+1 << " of " << (int)polys_clipped.superpolys[i].size() );
// tri_poly = polys_clipped.superpolys[i][j].get_tris();
tri_nodes = polys_clipped.superpolys[i][j].get_tri_idxs();
// for (int k=0; k< tri_poly.contours(); k++) {
for (int k=0; k< tri_nodes.contours(); k++) {
// if (tri_poly.contour_size(k) != 3) {
if (tri_nodes.contour_size(k) != 3) {
SG_LOG(SG_GENERAL, SG_ALERT, "triangle doesnt have 3 nodes" << tri_nodes.contour_size(k) );
exit(0);
@ -707,15 +708,10 @@ void TGConstruct::fix_point_heights()
point_list raw_nodes = nodes.get_geod_nodes();
// n1 = nodes.find( tri_poly.get_pt( k, 0 ) );
n1 = tri_nodes.get_pt( k, 0 );
e1 = nodes.get_node(n1).GetPosition().z();
// n2 = nodes.find( tri_poly.get_pt( k, 1 ) );
n2 = tri_nodes.get_pt( k, 1 );
e2 = nodes.get_node(n2).GetPosition().z();
// n3 = nodes.find( tri_poly.get_pt( k, 2 ) );
n3 = tri_nodes.get_pt( k, 2 );
e3 = nodes.get_node(n3).GetPosition().z();
@ -744,12 +740,9 @@ void TGConstruct::fix_point_heights()
for (int j = 0; j < (int)polys_clipped.superpolys[i].size(); ++j ) {
SG_LOG( SG_CLIPPER, SG_INFO, "Flattening " << get_area_name( (AreaType)i ) << ":" << j+1 << " of " << (int)polys_clipped.superpolys[i].size() );
// tri_poly = polys_clipped.superpolys[i][j].get_tris();
tri_nodes = polys_clipped.superpolys[i][j].get_tri_idxs();
// for (int k=0; k< tri_poly.contours(); k++) {
for (int k=0; k< tri_nodes.contours(); k++) {
// if (tri_poly.contour_size(k) != 3) {
if (tri_nodes.contour_size(k) != 3) {
SG_LOG(SG_GENERAL, SG_ALERT, "triangle doesnt have 3 nodes" << tri_nodes.contour_size(k) );
exit(0);
@ -757,15 +750,10 @@ void TGConstruct::fix_point_heights()
point_list raw_nodes = nodes.get_geod_nodes();
// n1 = nodes.find( tri_poly.get_pt( k, 0 ) );
n1 = tri_nodes.get_pt( k, 0 );
e1 = nodes.get_node(n1).GetPosition().z();
// n2 = nodes.find( tri_poly.get_pt( k, 1 ) );
n2 = tri_nodes.get_pt( k, 1 );
e2 = nodes.get_node(n2).GetPosition().z();
// n3 = nodes.find( tri_poly.get_pt( k, 2 ) );
n3 = tri_nodes.get_pt( k, 2 );
e3 = nodes.get_node(n3).GetPosition().z();
@ -793,24 +781,18 @@ void TGConstruct::fix_point_heights()
if ( is_ocean_area( (AreaType)i ) ) {
for (int j = 0; j < (int)polys_clipped.superpolys[i].size(); ++j ) {
// tri_poly = polys_clipped.superpolys[i][j].get_tris();
tri_nodes = polys_clipped.superpolys[i][j].get_tri_idxs();
SG_LOG( SG_CLIPPER, SG_INFO, "Flattening " << get_area_name( (AreaType)i ) << ":" << j+1 << " of " << (int)polys_clipped.superpolys[i].size() );
// for (int k=0; k< tri_poly.contours(); k++) {
for (int k=0; k< tri_nodes.contours(); k++) {
// if (tri_poly.contour_size(k) != 3) {
if (tri_nodes.contour_size(k) != 3) {
SG_LOG(SG_GENERAL, SG_ALERT, "triangle doesnt have 3 nodes" << tri_nodes.contour_size(k) );
exit(0);
}
// n1 = nodes.find( tri_poly.get_pt( k, 0 ) );
n1 = tri_nodes.get_pt( k, 0 );
// n2 = nodes.find( tri_poly.get_pt( k, 1 ) );
n2 = tri_nodes.get_pt( k, 1 );
// n3 = nodes.find( tri_poly.get_pt( k, 2 ) );
n3 = tri_nodes.get_pt( k, 2 );
nodes.SetElevation( n1, 0.0 );
@ -953,7 +935,7 @@ TGPolygon TGConstruct::linear_tex_coords( const TGPolygon& tri, const TGTexParam
}
// collect custom objects and move to scenery area
void TGConstruct::do_custom_objects( ) {
void TGConstruct::AddCustomObjects( void ) {
// Create/open the output .stg file for writing
SGPath dest_d(get_output_base().c_str());
dest_d.append(bucket.gen_base_path().c_str());
@ -1085,16 +1067,19 @@ void TGConstruct::merge_slivers( TGPolyList& clipped, poly_list& slivers_list )
}
}
bool TGConstruct::clip_all(const point2d& min, const point2d& max) {
bool TGConstruct::ClipLandclassPolys( void ) {
TGPolygon accum, clipped, tmp;
TGPolygon remains;
poly_list slivers;
int i, j;
Point3D p;
point2d min, max;
SG_LOG( SG_CLIPPER, SG_INFO, "Running master clipper" );
SG_LOG( SG_CLIPPER, SG_INFO, " (" << min.x << "," << min.y << ") (" << max.x << "," << max.y << ")" );
// Get clip bounds
min.x = bucket.get_center_lon() - 0.5 * bucket.get_width();
min.y = bucket.get_center_lat() - 0.5 * bucket.get_height();
max.x = bucket.get_center_lon() + 0.5 * bucket.get_width();
max.y = bucket.get_center_lat() + 0.5 * bucket.get_height();
accum.erase();
@ -1157,7 +1142,6 @@ bool TGConstruct::clip_all(const point2d& min, const point2d& max) {
// process polygons in priority order
for ( i = 0; i < TG_MAX_AREA_TYPES; ++i ) {
SG_LOG( SG_CLIPPER, SG_INFO, "num polys of type (" << i << ") = " << polys_in.superpolys[i].size() );
for( j = 0; j < (int)polys_in.superpolys[i].size(); ++j ) {
TGPolygon current = polys_in.superpolys[i][j].get_poly();
@ -1270,7 +1254,22 @@ bool TGConstruct::clip_all(const point2d& min, const point2d& max) {
}
}
SG_LOG( SG_CLIPPER, SG_INFO, " master clipper finished." );
// Once clipping is complete, make sure any newly added intersection nodes
// are added to the tgnodes
for (int i = 0; i < TG_MAX_AREA_TYPES; i++) {
for (int j = 0; j < (int)polys_clipped.superpolys[i].size(); ++j ) {
TGPolygon poly = polys_clipped.superpolys[i][j].get_poly();
SG_LOG( SG_CLIPPER, SG_INFO, "Collecting nodes for " << get_area_name( (AreaType)i ) << ":" << j+1 << " of " << (int)polys_clipped.superpolys[i].size() );
for (int k=0; k< poly.contours(); k++) {
for (int l = 0; l < poly.contour_size(k); l++) {
// ensure we have all nodes...
nodes.unique_add( poly.get_pt( k, l ) );
}
}
}
}
return true;
}
@ -1342,30 +1341,19 @@ void TGConstruct::LookupNodesPerVertex( void )
void TGConstruct::LookupFacesPerNode( void )
{
SG_LOG(SG_GENERAL, SG_ALERT, "LookupFacesPerNode");
int five_percent = nodes.size()/20;
for (unsigned int n=0; n<nodes.size(); n++) {
// for each node, traverse all the triangles - and create face lists
for ( unsigned int area = 0; area < TG_MAX_AREA_TYPES; ++area ) {
// Add each face that includes a node to the node's face list
for ( unsigned int area = 0; area < TG_MAX_AREA_TYPES; ++area ) {
for( unsigned int p = 0; p < polys_clipped.superpolys[area].size(); ++p ) {
TGPolygon tris = polys_clipped.superpolys[area][p].get_tris();
for( unsigned int p = 0; p < polys_clipped.superpolys[area].size(); ++p ) {
TGPolyNodes tri_nodes = polys_clipped.superpolys[area][p].get_tri_idxs();
for (int tri=0; tri < tri_nodes.contours(); tri++) {
for (int sub = 0; sub < tri_nodes.contour_size(tri); sub++) {
if ( n == (unsigned int)tri_nodes.get_pt( tri, sub ) ) {
nodes.AddFace( n, area, p, tri );
}
}
for (int tri=0; tri < tris.contours(); tri++) {
for (int sub = 0; sub < tris.contour_size(tri); sub++) {
int n = nodes.find( tris.get_pt( tri, sub ) );
nodes.AddFace( n, area, p, tri );
}
}
}
if (n % five_percent == 0) {
SG_LOG(SG_GENERAL, SG_ALERT, " " << n*5 / five_percent << "% complete" );
}
}
}
@ -1382,9 +1370,12 @@ void TGConstruct::calc_normals( point_list& wgs84_nodes, TGSuperPoly& sp ) {
SGVec3d v1, v2, normal;
TGPolyNodes tri_nodes = sp.get_tri_idxs();
int_list face_nodes;
double_list face_areas;
point_list face_normals;
double area;
face_normals.clear();
face_areas.clear();
for (int i=0; i<tri_nodes.contours(); i++) {
face_nodes = tri_nodes.get_contour(i);
@ -1393,6 +1384,8 @@ void TGConstruct::calc_normals( point_list& wgs84_nodes, TGSuperPoly& sp ) {
Point3D p2 = wgs84_nodes[ face_nodes[1] ];
Point3D p3 = wgs84_nodes[ face_nodes[2] ];
area = calc_tri_area( face_nodes );
// do some sanity checking. With the introduction of landuse
// areas, we can get some long skinny triangles that blow up our
// "normal" calculations here. Let's check for really small
@ -1402,15 +1395,10 @@ void TGConstruct::calc_normals( point_list& wgs84_nodes, TGSuperPoly& sp ) {
bool degenerate = false;
const double area_eps = 1.0e-12;
double area = calc_tri_area( face_nodes );
// cout << " area = " << area << endl;
if ( area < area_eps ) {
degenerate = true;
}
// cout << " " << p1 << endl;
// cout << " " << p2 << endl;
// cout << " " << p3 << endl;
if ( fabs(p1.x() - p2.x()) < SG_EPSILON && fabs(p1.x() - p3.x()) < SG_EPSILON ) {
degenerate = true;
}
@ -1423,7 +1411,6 @@ void TGConstruct::calc_normals( point_list& wgs84_nodes, TGSuperPoly& sp ) {
if ( degenerate ) {
normal = normalize(SGVec3d(p1.x(), p1.y(), p1.z()));
SG_LOG(SG_GENERAL, SG_ALERT, "Degenerate tri!");
} else {
v1[0] = p2.x() - p1.x();
v1[1] = p2.y() - p1.y();
@ -1434,20 +1421,17 @@ void TGConstruct::calc_normals( point_list& wgs84_nodes, TGSuperPoly& sp ) {
normal = normalize(cross(v1, v2));
}
face_normals.push_back( Point3D::fromSGVec3( normal ) );
face_areas.push_back( area );
}
SG_LOG(SG_GENERAL, SG_ALERT, "calculated " << face_normals.size() << " face normals ");
sp.set_face_normals( face_normals );
sp.set_face_areas( face_areas );
}
void TGConstruct::CalcFaceNormals( void )
{
// traverse the superpols, and calc normals for each tri within
SG_LOG(SG_GENERAL, SG_ALERT, "Calculating face normals");
point_list wgs84_nodes = nodes.get_wgs84_nodes_as_Point3d();
for (int i = 0; i < TG_MAX_AREA_TYPES; i++) {
@ -1455,16 +1439,6 @@ void TGConstruct::CalcFaceNormals( void )
SG_LOG( SG_CLIPPER, SG_INFO, "Calculating face normals for " << get_area_name( (AreaType)i ) << ":" << j+1 << " of " << (int)polys_in.superpolys[i].size() );
calc_normals( wgs84_nodes, polys_clipped.superpolys[i][j] );
point_list fns = polys_clipped.superpolys[i][j].get_face_normals();
SG_LOG(SG_GENERAL, SG_ALERT, "SP " << i << "," << j << " has " << fns.size() << " face normals ");
}
}
for (int i = 0; i < TG_MAX_AREA_TYPES; i++) {
for (int j = 0; j < (int)polys_clipped.superpolys[i].size(); ++j ) {
point_list fns = polys_clipped.superpolys[i][j].get_face_normals();
SG_LOG(SG_GENERAL, SG_ALERT, "SP " << i << "," << j << " has " << fns.size() << " face normals ");
}
}
}
@ -1490,128 +1464,45 @@ void TGConstruct::CalcPointNormals( void )
unsigned int poly = faces[j].poly;
unsigned int tri = faces[j].tri;
int_list face_nodes;
double face_area;
normal = polys_clipped.superpolys[at][poly].get_face_normal( tri );
face_nodes = polys_clipped.superpolys[at][poly].get_tri_idxs().get_contour( tri ) ;
face_area = polys_clipped.superpolys[at][poly].get_face_area( tri );
double area = calc_tri_area( face_nodes );
normal *= area; // scale normal weight relative to area
total_area += area;
normal *= face_area; // scale normal weight relative to area
total_area += face_area;
average += normal;
// cout << normal << endl;
}
average /= total_area;
//cout << "average = " << average << endl;
nodes.SetNormal( i, average );
}
}
// master construction routine
// TODO : Split each step into its own function, and move
// into seperate files by major functionality
// loading, clipping, tesselating, normals, and output
// Also, we are still calculating some thing more than one
// (like face area - need to move this into superpoly )
void TGConstruct::construct_bucket( SGBucket b ) {
sprintf(ds_name, "./construct_debug_%ld", b.gen_index() );
ds_id = tgShapefileOpenDatasource( ds_name );
bucket = b;
SG_LOG(SG_GENERAL, SG_ALERT, "Construct tile, bucket = " << bucket );
// STEP 1) Load grid of elevation data (Array)
load_array();
// STEP 2) Clip 2D polygons against one another
if ( load_polys() == 0 ) {
// don't build the tile if there is no 2d data ... it *must*
// be ocean and the sim can build the tile on the fly.
return;
}
// Load the land use polygons if the --cover option was specified
if ( get_cover().size() > 0 ) {
load_landcover();
}
// Get clip bounds
point2d min, max;
min.x = bucket.get_center_lon() - 0.5 * bucket.get_width();
min.y = bucket.get_center_lat() - 0.5 * bucket.get_height();
max.x = bucket.get_center_lon() + 0.5 * bucket.get_width();
max.y = bucket.get_center_lat() + 0.5 * bucket.get_height();
// do clipping
SG_LOG(SG_GENERAL, SG_ALERT, "clipping polygons");
clip_all(min, max);
// SG_LOG(SG_GENERAL, SG_ALERT, "NODE LIST AFTER CLIPPING" );
// nodes.Dump();
// Make sure we have the elavation nodes in the main node database
// I'd like to do this first, but we get initial tgnodes from clipper
point_list corner_list = array.get_corner_list();
if ( corner_list.size() == 0 ) {
SG_LOG(SG_GENERAL, SG_ALERT, "corner list is 0 " );
}
for (unsigned int i=0; i<corner_list.size(); i++) {
SG_LOG(SG_GENERAL, SG_ALERT, "Add corner node " << corner_list[i] );
nodes.unique_add(corner_list[i]);
}
point_list fit_list = array.get_fitted_list();
for (unsigned int i=0; i<fit_list.size(); i++) {
nodes.unique_add(fit_list[i]);
}
SG_LOG(SG_GENERAL, SG_ALERT, "NODE LIST AFTER FITTING" );
// Step 3) Merge in Shared data (just add the nodes to the nodelist)
// When this step is complete, some nodes will have normals (from shared tiles)
// and some will not - need to indicate this in the new node class
SG_LOG(SG_GENERAL, SG_ALERT, "number of geod nodes = before adding adding shared edges" << nodes.size() );
TGMatch m;
m.load_neighbor_shared( bucket, work_base );
// STEP 5 helper : load shared edge data from neighboring tiles so nodes
// along the tile boundary line up with already-built neighbors.  The nodes
// gathered by the TGMatch object are then added to this tile via
// match.add_shared_nodes().
// NOTE(review): the displayed span contained stale pre-rename lines calling
// methods on a removed local `m`; removed here — only `match` is used.
void TGConstruct::LoadSharedEdgeData( void )
{
    // shared data written by neighboring tiles
    match.load_neighbor_shared( bucket, work_base );

    // optionally fall back to edges this tile wrote itself on a prior run
    if ( useOwnSharedEdges ) {
        match.load_missing_shared( bucket, work_base );
    }

    match.add_shared_nodes( this );
}
// SG_LOG(SG_GENERAL, SG_ALERT, "NODE LIST AFTER ADDING SHARED EDGES" );
// nodes.Dump();
for (int i = 0; i < TG_MAX_AREA_TYPES; i++) {
for (int j = 0; j < (int)polys_clipped.superpolys[i].size(); ++j ) {
TGPolygon poly = polys_clipped.superpolys[i][j].get_poly();
void TGConstruct::SaveSharedEdgeData( void )
{
match.split_tile( bucket, this );
SG_LOG(SG_GENERAL, SG_ALERT, "Tile Split");
SG_LOG( SG_CLIPPER, SG_INFO, "Collecting nodes for " << get_area_name( (AreaType)i ) << ":" << j+1 << " of " << (int)polys_clipped.superpolys[i].size() );
if ( writeSharedEdges ) {
SG_LOG(SG_GENERAL, SG_ALERT, "write shared edges");
for (int k=0; k< poly.contours(); k++) {
for (int l = 0; l < poly.contour_size(k); l++) {
// ensure we have all nodes...
nodes.unique_add( poly.get_pt( k, l ) );
}
}
}
match.write_shared( bucket, work_base );
}
}
// Step 4) Add intermediate nodes
// need to add another add intermediate nodes function that can handle the new node class
add_intermediate_nodes();
// After adding intermediate nodes, clean the polys
clean_clipped_polys();
// SG_LOG(SG_GENERAL, SG_ALERT, "NODE LIST AFTER ADDING CLIPPED POLYS" );
// nodes.Dump();
void TGConstruct::TesselatePolys( void )
{
// tesselate the polygons and prepair them for final output
point_list extra = nodes.get_geod_nodes();
for (int i = 0; i < TG_MAX_AREA_TYPES; i++) {
@ -1636,95 +1527,23 @@ void TGConstruct::construct_bucket( SGBucket b ) {
#endif
TGPolygon tri = polygon_tesselate_alt_with_extra( poly, extra, false );
// ensure all added nodes are accounted for
for (int k=0; k< tri.contours(); k++) {
for (int l = 0; l < tri.contour_size(k); l++) {
// ensure we have all nodes...
nodes.unique_add( tri.get_pt( k, l ) );
}
}
// Save the triangulation
polys_clipped.superpolys[i][j].set_tris( tri );
}
}
}
/* Add any points from triangulation */
for (int i = 0; i < TG_MAX_AREA_TYPES; i++) {
for (int j = 0; j < (int)polys_clipped.superpolys[i].size(); ++j ) {
TGPolygon tri_poly = polys_clipped.superpolys[i][j].get_tris();
for (int k=0; k< tri_poly.contours(); k++) {
for (int l = 0; l < tri_poly.contour_size(k); l++) {
// ensure we have all nodes...
nodes.unique_add( tri_poly.get_pt( k, l ) );
}
}
}
}
// SG_LOG(SG_GENERAL, SG_ALERT, "NODE LIST BEFORE FLATTEN" );
// nodes.Dump();
// Step 7 : Generate triangle vertex to node index lists
LookupNodesPerVertex();
// Step 8) Flatten
fix_point_heights();
// SG_LOG(SG_GENERAL, SG_ALERT, "NODE LIST AFTER FLATTEN" );
// nodes.Dump();
// Step 8) Generate face_connected list
LookupFacesPerNode();
nodes.Dump();
// Step 9 - Calculate Face Normals
CalcFaceNormals();
// Step 10 - Calculate Point Normals
CalcPointNormals();
#if 0
if ( c.get_cover().size() > 0 ) {
// Now for all the remaining "default" land cover polygons, assign
// each one it's proper type from the land use/land cover
// database.
fix_land_cover_assignments( c );
}
// Step 12) calculate texture coordinates for each triangle
// Step 13) Sort the triangle list by material (optional)
#endif
// Calc texture coordinates
for (int i = 0; i < TG_MAX_AREA_TYPES; i++) {
for (int j = 0; j < (int)polys_clipped.superpolys[i].size(); ++j ) {
TGPolygon poly = polys_clipped.superpolys[i][j].get_poly();
SG_LOG( SG_CLIPPER, SG_INFO, "Texturing " << get_area_name( (AreaType)i ) << "(" << i << ") :" << j+1 << " of " << (int)polys_clipped.superpolys[i].size() << " : flag = " << polys_clipped.superpolys[i][j].get_flag());
TGPolygon tri = polys_clipped.superpolys[i][j].get_tris();
TGPolygon tc;
if ( polys_clipped.superpolys[i][j].get_flag() == "textured" ) {
// SG_LOG(SG_GENERAL, SG_DEBUG, "USE TEXTURE PARAMS for tex coord calculations" );
// tc = linear_tex_coords( tri, clipped_polys.texparams[i][j] );
tc = area_tex_coords( tri );
} else {
// SG_LOG(SG_GENERAL, SG_DEBUG, "USE SIMGEAR for tex coord calculations" );
tc = area_tex_coords( tri );
}
polys_clipped.superpolys[i][j].set_texcoords( tc );
}
}
// write shared data
m.split_tile( bucket, this );
SG_LOG(SG_GENERAL, SG_ALERT, "Tile Split");
if ( writeSharedEdges ) {
SG_LOG(SG_GENERAL, SG_ALERT, "write shared edges");
m.write_shared( bucket, this );
}
// dump_lat_nodes( c, 32.75 );
// TEMP TEMP TEMP TEMP
void TGConstruct::WriteBtgFile( void )
{
TGTriNodes normals, texcoords;
normals.clear();
texcoords.clear();
@ -1859,22 +1678,6 @@ void TGConstruct::construct_bucket( SGBucket b ) {
{
throw sg_exception("error writing file. :-(");
}
// Step 15) Adding custome objects to the .stg file
// collect custom objects and move to scenery area
do_custom_objects();
// Step 16 : clean the data structures
array.close();
// land class polygons
polys_in.clear();
polys_clipped.clear();
// All Nodes
nodes.clear();
face_normals.clear();
}
void TGConstruct::clean_clipped_polys() {
@ -1906,3 +1709,132 @@ void TGConstruct::clean_clipped_polys() {
}
}
}
// STEP 13 : calculate texture (u,v) coordinates for the triangulation of
// every clipped superpoly, across all area types.  Results are stored back
// on each superpoly via set_texcoords().
void TGConstruct::CalcTextureCoordinates( void )
{
for (int i = 0; i < TG_MAX_AREA_TYPES; i++) {
for (int j = 0; j < (int)polys_clipped.superpolys[i].size(); ++j ) {
TGPolygon poly = polys_clipped.superpolys[i][j].get_poly();
SG_LOG( SG_CLIPPER, SG_INFO, "Texturing " << get_area_name( (AreaType)i ) << "(" << i << ") :" << j+1 << " of " << (int)polys_clipped.superpolys[i].size() << " : flag = " << polys_clipped.superpolys[i][j].get_flag());
TGPolygon tri = polys_clipped.superpolys[i][j].get_tris();
TGPolygon tc;
// NOTE(review): both branches currently compute area_tex_coords(); the
// "textured" path presumably should use linear_tex_coords() with the
// stored texparams once re-enabled — confirm before changing.
if ( polys_clipped.superpolys[i][j].get_flag() == "textured" ) {
// SG_LOG(SG_GENERAL, SG_DEBUG, "USE TEXTURE PARAMS for tex coord calculations" );
// tc = linear_tex_coords( tri, clipped_polys.texparams[i][j] );
tc = area_tex_coords( tri );
} else {
// SG_LOG(SG_GENERAL, SG_DEBUG, "USE SIMGEAR for tex coord calculations" );
tc = area_tex_coords( tri );
}
polys_clipped.superpolys[i][j].set_texcoords( tc );
}
}
}
// master construction routine
// Builds one scenery tile for the given bucket: loads elevation and landclass
// data, clips, tesselates, computes elevations/normals/texcoords, and writes
// the .btg output plus shared edge data.
// TODO : Split each step into its own function, and move
// into separate files by major functionality
// loading, clipping, tesselating, normals, and output
// Also, we are still calculating some things more than once
// (like face area - need to move this into superpoly )
void TGConstruct::ConstructBucket( SGBucket b ) {
sprintf(ds_name, "./construct_debug_%ld", b.gen_index() );
ds_id = tgShapefileOpenDatasource( ds_name );
bucket = b;
SG_LOG(SG_GENERAL, SG_ALERT, "Construct tile, bucket = " << bucket );
// STEP 1)
// Load grid of elevation data (Array)
LoadElevationArray();
// STEP 2)
// Clip 2D polygons against one another
if ( LoadLandclassPolys() == 0 ) {
// don't build the tile if there is no 2d data ... it *must*
// be ocean and the sim can build the tile on the fly.
return;
}
// STEP 3)
// Load the land use polygons if the --cover option was specified
if ( get_cover().size() > 0 ) {
load_landcover();
}
// STEP 4)
// Clip the Landclass polygons
ClipLandclassPolys();
// STEP 5)
// Merge in Shared data (just add the nodes to the nodelist)
// When this step is complete, some nodes will have normals (from shared tiles)
// and some will not
LoadSharedEdgeData();
// STEP 6)
// Fix T-Junctions by finding nodes that lie close to polygon edges, and
// inserting them into the edge
FixTJunctions();
// TODO : Needs to be part of clipping
// just before union : If we need to clean again after fixing tjunctions, make
// sure we don't alter the shape
clean_clipped_polys();
// STEP 7)
// Generate triangles - we can't generate the node-face lookup table
// until all polys are tesselated, as extra nodes can still be generated
TesselatePolys();
// STEP 8)
// Generate triangle vertex coordinates to node index lists
// NOTE: After this point, no new nodes can be added
LookupNodesPerVertex();
// STEP 9)
// Interpolate elevations, and flatten stuff
CalcElevations();
// STEP 10)
// Generate face_connected list -
LookupFacesPerNode();
// STEP 11)
// Calculate Face Normals
CalcFaceNormals();
// STEP 12)
// Calculate Point Normals
CalcPointNormals();
#if 0
if ( c.get_cover().size() > 0 ) {
// Now for all the remaining "default" land cover polygons, assign
// each one it's proper type from the land use/land cover
// database.
fix_land_cover_assignments( c );
}
#endif
// STEP 13)
// Calculate Texture Coordinates
CalcTextureCoordinates();
// STEP 14)
// Write out the shared edge data
SaveSharedEdgeData();
// STEP 15)
// Generate the btg file
WriteBtgFile();
// STEP 16)
// Write Custom objects to .stg file
AddCustomObjects();
}

View file

@ -83,6 +83,9 @@ public:
}
};
// forward declaration
class TGMatch;
class TGConstruct {
private:
@ -126,52 +129,69 @@ private:
// All Nodes
TGNodes nodes;
// TODO : Add to superpoly
// face normal list (for flat shading)
point_list face_normals;
// SHared Edges match data
TGMatch match;
private:
// Load Data
void LoadElevationArray( void );
int LoadLandclassPolys( void );
// Load Data Helpers
bool load_poly(const std::string& path);
bool load_osgb36_poly(const std::string& path);
void add_poly(int area, const TGPolygon &poly, std::string material);
// Clip Data
bool ClipLandclassPolys( void );
// Clip Helpers
void move_slivers( TGPolygon& in, TGPolygon& out );
void merge_slivers( TGPolyList& clipped, poly_list& slivers_list );
// Shared edge Matching
void LoadSharedEdgeData( void );
void SaveSharedEdgeData( void );
// Polygon Cleaning
void FixTJunctions( void );
void clean_clipped_polys( void );
// Tesselation
void TesselatePolys( void );
// Elevation and Flattening
void CalcElevations( void );
// Normals and texture coords
void LookupNodesPerVertex( void );
void LookupFacesPerNode( void );
void CalcFaceNormals( void );
void CalcPointNormals( void );
void CalcTextureCoordinates( void );
// Helpers
TGPolygon linear_tex_coords( const TGPolygon& tri, const TGTexParams& tp );
TGPolygon area_tex_coords( const TGPolygon& tri );
// Should be in superpoly?
// Output
void WriteBtgFile( void );
void AddCustomObjects( void );
// Misc
void calc_normals( point_list& wgs84_nodes, TGSuperPoly& sp );
// Where should this be? Geometry library, I think...
double calc_tri_area( int_list& triangle_nodes );
public:
// Constructor
TGConstruct();
// Destructor
~TGConstruct();
void construct_bucket( SGBucket b );
bool load_array();
int load_polys();
bool load_poly(const std::string& path);
bool load_osgb36_poly(const std::string& path);
void add_poly(int area, const TGPolygon &poly, std::string material);
void ConstructBucket( SGBucket b );
void move_slivers( TGPolygon& in, TGPolygon& out );
void merge_slivers( TGPolyList& clipped, poly_list& slivers_list );
bool clip_all(const point2d& min, const point2d& max);
void add_intermediate_nodes(void);
void clean_clipped_polys(void);
void calc_gc_course_dist( const Point3D& start, const Point3D& dest,
double *course, double *dist );
double distanceSphere( const Point3D p1, const Point3D p2 );
void fix_point_heights();
TGPolygon linear_tex_coords( const TGPolygon& tri, const TGTexParams& tp );
TGPolygon area_tex_coords( const TGPolygon& tri );
int load_landcover ();
void add_to_polys( TGPolygon &accum, const TGPolygon &poly);
@ -181,8 +201,6 @@ public:
double x1, double y1, double x2, double y2,
double half_dx, double half_dy );
void do_custom_objects(void);
// land cover file
inline std::string get_cover () const { return cover; }
inline void set_cover (const std::string &s) { cover = s; }
@ -217,8 +235,8 @@ public:
inline point_list get_geod_nodes() const { return nodes.get_geod_nodes(); }
// face normal list (for flat shading)
inline point_list get_face_normals() const { return face_normals; }
inline void set_face_normals( point_list n ) { face_normals = n; }
// inline point_list get_face_normals() const { return face_normals; }
// inline void set_face_normals( point_list n ) { face_normals = n; }
// normal list (for each point) in cart coords (for smooth
// shading)

View file

@ -60,6 +60,9 @@
#include <Geometry/poly_support.hxx>
#include <landcover/landcover.hxx>
// TODO : Get rid of match...
#include <Match/match.hxx>
#include "construct.hxx"
#include "usgs.hxx"
@ -136,196 +139,6 @@ static void fix_land_cover_assignments( TGConstruct& c ) {
}
#endif
#if 0
// build the node -> element (triangle) reverse lookup table. there
// is an entry for each point containing a list of all the triangles
// that share that point.
// (dead code — kept under #if 0; superseded by the superpoly-based
// LookupFacesPerNode in the TGConstruct class)
static belongs_to_list gen_node_ele_lookup_table( TGConstruct& c ) {
belongs_to_list reverse_ele_lookup;
reverse_ele_lookup.clear();
int_list ele_list;
ele_list.clear();
// initialize reverse_ele_lookup structure by creating an empty
// list for each point
point_list wgs84_nodes = c.get_wgs84_nodes();
SG_LOG(SG_GENERAL, SG_ALERT, "there are " << wgs84_nodes.size() << " wgs84 nodes" );
const_point_list_iterator w_current = wgs84_nodes.begin();
const_point_list_iterator w_last = wgs84_nodes.end();
for ( ; w_current != w_last; ++w_current ) {
reverse_ele_lookup.push_back( ele_list );
}
SG_LOG(SG_GENERAL, SG_ALERT, "1 " );
// traverse triangle structure building reverse lookup table
triele_list tri_elements = c.get_tri_elements();
const_triele_list_iterator current = tri_elements.begin();
const_triele_list_iterator last = tri_elements.end();
int counter = 0;
SG_LOG(SG_GENERAL, SG_ALERT, "2 " );
for ( ; current != last; ++current ) {
//SG_LOG(SG_GENERAL, SG_ALERT, "CURRENT " << current );
// SG_LOG(SG_GENERAL, SG_ALERT, "N1: " << current->get_n1() << " N2: " << current->get_n2() << " N3: " << current->get_n3() );
// each triangle is appended to the per-node list of all three corners
reverse_ele_lookup[ current->get_n1() ].push_back( counter );
reverse_ele_lookup[ current->get_n2() ].push_back( counter );
reverse_ele_lookup[ current->get_n3() ].push_back( counter );
++counter;
}
SG_LOG(SG_GENERAL, SG_ALERT, "3 " );
return reverse_ele_lookup;
}
#endif
// calculate the area for the specified triangle face
// Looks up the three corner nodes of `tri` in the construct's geodetic node
// list and returns the result of triangle_area() on them.
static double tri_ele_area( const TGConstruct& c, const TGTriEle tri ) {
point_list nodes = c.get_geod_nodes();
Point3D p1 = nodes[ tri.get_n1() ];
Point3D p2 = nodes[ tri.get_n2() ];
Point3D p3 = nodes[ tri.get_n3() ];
return triangle_area( p1, p2, p3 );
}
#if 0
// calculate the normal for the specified triangle face
// (dead code — kept under #if 0; the live equivalent is
// TGConstruct::calc_normals which works per-superpoly)
static Point3D calc_normal( TGConstruct& c, int i ) {
SGVec3d v1, v2, normal;
point_list wgs84_nodes = c.get_wgs84_nodes();
triele_list tri_elements = c.get_tri_elements();
Point3D p1 = wgs84_nodes[ tri_elements[i].get_n1() ];
Point3D p2 = wgs84_nodes[ tri_elements[i].get_n2() ];
Point3D p3 = wgs84_nodes[ tri_elements[i].get_n3() ];
// do some sanity checking. With the introduction of landuse
// areas, we can get some long skinny triangles that blow up our
// "normal" calculations here. Let's check for really small
// triangle areas and check if one dimension of the triangle
// coordinates is nearly coincident. If so, assign the "default"
// normal of straight up.
bool degenerate = false;
const double area_eps = 1.0e-12;
double area = tri_ele_area( c, tri_elements[i] );
// cout << " area = " << area << endl;
if ( area < area_eps ) {
degenerate = true;
}
// cout << " " << p1 << endl;
// cout << " " << p2 << endl;
// cout << " " << p3 << endl;
// a triangle flat in any single coordinate is treated as degenerate
if ( fabs(p1.x() - p2.x()) < SG_EPSILON && fabs(p1.x() - p3.x()) < SG_EPSILON ) {
degenerate = true;
}
if ( fabs(p1.y() - p2.y()) < SG_EPSILON && fabs(p1.y() - p3.y()) < SG_EPSILON ) {
degenerate = true;
}
if ( fabs(p1.z() - p2.z()) < SG_EPSILON && fabs(p1.z() - p3.z()) < SG_EPSILON ) {
degenerate = true;
}
if ( degenerate ) {
// fall back to the normalized position vector ("straight up" in
// earth-centered coordinates)
normal = normalize(SGVec3d(p1.x(), p1.y(), p1.z()));
SG_LOG(SG_GENERAL, SG_ALERT, "Degenerate tri!");
} else {
v1[0] = p2.x() - p1.x();
v1[1] = p2.y() - p1.y();
v1[2] = p2.z() - p1.z();
v2[0] = p3.x() - p1.x();
v2[1] = p3.y() - p1.y();
v2[2] = p3.z() - p1.z();
normal = normalize(cross(v1, v2));
}
return Point3D( normal[0], normal[1], normal[2] );
}
#endif
#if 0
// build the face normal list
static point_list gen_face_normals( TGConstruct& c ) {
    SG_LOG(SG_GENERAL, SG_ALERT, "calculating face normals");

    // one normal per triangle element, in element order
    triele_list tri_elements = c.get_tri_elements();

    point_list face_normals;
    face_normals.reserve( tri_elements.size() );

    for ( unsigned int idx = 0; idx < tri_elements.size(); ++idx ) {
        face_normals.push_back( calc_normal( c, (int)idx ) );
    }

    return face_normals;
}
#endif
#if 0
// calculate the normals for each point in wgs84_nodes
static point_list gen_point_normals( TGConstruct& c ) {
    SG_LOG(SG_GENERAL, SG_ALERT, "calculating node normals");

    point_list wgs84_nodes = c.get_wgs84_nodes();
    belongs_to_list reverse_ele_lookup = c.get_reverse_ele_lookup();
    point_list face_normals = c.get_face_normals();
    triele_list tri_elements = c.get_tri_elements();

    point_list point_normals;
    point_normals.reserve( wgs84_nodes.size() );

    // Each node normal is the area-weighted average of the face normals of
    // every triangle that shares the node (via the reverse element lookup).
    for ( unsigned int node = 0; node < wgs84_nodes.size(); ++node ) {
        const int_list& tri_list = reverse_ele_lookup[node];

        Point3D weighted_sum( 0.0 );
        double total_area = 0.0;

        for ( unsigned int t = 0; t < tri_list.size(); ++t ) {
            int tri_idx = tri_list[t];
            double area = tri_ele_area( c, tri_elements[ tri_idx ] );

            // scale this face's normal by its area before accumulating
            Point3D weighted = face_normals[ tri_idx ];
            weighted *= area;

            weighted_sum += weighted;
            total_area += area;
        }

        weighted_sum /= total_area;
        point_normals.push_back( weighted_sum );
    }

    SG_LOG(SG_GENERAL, SG_ALERT, "1st");
    SG_LOG(SG_GENERAL, SG_ALERT, "wgs84 node list size = " << wgs84_nodes.size());
    SG_LOG(SG_GENERAL, SG_ALERT, "normal list size = " << point_normals.size());

    return point_normals;
}
#endif
# if 0
// generate the flight gear scenery file
// (disabled legacy helper kept for reference: delegates output generation
// entirely to TGGenOutput using the construct state in 'c')
static void do_output( TGConstruct& c, TGGenOutput& output ) {
// assemble the output triangle data from the constructed tile
output.build_tris( c );
// write the assembled triangles out — presumably to the scenery file
// named above; exact destination is decided inside TGGenOutput
output.write_tris( c );
}
#endif
// display usage and exit
static void usage( const string name ) {
SG_LOG(SG_GENERAL, SG_ALERT, "Usage: " << name);
@ -506,7 +319,7 @@ int main(int argc, char **argv) {
c->set_ignore_landmass( ignoreLandmass );
c->set_nudge( nudge );
c->construct_bucket( b );
c->ConstructBucket( b );
delete c;
} else {
// build all the tiles in an area
@ -531,7 +344,7 @@ int main(int argc, char **argv) {
c->set_ignore_landmass( ignoreLandmass );
c->set_nudge( nudge );
c->construct_bucket( b_min );
c->ConstructBucket( b_min );
delete c;
} else {
SGBucket b_cur;
@ -561,7 +374,7 @@ int main(int argc, char **argv) {
c->set_ignore_landmass( ignoreLandmass );
c->set_nudge( nudge );
c->construct_bucket( b_cur );
c->ConstructBucket( b_cur );
delete c;
} else {
SG_LOG(SG_GENERAL, SG_ALERT, "skipping " << b_cur);
@ -586,7 +399,7 @@ int main(int argc, char **argv) {
c->set_ignore_landmass( ignoreLandmass );
c->set_nudge( nudge );
c->construct_bucket( b );
c->ConstructBucket( b );
delete c;
}

View file

@ -28,17 +28,18 @@
#include <simgear/compiler.h>
#include <Geometry/point3d.hxx>
#include <Geometry/tg_nodes.hxx>
#include <Polygon/point2d.hxx>
#include "match.hxx"
#include <Main/construct.hxx>
#include <simgear/math/sg_geodesy.hxx>
#include <simgear/misc/sgstream.hxx>
#include <simgear/misc/sg_path.hxx>
#include <simgear/debug/logstream.hxx>
#include "match.hxx"
#include <stdlib.h>
using std::cout;
using std::endl;
//using std::cout;
//using std::endl;
using std::string;
TGMatch::TGMatch( void ) {
@ -55,15 +56,15 @@ void TGMatch::scan_share_file( const string& dir, const SGBucket& b,
{
string file = dir + "/" + b.gen_base_path() + "/" + b.gen_index_str();
cout << "reading shared data from " << file << endl;
// cout << "reading shared data from " << file << endl;
sg_gzifstream in( file );
if ( !in.is_open() ) {
cout << "Cannot open file: " << file << endl;
// cout << "Cannot open file: " << file << endl;
return;
}
cout << "open successful." << endl;
// cout << "open successful." << endl;
string target;
if ( search == SW_Corner ) {
@ -202,7 +203,7 @@ void TGMatch::load_shared( SGBucket b, string base, neighbor_type n ) {
// load any previously existing shared data from all neighbors (if
// shared data for a component exists set that components flag to true
void TGMatch::load_neighbor_shared( SGBucket b, string work ) {
cout << "Loading existing shared data from neighbor tiles" << endl;
// cout << "Loading existing shared data from neighbor tiles" << endl;
string base = work + "/Shared/";
@ -225,6 +226,7 @@ void TGMatch::load_neighbor_shared( SGBucket b, string work ) {
load_shared( b, base, EAST );
load_shared( b, base, WEST );
#if 0
cout << "Shared data read in:" << endl;
if ( sw_flag ) {
cout << " sw corner = " << sw_node << endl;
@ -266,6 +268,7 @@ void TGMatch::load_neighbor_shared( SGBucket b, string work ) {
cout << " " << west_nodes[i] << endl;
}
}
#endif
}
// try to load any missing shared data from our own shared data file
@ -318,7 +321,7 @@ Point3D tgFakeNormal( const Point3D& p ) {
double len = Point3D(0.0).distance3D(cart);
// cout << "len = " << len << endl;
cart /= len;
cout << "new fake normal = " << cart << endl;
// cout << "new fake normal = " << cart << endl;
return cart;
}
@ -331,8 +334,8 @@ Point3D tgFakeNormal( const Point3D& p ) {
void TGMatch::split_tile( SGBucket b, TGConstruct* c ) {
int i;
cout << "Spliting tile" << endl;
cout << " extracting (shared) edge nodes and normals" << endl;
//cout << "Spliting tile" << endl;
//cout << " extracting (shared) edge nodes and normals" << endl;
// calculate tile boundaries
point2d min, max;
@ -505,8 +508,7 @@ void TGMatch::split_tile( SGBucket b, TGConstruct* c ) {
// write the new shared edge points, normals, and segments for this
// tile
void TGMatch::write_shared( SGBucket b, TGConstruct* c ) {
string base = c->get_work_base();
void TGMatch::write_shared( SGBucket b, string base ) {
string dir = base + "/Shared/" + b.gen_base_path();
string file = dir + "/" + b.gen_index_str();
@ -515,9 +517,10 @@ void TGMatch::write_shared( SGBucket b, TGConstruct* c ) {
sgp.append( "dummy" );
sgp.create_dir( 0755 );
cout << "shared data will be written to " << file << endl;
//cout << "shared data will be written to " << file << endl;
#if 0
cout << "FLAGS" << endl;
cout << "=====" << endl;
cout << "sw_flag = " << sw_flag << endl;
@ -528,10 +531,11 @@ void TGMatch::write_shared( SGBucket b, TGConstruct* c ) {
cout << "south_flag = " << south_flag << endl;
cout << "east_flag = " << east_flag << endl;
cout << "west_flag = " << west_flag << endl;
#endif
FILE *fp;
if ( (fp = fopen( file.c_str(), "w" )) == NULL ) {
cout << "ERROR: opening " << file << " for writing!" << endl;
// cout << "ERROR: opening " << file << " for writing!" << endl;
exit(-1);
}
@ -687,7 +691,7 @@ void TGMatch::add_shared_nodes( TGConstruct* c ) {
TGNodes* nodes;
nodes = c->get_nodes();
cout << " BEFORE ADDING SHARED NODES: " << nodes->size() << endl;
// cout << " BEFORE ADDING SHARED NODES: " << nodes->size() << endl;
if ( sw_flag ) {
nodes->unique_add_fixed_elevation( sw_node );
@ -729,7 +733,7 @@ void TGMatch::add_shared_nodes( TGConstruct* c ) {
}
}
cout << " AFTER ADDING SHARED NODES: " << nodes->size() << endl;
// cout << " AFTER ADDING SHARED NODES: " << nodes->size() << endl;
}
// reassemble the tile pieces (combining the shared data and our own
@ -758,9 +762,9 @@ void TGMatch::assemble_tile( TGConstruct* c ) {
int nw_index = new_nodes.unique_add( nw_node );
insert_normal( new_normals, nw_normal, nw_index );
cout << "after adding corners:" << endl;
cout << " new_nodes = " << new_nodes.size() << endl;
cout << " new normals = " << new_normals.size() << endl;
// cout << "after adding corners:" << endl;
// cout << " new_nodes = " << new_nodes.size() << endl;
// cout << " new normals = " << new_normals.size() << endl;
// add the edge points
@ -790,9 +794,9 @@ void TGMatch::assemble_tile( TGConstruct* c ) {
insert_normal( new_normals, west_normals[i], index );
}
cout << "after adding edges:" << endl;
cout << " new_nodes = " << new_nodes.size() << endl;
cout << " new normals = " << new_normals.size() << endl;
// cout << "after adding edges:" << endl;
// cout << " new_nodes = " << new_nodes.size() << endl;
// cout << " new normals = " << new_normals.size() << endl;
// add the body points
for ( i = 0; i < (int)body_nodes.size(); ++i ) {
@ -800,9 +804,9 @@ void TGMatch::assemble_tile( TGConstruct* c ) {
insert_normal( new_normals, body_normals[i], index );
}
cout << "after adding body points:" << endl;
cout << " new_nodes = " << new_nodes.size() << endl;
cout << " new normals = " << new_normals.size() << endl;
// cout << "after adding body points:" << endl;
// cout << " new_nodes = " << new_nodes.size() << endl;
// cout << " new normals = " << new_normals.size() << endl;
// add the edge segments
new_segs.unique_divide_and_add( new_nodes.get_node_list(),
@ -813,9 +817,10 @@ void TGMatch::assemble_tile( TGConstruct* c ) {
TGTriSeg(ne_index, nw_index, 1) );
new_segs.unique_divide_and_add( new_nodes.get_node_list(),
TGTriSeg(nw_index, sw_index, 1) );
cout << "after adding edge segments:" << endl;
cout << " new_nodes = " << new_nodes.size() << endl;
cout << " new normals = " << new_normals.size() << endl;
// cout << "after adding edge segments:" << endl;
// cout << " new_nodes = " << new_nodes.size() << endl;
// cout << " new normals = " << new_normals.size() << endl;
// add the body segments
@ -840,16 +845,16 @@ void TGMatch::assemble_tile( TGConstruct* c ) {
// their new index)
n1 = new_nodes.unique_add( p1 );
if ( n1 >= (int)new_normals.size() ) {
cout << "Adding a segment resulted in a new node, faking a normal"
<< endl;
// cout << "Adding a segment resulted in a new node, faking a normal"
// << endl;
Point3D fake = tgFakeNormal( p1 );
insert_normal( new_normals, fake, n1 );
}
n2 = new_nodes.unique_add( p2 );
if ( n2 >= (int)new_normals.size() ) {
cout << "Adding a segment resulted in a new node, faking a normal"
<< endl;
// cout << "Adding a segment resulted in a new node, faking a normal"
// << endl;
Point3D fake = tgFakeNormal( p2 );
insert_normal( new_normals, fake, n2 );
}

View file

@ -29,13 +29,21 @@
# error This library requires C++
#endif
#include <string>
#include <vector>
#include <simgear/compiler.h>
#include <simgear/math/sg_types.hxx>
#include <simgear/bucket/newbucket.hxx>
#include <Main/construct.hxx>
// TO REMOVE
#include <Geometry/trieles.hxx>
#include <Geometry/trinodes.hxx>
#include <Geometry/trisegs.hxx>
// TO REMOVE
// Forward Declaration
class TGConstruct;
class TGMatch {
@ -107,7 +115,7 @@ public:
// write the new shared edge points, normals, and segments for
// this tile
void write_shared( SGBucket b, TGConstruct* c );
void write_shared( SGBucket b, std::string base );
// reassemble the tile pieces (combining the shared data and our
// own data)

View file

@ -36,7 +36,6 @@ TGSuperPoly::~TGSuperPoly()
{
}
// erase the "super" polygon
void TGSuperPoly::erase()
{

View file

@ -41,6 +41,7 @@
// TODO : Needs to be its own class
typedef std::vector < int > int_list;
typedef std::vector < int_list > idx_list;
typedef std::vector < double > double_list;
typedef idx_list::iterator idx_list_iterator;
typedef idx_list::const_iterator const_idx_list_iterator;
@ -136,6 +137,7 @@ TGPolygon tris; // triangulation
TGPolyNodes tri_idxs; // triangle node indexes
point_list face_normals; // triangle normals
double_list face_areas; // triangle areas
std::string flag; // For various potential record keeping needs
public:
@ -212,6 +214,20 @@ inline void set_face_normals( const point_list &fns )
face_normals = fns;
}
// return the precomputed area of triangle 'tri'
// (index into face_areas; no bounds checking is performed)
inline double get_face_area( int tri ) const
{
return face_areas[tri];
}
// return the whole per-triangle area list
// NOTE(review): returns by value, so the vector is copied on every call —
// consider a const reference if callers only read it; confirm call sites
inline double_list get_face_areas() const
{
return face_areas;
}
// replace the cached per-triangle area list
inline void set_face_areas( const double_list &fas )
{
face_areas = fas;
}
inline std::string get_flag() const
{
return flag;