[processing] Use bulk spatial index loader in Delete Duplicates algorithm

Cuts processing time to 70% of original for a 3 million point input file
(debug build)
nyalldawson committed Jan 2, 2020
1 parent fe6abba · commit c46e89f
Showing 1 changed file with 6 additions and 6 deletions.
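
For context, the change below replaces a per-feature index.addFeature( f ) loop with the callback overload of the QgsSpatialIndex constructor, which bulk-loads the index in a single pass over a feature iterator and lets the callback abort the load (here, on cancellation). A minimal sketch of that usage follows; buildIndex() is a hypothetical helper, and the source and feedback objects are assumed to be supplied by the calling algorithm, so this illustrates the pattern rather than reproducing the commit's exact code:

// Sketch only: bulk-load a QgsSpatialIndex from a feature iterator.
// buildIndex() is a hypothetical helper; header names follow the QGIS 3.x
// source tree, and source/feedback are assumed to come from the caller.
#include "qgsfeatureiterator.h"
#include "qgsfeaturesource.h"
#include "qgsprocessingfeedback.h"
#include "qgsspatialindex.h"

QgsSpatialIndex buildIndex( const QgsFeatureSource &source, QgsProcessingFeedback *feedback )
{
  QgsFeatureIterator it = source.getFeatures();

  // The callback runs once per feature while the index is built; returning
  // false stops the bulk load early (used here for user cancellation).
  return QgsSpatialIndex( it, [feedback]( const QgsFeature & ) -> bool
  {
    return !feedback->isCanceled();
  } );
}

Building the index this way avoids the repeated per-feature insertions of the old loop, which is presumably where the reported speed-up for large point layers comes from.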
@@ -93,13 +93,11 @@ QVariantMap QgsDeleteDuplicateGeometriesAlgorithm::processAlgorithm( const QVari
   double step = mSource->featureCount() > 0 ? 100.0 / mSource->featureCount() : 0;
   QHash< QgsFeatureId, QgsGeometry > geometries;
   QSet< QgsFeatureId > nullGeometryFeatures;
-  QgsSpatialIndex index;
-  QgsFeature f;
   long current = 0;
-  while ( it.nextFeature( f ) )
+  QgsSpatialIndex index( it, [&]( const QgsFeature & f ) ->bool
   {
     if ( feedback->isCanceled() )
-      break;
+      return false;
 
     if ( !f.hasGeometry() )
     {
@@ -108,13 +106,15 @@ QVariantMap QgsDeleteDuplicateGeometriesAlgorithm::processAlgorithm( const QVari
     else
     {
       geometries.insert( f.id(), f.geometry() );
-      index.addFeature( f );
     }
 
     // overall this loop takes about 10% of time
     current++;
     feedback->setProgress( 0.10 * current * step );
-  }
+    return true;
+  } );
+
+  QgsFeature f;
 
   // start by assuming everything is unique, and chop away at this list
   QHash< QgsFeatureId, QgsGeometry > uniqueFeatures = geometries;
