Commit c46e89f

Committed Jan 2, 2020
[processing] Use bulk spatial index loader in Delete Duplicates algorithm
Cuts processing time to 70% of original for a 3 million point input file (debug build)
1 parent fe6abba commit c46e89f

File tree: 1 file changed (+6 / -6 lines)

 

src/analysis/processing/qgsalgorithmdeleteduplicategeometries.cpp

Lines changed: 6 additions & 6 deletions
@@ -93,13 +93,11 @@ QVariantMap QgsDeleteDuplicateGeometriesAlgorithm::processAlgorithm( const QVari
   double step = mSource->featureCount() > 0 ? 100.0 / mSource->featureCount() : 0;
   QHash< QgsFeatureId, QgsGeometry > geometries;
   QSet< QgsFeatureId > nullGeometryFeatures;
-  QgsSpatialIndex index;
-  QgsFeature f;
   long current = 0;
-  while ( it.nextFeature( f ) )
+  QgsSpatialIndex index( it, [&]( const QgsFeature & f ) ->bool
   {
     if ( feedback->isCanceled() )
-      break;
+      return false;
 
     if ( !f.hasGeometry() )
     {
@@ -108,13 +106,15 @@ QVariantMap QgsDeleteDuplicateGeometriesAlgorithm::processAlgorithm( const QVari
     else
     {
       geometries.insert( f.id(), f.geometry() );
-      index.addFeature( f );
     }
 
     // overall this loop takes about 10% of time
     current++;
     feedback->setProgress( 0.10 * current * step );
-  }
+    return true;
+  } );
+
+  QgsFeature f;
 
   // start by assuming everything is unique, and chop away at this list
   QHash< QgsFeatureId, QgsGeometry > uniqueFeatures = geometries;
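
For readers unfamiliar with the constructor used above: QgsSpatialIndex can be built directly from a feature iterator, optionally with a per-feature callback, so that index construction and any per-feature bookkeeping share a single pass over the source; returning false from the callback aborts the load, which is how the cancellation check above works. The following is a minimal sketch of that pattern, not code from the commit; the layer pointer, geometry cache and cancellation flag are hypothetical stand-ins for the algorithm's own source, hash and feedback object.

// Minimal sketch of bulk-loading a QgsSpatialIndex while caching geometries
// in the same pass. Names here (layer, geometries, cancelled) are illustrative.
#include <QHash>
#include "qgsfeature.h"
#include "qgsfeatureiterator.h"
#include "qgsgeometry.h"
#include "qgsspatialindex.h"
#include "qgsvectorlayer.h"

QgsSpatialIndex buildIndexWithGeometryCache( QgsVectorLayer *layer,
    QHash< QgsFeatureId, QgsGeometry > &geometries,
    const bool &cancelled )
{
  QgsFeatureIterator it = layer->getFeatures();

  // The callback runs once per feature pulled from the iterator;
  // returning false aborts the bulk load early.
  return QgsSpatialIndex( it, [&]( const QgsFeature &f ) -> bool
  {
    if ( cancelled )
      return false;                               // stop loading, e.g. on user cancel

    if ( f.hasGeometry() )
      geometries.insert( f.id(), f.geometry() );  // side work done while indexing

    return true;                                  // keep loading
  } );
}

Compared with calling index.addFeature() once per feature inside an explicit loop, handing the whole iterator to the constructor lets the underlying R-tree be bulk-loaded, which is presumably the source of the speed-up quoted in the commit message (processing time cut to 70% of the original for a 3 million point input, debug build).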

0 commit comments