USDLayerWriter : Try out parallelReduceLocations
danieldresser-ie committed Feb 8, 2025
1 parent 7fb3f9f commit c635c14
Showing 1 changed file with 79 additions and 105 deletions.
184 changes: 79 additions & 105 deletions src/GafferUSD/USDLayerWriter.cpp
@@ -83,6 +83,7 @@ struct Filters
PathMatcher prune;
PathMatcher deleteObject;
PathMatcher deleteAttributes;
ScenePlug::ScenePath localPath;

// Merges in filters from sibling locations.
void add( const Filters &other )
@@ -94,12 +95,12 @@
}

// Merges in filters from child locations.
void addWithPrefix( const Filters &other, const ScenePlug::ScenePath &prefix )
void addWithPrefix( const Filters &other )
{
fullyPruned = fullyPruned && other.fullyPruned;
prune.addPaths( other.prune, prefix );
deleteAttributes.addPaths( other.deleteAttributes, prefix );
deleteObject.addPaths( other.deleteObject, prefix );
prune.addPaths( other.prune, other.localPath );
deleteAttributes.addPaths( other.deleteAttributes, other.localPath );
deleteObject.addPaths( other.deleteObject, other.localPath );
}

};
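For reference, here is a minimal standalone sketch, not part of the commit, of the prefix-as-you-unwind idea behind `addWithPrefix()` and the new `localPath` member. It assumes IECore's `PathMatcher::addPaths( paths, prefix )` overload, the same one used in the hunk above:

#include "IECore/PathMatcher.h"

#include <iostream>
#include <vector>

int main()
{
	using IECore::InternedString;
	using IECore::PathMatcher;

	// A child location's result, expressed relative to that child only.
	PathMatcher childResult;
	childResult.addPath( std::vector<InternedString>( { "sphere" } ) );

	// As the recursion unwinds, the parent prefixes the child's result with
	// the child's own name, so full paths are assembled once per level rather
	// than being copied into every leaf.
	PathMatcher parentResult;
	parentResult.addPaths( childResult, std::vector<InternedString>( { "group" } ) );

	// `parentResult` now contains "/group/sphere".
	std::cout << static_cast<bool>(
		parentResult.match( std::vector<InternedString>( { "group", "sphere" } ) ) & PathMatcher::ExactMatch
	) << "\n";
	return 0;
}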
@@ -109,116 +110,90 @@ struct Filters
// - Objects with identical hashes will be included in `Filter::deleteObject`.
// - Attributes with identical hashes will be included in `Filter::deleteAttributes`.
// - Subtrees which are identical in all properties will be included in `Filter::prune`.
//
/// \todo The core logic of this could be lifted into a `SceneAlgo::parallelReduceLocations()`
/// and reused elsewhere. The recursive way we're building PathMatchers by prefixing with the
/// parent as we unwind might outperform other approaches we're using elsewhere.
Filters filtersWalk( const GafferScene::ScenePlug *baseScene, const GafferScene::ScenePlug *layerScene, const std::vector<float> &frames, const ScenePlug::ScenePath &path, const Gaffer::ThreadState &threadState, tbb::task_group_context &taskGroupContext )
Filters buildFilters( const GafferScene::ScenePlug *baseScene, const GafferScene::ScenePlug *layerScene, const std::vector<float> &frames, const ScenePlug::ScenePath &path )
{
ScenePlug::PathScope pathScope( threadState, &path );

bool attributesMatch = true;
bool objectsMatch = true;
bool canPrune = true;

for( auto frame : frames )
{
pathScope.setFrame( frame );

if( !layerScene->existsPlug()->getValue() )
{
return { false, g_emptyPathMatcher, g_emptyPathMatcher, g_emptyPathMatcher };
}

if( attributesMatch )
{
attributesMatch = baseScene->attributesPlug()->hash() == layerScene->attributesPlug()->hash();
canPrune = canPrune && attributesMatch;
}

if( objectsMatch )
{
objectsMatch = baseScene->objectPlug()->hash() == layerScene->objectPlug()->hash();
canPrune = canPrune && objectsMatch;
}

if( canPrune )
{
canPrune = baseScene->transformPlug()->hash() == layerScene->transformPlug()->hash();
}

if( canPrune )
{
canPrune = baseScene->boundPlug()->hash() == layerScene->boundPlug()->hash();
}
}

ScenePlug::ScenePath localPath;
if( path.size() )
{
// We only need the last part, because we prefix with
// the parent path as we unwind the recursion.
localPath.push_back( path.back() );
}

Filters result;
result.fullyPruned = canPrune;
if( attributesMatch )
{
result.deleteAttributes.addPath( localPath );
}
if( objectsMatch )
{
result.deleteObject.addPath( localPath );
}

IECore::ConstInternedStringVectorDataPtr baseChildNamesData = baseScene->childNamesPlug()->getValue();
const vector<InternedString> &baseChildNames = baseChildNamesData->readable();
if( !baseChildNames.empty() )
{
using ChildNameRange = tbb::blocked_range<std::vector<IECore::InternedString>::const_iterator>;
const ChildNameRange loopRange( baseChildNames.begin(), baseChildNames.end() );

Filters childFilters = tbb::parallel_reduce(

loopRange,

Filters(),

[&] ( const ChildNameRange &range, Filters x ) {
Context::EditableScope childNamesScope( Context::current() );
// TODO : This doesn't seem right, but I'm trying to match the behaviour of the previous implementation,
// where it looks like after evaluating the last frame, a scope was left open holding the last frame
// in the shutter.
childNamesScope.setFrame( frames.back() );

return GafferScene::SceneAlgo::parallelReduceLocations(
baseScene,
Filters(),
[&] ( const ScenePlug *scene, const ScenePlug::ScenePath &path ) -> Filters
{
bool attributesMatch = true;
bool objectsMatch = true;
bool canPrune = true;

Context::EditableScope scope( Context::current() );
for( auto frame : frames )
{
scope.setFrame( frame );

ScenePlug::ScenePath childPath = path;
childPath.push_back( InternedString() );
for( const auto &childName : range )
if( !layerScene->existsPlug()->getValue() )
{
childPath.back() = childName;
const Filters childFilters = filtersWalk( baseScene, layerScene, frames, childPath, threadState, taskGroupContext );
x.add( childFilters );
return { false, g_emptyPathMatcher, g_emptyPathMatcher, g_emptyPathMatcher, ScenePlug::ScenePath() };
}
return x;
},

[] ( Filters x, const Filters &y ) {

x.add( y );
return x;

},
if( attributesMatch )
{
attributesMatch = baseScene->attributesPlug()->hash() == layerScene->attributesPlug()->hash();
canPrune = canPrune && attributesMatch;
}

taskGroupContext
if( objectsMatch )
{
objectsMatch = baseScene->objectPlug()->hash() == layerScene->objectPlug()->hash();
canPrune = canPrune && objectsMatch;
}

if( canPrune )
{
canPrune = baseScene->transformPlug()->hash() == layerScene->transformPlug()->hash();
}

);
if( canPrune )
{
canPrune = baseScene->boundPlug()->hash() == layerScene->boundPlug()->hash();
}
}

result.addWithPrefix( childFilters, localPath );
}
Filters result;
if( path.size() )
{
// We only need the last part, because we prefix with
// the parent path as we unwind the recursion.
result.localPath.push_back( path.back() );
}

if( result.fullyPruned )
{
result.prune.addPath( localPath );
}
result.fullyPruned = canPrune;
if( attributesMatch )
{
result.deleteAttributes.addPath( result.localPath );
}
if( objectsMatch )
{
result.deleteObject.addPath( result.localPath );
}

return result;
return result;
},
[]( Filters &result, const Filters &childrenResult )
{
result.addWithPrefix( childrenResult );
if( result.fullyPruned )
{
result.prune.addPath( result.localPath );
}
},
[]( Filters &result, const Filters &sibling )
{
result.add( sibling );
},
path
);
}

class ScopedDirectory : boost::noncopyable
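The `SceneAlgo::parallelReduceLocations()` call in the hunk above shows the shape of the helper being tried out: an initial value, a per-location functor, a functor that folds a location's combined children into that location's result, a functor that folds sibling results together, and a root path. The following is a hypothetical serial sketch of the reduction order implied by that call site; it is not Gaffer's actual implementation, which presumably evaluates children in parallel.

#include "Gaffer/Context.h"

#include "GafferScene/ScenePlug.h"

#include "IECore/VectorTypedData.h"

// Serial illustration only : visit a location, reduce each child subtree,
// fold the children together with `siblingReduce`, then fold the combined
// children into this location's own result with `childReduce`.
template<typename Result, typename LocationFn, typename ChildReduceFn, typename SiblingReduceFn>
Result reduceLocations(
	const GafferScene::ScenePlug *scene, const Result &init,
	LocationFn &&locationFn, ChildReduceFn &&childReduce, SiblingReduceFn &&siblingReduce,
	const GafferScene::ScenePlug::ScenePath &path = GafferScene::ScenePlug::ScenePath()
)
{
	GafferScene::ScenePlug::PathScope pathScope( Gaffer::Context::current(), &path );

	// Per-location work, evaluated with `path` in the current context.
	Result result = locationFn( scene, path );

	// Recurse, accumulating all child subtrees into a single result.
	IECore::ConstInternedStringVectorDataPtr childNamesData = scene->childNamesPlug()->getValue();
	Result childrenResult = init;
	GafferScene::ScenePlug::ScenePath childPath = path;
	childPath.push_back( IECore::InternedString() );
	for( const auto &childName : childNamesData->readable() )
	{
		childPath.back() = childName;
		siblingReduce( childrenResult, reduceLocations( scene, init, locationFn, childReduce, siblingReduce, childPath ) );
	}

	// Merge the children into this location's result before handing it back
	// to the parent.
	childReduce( result, childrenResult );
	return result;
}

With the functors defined in the hunk above, calling this with `Filters()` as the initial value would walk the base scene and assemble the same prune / deleteObject / deleteAttributes matchers, just without the task spawning.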
@@ -547,8 +522,7 @@ void USDLayerWriter::executeSequence( const std::vector<float> &frames ) const

// Figure out the filters for our Prune, DeleteObject and DeleteAttribute
// nodes.
tbb::task_group_context taskGroupContext( tbb::task_group_context::isolated ); // Prevents outer tasks silently cancelling our tasks
const Filters filters = filtersWalk( basePlug(), layerPlug(), frames, ScenePlug::ScenePath(), ThreadState::current(), taskGroupContext );
const Filters filters = buildFilters( basePlug(), layerPlug(), frames, ScenePlug::ScenePath() );

// Pass the filter settings via context variables since we can't call
// `Plug::setValue()` from `executeSequence()` because it would violate

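The comment in the final hunk refers to handing the computed filters to the internal node network through context variables rather than `Plug::setValue()`, which can't be called from `executeSequence()`. A generic illustration of that mechanism, assuming Gaffer's `Context::EditableScope` API and using a hypothetical variable name:

#include "Gaffer/Context.h"

#include "IECore/InternedString.h"

#include <iostream>
#include <string>

int main()
{
	// Hypothetical variable name, for illustration only.
	const IECore::InternedString name( "usdLayerWriter:exampleVariable" );
	const std::string value = "/group/sphere";

	Gaffer::ContextPtr context = new Gaffer::Context();
	{
		// The scope makes an editable copy of `context` current for this
		// thread; anything evaluated inside it sees the extra variable.
		Gaffer::Context::EditableScope scope( context.get() );
		scope.set( name, &value );
		std::cout << Gaffer::Context::current()->get<std::string>( name ) << "\n";
	}
	// Once the scope closes, the variable is gone and no plug was edited.
	return 0;
}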