Fabric: The diffing algorithm does not use source nodes anymore

Summary:
@public
... and it's as efficient as it was before.

The previous version of the algorithm used the `sourceNode` reference to learn the previous state of a node when calling the algorithm recursively.
That approach was problematic for several reasons:
 - It was fragile, because we had two different sources of truth for the "previous state of the tree": the committed tree and the source node pointer;
 - We had to store weak pointers to source nodes inside cloned nodes, which is not free in terms of performance;
 - It introduced the constraint that all previously used and now reinserted nodes must be cloned to update their source node (otherwise, the algorithm would generate instructions recreating already existing subtrees);
 - That cloning required access to the `isSealed` flag, which is supposed to be a debug-only facility (it actually affects performance and must be compiled out in release builds).

The new approach compares nodes with the same React tag and is naturally resilient to cloning artifacts.

Yes, the new approach uses a map of inserted nodes, but the previous one already had one (otherwise there would be no way to tell which nodes should be "deleted"). In any case, it is a very small map that exists for a very short period of time.
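
To make the shape of the new algorithm concrete, here is a minimal, self-contained C++ sketch of the idea. This is not the actual Fabric code: `Node`, `SharedNode`, and `diffChildren` are simplified stand-ins for `ShadowNode` and `calculateMutationInstructions`, and the printed lines stand in for `TreeMutationInstruction`s (the real algorithm also emits `update` only when a node actually changed).

#include <cstdio>
#include <memory>
#include <unordered_map>
#include <vector>

using Tag = int;

// Simplified stand-in for ShadowNode: just a tag and a list of children.
struct Node;
using SharedNode = std::shared_ptr<Node>;

struct Node {
  Tag tag;
  std::vector<SharedNode> children;
};

// Diffs two child lists by React tag alone; no source-node pointers involved.
void diffChildren(const std::vector<SharedNode> &oldChildren,
                  const std::vector<SharedNode> &newChildren) {
  // The short-lived map of (re)inserted nodes, keyed by tag.
  std::unordered_map<Tag, SharedNode> insertedNodes;

  size_t index = 0;

  // Stage 1: update nodes that kept their tag at the same position.
  for (; index < oldChildren.size() && index < newChildren.size(); index++) {
    if (oldChildren[index]->tag != newChildren[index]->tag) {
      break;
    }
    std::printf("update %d\n", newChildren[index]->tag);
    diffChildren(oldChildren[index]->children, newChildren[index]->children);
  }
  size_t lastIndex = index;

  // Stage 2: the remaining new children are inserted; remember them by tag.
  for (; index < newChildren.size(); index++) {
    std::printf("insert %d\n", newChildren[index]->tag);
    insertedNodes.insert({newChildren[index]->tag, newChildren[index]});
  }

  // Stage 3: the remaining old children are removed. If the same tag was
  // also inserted, the node merely moved: recurse to diff its subtree and
  // erase it from the map so Stage 4 knows it existed before.
  for (index = lastIndex; index < oldChildren.size(); index++) {
    const auto &oldChild = oldChildren[index];
    std::printf("remove %d\n", oldChild->tag);
    auto it = insertedNodes.find(oldChild->tag);
    if (it == insertedNodes.end()) {
      std::printf("delete %d\n", oldChild->tag);
      diffChildren(oldChild->children, {});
    } else {
      if (it->second != oldChild) {
        diffChildren(oldChild->children, it->second->children);
      }
      insertedNodes.erase(it);
    }
  }

  // Stage 4: whatever is still in the map never existed before.
  for (index = lastIndex; index < newChildren.size(); index++) {
    const auto &newChild = newChildren[index];
    if (insertedNodes.find(newChild->tag) == insertedNodes.end()) {
      continue; // Erased in Stage 3: the node was merely re-inserted.
    }
    std::printf("create %d\n", newChild->tag);
    diffChildren({}, newChild->children);
  }
}

int main() {
  auto a = std::make_shared<Node>(Node{1, {}});
  auto b = std::make_shared<Node>(Node{2, {}});
  auto c = std::make_shared<Node>(Node{3, {}});
  // Moving `b` and adding `c` yields: update 1, insert 3, insert 2,
  // remove 2, create 3 -- `b` is recognized by its tag and not recreated.
  diffChildren({a, b}, {a, c, b});
}

Note how `insertedNodes` lives only for the duration of a single call: this is the "very small map that exists for a very short period of time" mentioned above.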

Reviewed By: mdvacca

Differential Revision: D8709953

fbshipit-source-id: 027abb326cf45f00f7bb0bbd7c4e612578268c66
Author: Valentin Shergin, 2018-07-06 14:34:40 -07:00 (committed by Facebook GitHub Bot)
Parent: e0e9c1549e
Commit: e8ec1cb16a


@@ -30,7 +30,7 @@ static void calculateMutationInstructions(
     return;
   }
 
-  std::unordered_set<Tag> insertedTags;
+  std::unordered_map<Tag, SharedShadowNode> insertedNodes;
   int index = 0;
 
   TreeMutationInstructionList createInstructions = {};
@@ -43,8 +43,8 @@ static void calculateMutationInstructions(
 
   // Stage 1: Collecting Updates
   for (index = 0; index < oldChildNodes->size() && index < newChildNodes->size(); index++) {
-    SharedShadowNode oldChildNode = oldChildNodes->at(index);
-    SharedShadowNode newChildNode = newChildNodes->at(index);
+    const auto &oldChildNode = oldChildNodes->at(index);
+    const auto &newChildNode = newChildNodes->at(index);
 
     if (oldChildNode->getTag() != newChildNode->getTag()) {
       // Totally different nodes, updating is impossible.
@@ -74,7 +74,7 @@ static void calculateMutationInstructions(
 
   // Stage 2: Collecting Insertions
   for (; index < newChildNodes->size(); index++) {
-    SharedShadowNode newChildNode = newChildNodes->at(index);
+    const auto &newChildNode = newChildNodes->at(index);
 
     insertInstructions.push_back(
       TreeMutationInstruction::Insert(
@@ -84,23 +84,12 @@ static void calculateMutationInstructions(
       )
     );
-    insertedTags.insert(newChildNode->getTag());
-
-    SharedShadowNode newChildSourceNode = newChildNode->getSourceNode();
-    SharedShadowNodeSharedList newChildSourceChildNodes =
-      newChildSourceNode ? newChildSourceNode->getChildren() : ShadowNode::emptySharedShadowNodeSharedList();
-
-    calculateMutationInstructions(
-      *(newChildNode->getChildren()->size() ? &downwardInstructions : &destructionDownwardInstructions),
-      newChildNode,
-      newChildSourceChildNodes,
-      newChildNode->getChildren()
-    );
+    insertedNodes.insert({newChildNode->getTag(), newChildNode});
   }
 
   // Stage 3: Collecting Deletions and Removals
   for (index = lastIndexAfterFirstStage; index < oldChildNodes->size(); index++) {
-    SharedShadowNode oldChildNode = oldChildNodes->at(index);
+    const auto &oldChildNode = oldChildNodes->at(index);
 
     // Even if the old node was (re)inserted, we have to generate `remove`
     // instruction.
@@ -112,12 +101,11 @@ static void calculateMutationInstructions(
       )
     );
-    auto numberOfRemovedTags = insertedTags.erase(oldChildNode->getTag());
-    assert(numberOfRemovedTags == 0 || numberOfRemovedTags == 1);
-    if (numberOfRemovedTags == 0) {
-      // The old node was *not* (re)inserted,
-      // so we have to generate `delete` instruction and apply the algorithm
+    const auto &it = insertedNodes.find(oldChildNode->getTag());
+    if (it == insertedNodes.end()) {
+      // The old node was *not* (re)inserted.
+      // We have to generate a `delete` instruction and apply the algorithm
       // recursively.
       deleteInstructions.push_back(
         TreeMutationInstruction::Delete(
@@ -125,19 +113,41 @@ static void calculateMutationInstructions(
         )
       );
+
+      // We also have to call the algorithm recursively to clean up the entire
+      // subtree starting from the removed node.
       calculateMutationInstructions(
         destructionDownwardInstructions,
         oldChildNode,
         oldChildNode->getChildren(),
         ShadowNode::emptySharedShadowNodeSharedList()
       );
+    } else {
+      // The old node *was* (re)inserted.
+      // We have to call the algorithm recursively if the inserted node
+      // is *not* the same as the removed one.
+      const auto &newChildNode = it->second;
+      if (newChildNode != oldChildNode) {
+        calculateMutationInstructions(
+          *(newChildNode->getChildren()->size() ? &downwardInstructions : &destructionDownwardInstructions),
+          newChildNode,
+          oldChildNode->getChildren(),
+          newChildNode->getChildren()
+        );
+      }
+
+      // In any case, we have to remove the node from `insertedNodes` as an
+      // indication that the node was actually removed (which means that
+      // the node existed before), hence we don't have to generate a
+      // `create` instruction.
+      insertedNodes.erase(it);
     }
   }
 
   // Stage 4: Collecting Creations
   for (index = lastIndexAfterFirstStage; index < newChildNodes->size(); index++) {
-    SharedShadowNode newChildNode = newChildNodes->at(index);
-    if (insertedTags.find(newChildNode->getTag()) == insertedTags.end()) {
+    const auto &newChildNode = newChildNodes->at(index);
+    if (insertedNodes.find(newChildNode->getTag()) == insertedNodes.end()) {
       // The new node was (re)inserted, so there is no need to create it.
       continue;
     }
@@ -147,6 +157,13 @@ static void calculateMutationInstructions(
         newChildNode
       )
     );
+
+    calculateMutationInstructions(
+      downwardInstructions,
+      newChildNode,
+      ShadowNode::emptySharedShadowNodeSharedList(),
+      newChildNode->getChildren()
+    );
   }
 
   // All instructions in an optimal order: