[compiler] Renames and no-op refactor for next PR
Rename for clarity:
- `CollectHoistablePropertyLoads:Tree` -> `CollectHoistablePropertyLoads:PropertyPathRegistry`
    - `getPropertyLoadNode` -> `getOrCreateProperty`
    - `getOrCreateRoot` -> `getOrCreateIdentifier`
- `PropertyLoadNode` -> `PropertyPathNode`

Refactored the CFG joining logic for `CollectHoistablePropertyLoads`. We now write to the same per-block set of inferred non-null objects when traversing from both entry and exit blocks. This is more correct, as non-nulls inferred from a forward traversal should be included when computing the backward traversal (and vice versa). This fix is needed by an edge case in #31036.
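
For illustration only, here is a minimal TypeScript sketch of the shared-set idea. The names (`BlockFacts`, `propagate`, `intersectAll`) and the flat string keys are hypothetical simplifications, not the compiler's HIR types, and the traversal is flattened compared with the recursive propagation in the diff below.

```typescript
// Minimal sketch of the joining change, using hypothetical simplified types
// rather than the compiler's HIR structures. Previously the forward and
// backward traversals wrote into separate per-block maps that were unioned
// afterwards; here both directions merge into the single shared
// `assumedNonNull` set, so facts found by one traversal are visible to the other.
type BlockId = number;

interface BlockFacts {
  predecessors: BlockId[];
  successors: BlockId[];
  // Shared between the forward and backward passes.
  assumedNonNull: Set<string>;
}

// One pass in one direction. Note: the real implementation recurses through
// the CFG and only intersects facts from already-visited neighbors; this
// flattened loop just illustrates the shared-set merging.
function propagate(
  blocks: Map<BlockId, BlockFacts>,
  order: BlockId[],
  direction: 'forward' | 'backward',
): boolean {
  let changed = false;
  for (const id of order) {
    const block = blocks.get(id)!;
    const neighbors =
      direction === 'forward' ? block.predecessors : block.successors;
    // An object is assumed non-null here only if every neighbor agrees.
    const fromNeighbors = intersectAll(
      neighbors.map(n => blocks.get(n)!.assumedNonNull),
    );
    const sizeBefore = block.assumedNonNull.size;
    for (const key of fromNeighbors) {
      block.assumedNonNull.add(key);
    }
    changed ||= block.assumedNonNull.size !== sizeBefore;
  }
  return changed;
}

function intersectAll(sets: Array<Set<string>>): Set<string> {
  if (sets.length === 0) {
    return new Set();
  }
  return new Set([...sets[0]].filter(key => sets.every(s => s.has(key))));
}
```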

Added an invariant to the fixed-point iteration so it terminates with an error instead of looping infinitely.
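
The termination guard itself is a standard bounded fixed-point pattern; a standalone sketch (with a hypothetical `runToFixedPoint` helper and an assumed bound of 100 iterations, matching the diff below) might look like:

```typescript
// Hypothetical helper illustrating the guard: run a step function until it
// reports no further changes, but bail out with an error instead of looping
// forever if convergence never happens.
function runToFixedPoint(step: () => boolean, maxIterations: number = 100): void {
  let iterations = 0;
  let changed: boolean;
  do {
    if (iterations++ >= maxIterations) {
      throw new Error(
        `fixed-point iteration did not terminate after ${maxIterations} loops`,
      );
    }
    // step() returns true when any per-block set grew during this pass.
    changed = step();
  } while (changed);
}
```

In the actual pass, the step runs one forward and one backward propagation over all blocks and ORs their `changed` flags.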

ghstack-source-id: 1e8eb2d566b649ede93de9a9c13dad09b96416a5
Pull Request resolved: #31036
mofeiZ committed Sep 30, 2024
1 parent 2cbea24 commit c67e241
Showing 3 changed files with 95 additions and 108 deletions.
@@ -4,6 +4,7 @@ import {Set_intersect, Set_union, getOrInsertDefault} from '../Utils/utils';
import {
BasicBlock,
BlockId,
DependencyPathEntry,
GeneratedSource,
HIRFunction,
Identifier,
@@ -66,7 +67,9 @@ export function collectHoistablePropertyLoads(
fn: HIRFunction,
temporaries: ReadonlyMap<IdentifierId, ReactiveScopeDependency>,
): ReadonlyMap<ScopeId, BlockInfo> {
const nodes = collectNonNullsInBlocks(fn, temporaries);
const registry = new PropertyPathRegistry();

const nodes = collectNonNullsInBlocks(fn, temporaries, registry);
propagateNonNull(fn, nodes);

const nodesKeyedByScopeId = new Map<ScopeId, BlockInfo>();
@@ -84,33 +87,33 @@

export type BlockInfo = {
block: BasicBlock;
assumedNonNullObjects: ReadonlySet<PropertyLoadNode>;
assumedNonNullObjects: ReadonlySet<PropertyPathNode>;
};

/**
* Tree data structure to dedupe property loads (e.g. a.b.c)
* PropertyLoadRegistry data structure to dedupe property loads (e.g. a.b.c)
* and make computing sets intersections simpler.
*/
type RootNode = {
properties: Map<string, PropertyLoadNode>;
properties: Map<string, PropertyPathNode>;
parent: null;
// Recorded to make later computations simpler
fullPath: ReactiveScopeDependency;
root: IdentifierId;
};

type PropertyLoadNode =
type PropertyPathNode =
| {
properties: Map<string, PropertyLoadNode>;
parent: PropertyLoadNode;
properties: Map<string, PropertyPathNode>;
parent: PropertyPathNode;
fullPath: ReactiveScopeDependency;
}
| RootNode;

class Tree {
class PropertyPathRegistry {
roots: Map<IdentifierId, RootNode> = new Map();

getOrCreateRoot(identifier: Identifier): PropertyLoadNode {
getOrCreateIdentifier(identifier: Identifier): PropertyPathNode {
/**
* Reads from a statically scoped variable are always safe in JS,
* with the exception of TDZ (not addressed by this pass).
@@ -132,49 +135,61 @@ class Tree {
return rootNode;
}

static #getOrCreateProperty(
node: PropertyLoadNode,
property: string,
): PropertyLoadNode {
let child = node.properties.get(property);
static getOrCreatePropertyEntry(
parent: PropertyPathNode,
entry: DependencyPathEntry,
): PropertyPathNode {
if (entry.optional) {
CompilerError.throwTodo({
reason: 'handle optional nodes',
loc: GeneratedSource,
});
}
let child = parent.properties.get(entry.property);
if (child == null) {
child = {
properties: new Map(),
parent: node,
parent: parent,
fullPath: {
identifier: node.fullPath.identifier,
path: node.fullPath.path.concat([{property, optional: false}]),
identifier: parent.fullPath.identifier,
path: parent.fullPath.path.concat(entry),
},
};
node.properties.set(property, child);
parent.properties.set(entry.property, child);
}
return child;
}

getPropertyLoadNode(n: ReactiveScopeDependency): PropertyLoadNode {
getOrCreateProperty(n: ReactiveScopeDependency): PropertyPathNode {
/**
* We add ReactiveScopeDependencies according to instruction ordering,
* so all subpaths of a PropertyLoad should already exist
* (e.g. a.b is added before a.b.c),
*/
let currNode = this.getOrCreateRoot(n.identifier);
let currNode = this.getOrCreateIdentifier(n.identifier);
if (n.path.length === 0) {
return currNode;
}
for (let i = 0; i < n.path.length - 1; i++) {
currNode = assertNonNull(currNode.properties.get(n.path[i].property));
currNode = PropertyPathRegistry.getOrCreatePropertyEntry(
currNode,
n.path[i],
);
}

return Tree.#getOrCreateProperty(currNode, n.path.at(-1)!.property);
return PropertyPathRegistry.getOrCreatePropertyEntry(
currNode,
n.path.at(-1)!,
);
}
}

function pushPropertyLoadNode(
loadSource: Identifier,
loadSourceNode: PropertyLoadNode,
function addNonNullPropertyPath(
source: Identifier,
sourceNode: PropertyPathNode,
instrId: InstructionId,
knownImmutableIdentifiers: Set<IdentifierId>,
result: Set<PropertyLoadNode>,
result: Set<PropertyPathNode>,
): void {
/**
* Since this runs *after* buildReactiveScopeTerminals, identifier mutable ranges
@@ -187,26 +202,22 @@ function pushPropertyLoadNode(
* See comment at top of function for why we track known immutable identifiers.
*/
const isMutableAtInstr =
loadSource.mutableRange.end > loadSource.mutableRange.start + 1 &&
loadSource.scope != null &&
inRange({id: instrId}, loadSource.scope.range);
source.mutableRange.end > source.mutableRange.start + 1 &&
source.scope != null &&
inRange({id: instrId}, source.scope.range);
if (
!isMutableAtInstr ||
knownImmutableIdentifiers.has(loadSourceNode.fullPath.identifier.id)
knownImmutableIdentifiers.has(sourceNode.fullPath.identifier.id)
) {
let curr: PropertyLoadNode | null = loadSourceNode;
while (curr != null) {
result.add(curr);
curr = curr.parent;
}
result.add(sourceNode);
}
}

function collectNonNullsInBlocks(
fn: HIRFunction,
temporaries: ReadonlyMap<IdentifierId, ReactiveScopeDependency>,
registry: PropertyPathRegistry,
): ReadonlyMap<BlockId, BlockInfo> {
const tree = new Tree();
/**
* Due to current limitations of mutable range inference, there are edge cases in
* which we infer known-immutable values (e.g. props or hook params) to have a
@@ -227,18 +238,18 @@ function collectNonNullsInBlocks(
* Known non-null objects such as functional component props can be safely
* read from any block.
*/
const knownNonNullIdentifiers = new Set<PropertyLoadNode>();
const knownNonNullIdentifiers = new Set<PropertyPathNode>();
if (
fn.fnType === 'Component' &&
fn.params.length > 0 &&
fn.params[0].kind === 'Identifier'
) {
const identifier = fn.params[0].identifier;
knownNonNullIdentifiers.add(tree.getOrCreateRoot(identifier));
knownNonNullIdentifiers.add(registry.getOrCreateIdentifier(identifier));
}
const nodes = new Map<BlockId, BlockInfo>();
for (const [_, block] of fn.body.blocks) {
const assumedNonNullObjects = new Set<PropertyLoadNode>(
const assumedNonNullObjects = new Set<PropertyPathNode>(
knownNonNullIdentifiers,
);
for (const instr of block.instructions) {
@@ -247,9 +258,9 @@
identifier: instr.value.object.identifier,
path: [],
};
pushPropertyLoadNode(
addNonNullPropertyPath(
instr.value.object.identifier,
tree.getPropertyLoadNode(source),
registry.getOrCreateProperty(source),
instr.id,
knownImmutableIdentifiers,
assumedNonNullObjects,
@@ -258,9 +269,9 @@
const source = instr.value.value.identifier.id;
const sourceNode = temporaries.get(source);
if (sourceNode != null) {
pushPropertyLoadNode(
addNonNullPropertyPath(
instr.value.value.identifier,
tree.getPropertyLoadNode(sourceNode),
registry.getOrCreateProperty(sourceNode),
instr.id,
knownImmutableIdentifiers,
assumedNonNullObjects,
@@ -270,9 +281,9 @@
const source = instr.value.object.identifier.id;
const sourceNode = temporaries.get(source);
if (sourceNode != null) {
pushPropertyLoadNode(
addNonNullPropertyPath(
instr.value.object.identifier,
tree.getPropertyLoadNode(sourceNode),
registry.getOrCreateProperty(sourceNode),
instr.id,
knownImmutableIdentifiers,
assumedNonNullObjects,
@@ -314,7 +325,6 @@ function propagateNonNull(
nodeId: BlockId,
direction: 'forward' | 'backward',
traversalState: Map<BlockId, 'active' | 'done'>,
nonNullObjectsByBlock: Map<BlockId, ReadonlySet<PropertyLoadNode>>,
): boolean {
/**
* Avoid re-visiting computed or currently active nodes, which can
@@ -345,7 +355,6 @@
pred,
direction,
traversalState,
nonNullObjectsByBlock,
);
changed ||= neighborChanged;
}
@@ -374,38 +383,36 @@
const neighborAccesses = Set_intersect(
Array.from(neighbors)
.filter(n => traversalState.get(n) === 'done')
.map(n => assertNonNull(nonNullObjectsByBlock.get(n))),
.map(n => assertNonNull(nodes.get(n)).assumedNonNullObjects),
);

const prevObjects = assertNonNull(nonNullObjectsByBlock.get(nodeId));
const newObjects = Set_union(prevObjects, neighborAccesses);
const prevObjects = assertNonNull(nodes.get(nodeId)).assumedNonNullObjects;
const mergedObjects = Set_union(prevObjects, neighborAccesses);

nonNullObjectsByBlock.set(nodeId, newObjects);
assertNonNull(nodes.get(nodeId)).assumedNonNullObjects = mergedObjects;
traversalState.set(nodeId, 'done');
changed ||= prevObjects.size !== newObjects.size;
changed ||= prevObjects.size !== mergedObjects.size;
return changed;
}
const fromEntry = new Map<BlockId, ReadonlySet<PropertyLoadNode>>();
const fromExit = new Map<BlockId, ReadonlySet<PropertyLoadNode>>();
for (const [blockId, blockInfo] of nodes) {
fromEntry.set(blockId, blockInfo.assumedNonNullObjects);
fromExit.set(blockId, blockInfo.assumedNonNullObjects);
}
const traversalState = new Map<BlockId, 'done' | 'active'>();
const reversedBlocks = [...fn.body.blocks];
reversedBlocks.reverse();

let i = 0;
let changed;
let i = 0;
do {
i++;
CompilerError.invariant(i++ < 100, {
reason:
'[CollectHoistablePropertyLoads] fixed point iteration did not terminate after 100 loops',
loc: GeneratedSource,
});

changed = false;
for (const [blockId] of fn.body.blocks) {
const forwardChanged = recursivelyPropagateNonNull(
blockId,
'forward',
traversalState,
fromEntry,
);
changed ||= forwardChanged;
}
Expand All @@ -415,43 +422,14 @@ function propagateNonNull(
blockId,
'backward',
traversalState,
fromExit,
);
changed ||= backwardChanged;
}
traversalState.clear();
} while (changed);

/**
* TODO: validate against meta internal code, then remove in future PR.
* Currently cannot come up with a case that requires fixed-point iteration.
*/
CompilerError.invariant(i <= 2, {
reason: 'require fixed-point iteration',
description: `#iterations = ${i}`,
loc: GeneratedSource,
});

CompilerError.invariant(
fromEntry.size === fromExit.size && fromEntry.size === nodes.size,
{
reason:
'bad sizes after calculating fromEntry + fromExit ' +
`${fromEntry.size} ${fromExit.size} ${nodes.size}`,
loc: GeneratedSource,
},
);

for (const [id, node] of nodes) {
const assumedNonNullObjects = Set_union(
assertNonNull(fromEntry.get(id)),
assertNonNull(fromExit.get(id)),
);
node.assumedNonNullObjects = assumedNonNullObjects;
}
}

function assertNonNull<T extends NonNullable<U>, U>(
export function assertNonNull<T extends NonNullable<U>, U>(
value: T | null | undefined,
source?: string,
): T {