From ea7e845bd357f7d9a576a344379eab8819f86a14 Mon Sep 17 00:00:00 2001
From: Luka Macan
Date: Thu, 16 Oct 2025 17:02:42 +0200
Subject: [PATCH 1/5] Fix buffer aliasing

---
 .../MemoryAllocation.py                  |  2 +-
 Deeploy/DeeployTypes.py                  | 61 +++++--------------
 Deeploy/Targets/Generic/Parsers.py       | 34 ++---------
 .../Generic/Templates/ReshapeTemplate.py | 13 ++--
 4 files changed, 29 insertions(+), 81 deletions(-)

diff --git a/Deeploy/CommonExtensions/CodeTransformationPasses/MemoryAllocation.py b/Deeploy/CommonExtensions/CodeTransformationPasses/MemoryAllocation.py
index b73fcafe3..f10d33350 100644
--- a/Deeploy/CommonExtensions/CodeTransformationPasses/MemoryAllocation.py
+++ b/Deeploy/CommonExtensions/CodeTransformationPasses/MemoryAllocation.py
@@ -141,7 +141,7 @@ def apply(self,
             assert buffer._live == True, f"Tried to deallocate already dead buffer {buffer.name}"
             buffer._live = False
             # Don't deallocate if it's an alias of a live buffer
-            if not buffer.has_live_ancestors(ctxt = ctxt):
+            if not buffer.has_live_aliases(ctxt):
                 memoryLevel = "None" if not hasattr(buffer, "_memoryLevel") else buffer._memoryLevel
                 if memoryLevel not in ctxt._dynamicSize:
                     ctxt._dynamicSize[memoryLevel] = 0
diff --git a/Deeploy/DeeployTypes.py b/Deeploy/DeeployTypes.py
index e6ca25c9b..c7b3555c0 100644
--- a/Deeploy/DeeployTypes.py
+++ b/Deeploy/DeeployTypes.py
@@ -238,7 +238,7 @@ class VariableBuffer():
     allocTemplate: NodeTemplate  #: NodeTemplate: Holds the buffer's allocation code
     deallocTemplate: NodeTemplate  #: NodeTemplate: Holds the buffer's deallocation code

-    def __init__(self, name: str = '', shape = [1], alias_of: Optional[List[str]] = []):
+    def __init__(self, name: str = '', shape = [1], aliases: Optional[List[str]] = None):
         self.name: str = name  #: str: Canonical name that this buffer is registered as in the NetworkContext
         self.shape: Sequence[
             int] = shape  #: Sequence[int]: Represents the dimensions of the underlying tensor as a sequence of dimension sizes
@@ -257,7 +257,7 @@ def __init__(self, name: str = '', shape = [1], alias_of: Optional[List[str]] =
         self.is_input: bool = False
         self.is_output: bool = False

-        self.alias_of: List[str] = alias_of if alias_of is not None else []
+        self.aliases: Set[str] = set(aliases) if aliases is not None else set()

     def _bufferRepresentation(self) -> Dict:
         return {"type": self._instance, "name": self.name, "size": int(np.prod(self.shape))}
@@ -324,42 +324,7 @@ def __getstate__(self):
     def fromNode(cls, node: gs.Node):
         return (cls(name = node.name, shape = node.shape if not isinstance(node, gs.Constant) else node.values.shape))

-    def add_aliases(self, aliases_to_add: List[str]):
-        """
-        Adds list of aliases to the alias_of attribute.
-
-        Parameters
-        ----------
-        aliases_to_add : List[str]
-            List of names of aliases to add to the alias_of attribute.
-
-        Returns
-        -------
-        None
-        """
-
-        if not hasattr(self, "alias_of"):
-            return None
-
-        for alias in aliases_to_add:
-            if alias not in self.alias_of:
-                self.alias_of.append(alias)
-
-        return None
-
-    def get_aliases_of(self):
-        """
-        Getter function for the alias_of attribute.
-
-        Returns
-        -------
-        List[str]
-            List of names of all aliases of this VariableBuffer.
-        """
-
-        if hasattr(self, "alias_of"):
-            return self.alias_of
-        else:
-            return list()
-
-    def has_live_ancestors(self, ctxt: NetworkContext) -> bool:
+    def has_live_aliases(self, ctxt: NetworkContext) -> bool:
         """Checks whether this VariableBuffer has any live aliases, i.e. buffers that are still live and alias this buffer, directly or transitively.

         Parameters
         ----------
@@ -370,14 +335,18 @@ def has_live_ancestors(self, ctxt: NetworkContext) -> bool:
         bool
             True if this VariableBuffer has any live aliases, False otherwise
         """
-        if not hasattr(self, "alias_of"):
-            return False
-
-        for alias in self.alias_of:
-            if ctxt.lookup(alias)._live:
-                return True
-
-        return False
+        # Do a breadth-first search across the bidirectional alias links
+        live = self._live
+        queue = set(self.aliases)
+        visited = {self.name}
+        while len(queue) > 0:
+            next = queue.pop()
+            buffNext = ctxt.lookup(next)
+            assert isinstance(buffNext, VariableBuffer)
+            live |= buffNext._live
+            visited.add(next)
+            queue |= buffNext.aliases - visited
+        return live

     def sizeInBytes(self) -> int:
         """Returns the size of this VariableBuffer in bytes
diff --git a/Deeploy/Targets/Generic/Parsers.py b/Deeploy/Targets/Generic/Parsers.py
index adc48ffe1..7752834c5 100644
--- a/Deeploy/Targets/Generic/Parsers.py
+++ b/Deeploy/Targets/Generic/Parsers.py
@@ -1059,44 +1059,18 @@ def parseNodeCtxt(self,

 class ReshapeParser(NodeParser):

-    def __init__(self):
-        super().__init__()
-
     def parseNode(self, node: gs.Node) -> (bool):
-
         ret = all([len(node.inputs) == 2, len(node.outputs) == 1])
-
         return ret

     def parseNodeCtxt(self,
                       ctxt: NetworkContext,
                       node: gs.Node,
                       channels_first: bool = True) -> Tuple[NetworkContext, bool]:
-
-        # Define names of node inputs and outputs, according to the ONNX standard
-        inputs = ['data_in', 'shape']
-        outputs = ['data_out']
-
-        # Map inputs and outputs to their corresponding names in the operator representation
-        for idx, inputNode in enumerate(node.inputs):
-            self.operatorRepresentation[inputs[idx]] = ctxt.lookup(inputNode.name).name
-        for idx, outputNode in enumerate(node.outputs):
-            self.operatorRepresentation[outputs[idx]] = ctxt.lookup(outputNode.name).name
-
-        # Update alias_of parameter for the output node
-        output_node = ctxt.lookup(node.outputs[outputs.index("data_out")].name)
-        input_node = ctxt.lookup(node.inputs[inputs.index("data_in")].name)
-
-        # Prepare new aliases
-        new_output_node_aliases = input_node.get_aliases_of()
-        new_output_node_aliases.append(input_node.name)
-
-        # Add new aliases
-        output_node.add_aliases(aliases_to_add = new_output_node_aliases)
-
-        # Compute data size
-        self.operatorRepresentation['size'] = np.prod(ctxt.lookup(node.inputs[0].name).shape)
-
+        for tensor, symName in zip(node.inputs, ['data_in', 'shape']):
+            self.operatorRepresentation[symName] = ctxt.lookup(tensor.name).name
+        for tensor, symName in zip(node.outputs, ['data_out']):
+            self.operatorRepresentation[symName] = ctxt.lookup(tensor.name).name
         return ctxt, True
diff --git a/Deeploy/Targets/Generic/Templates/ReshapeTemplate.py b/Deeploy/Targets/Generic/Templates/ReshapeTemplate.py
index 1ba3d9965..15b7d64be 100644
--- a/Deeploy/Targets/Generic/Templates/ReshapeTemplate.py
+++ b/Deeploy/Targets/Generic/Templates/ReshapeTemplate.py
@@ -4,7 +4,7 @@

 from typing import Dict, List, Tuple

-from Deeploy.DeeployTypes import NetworkContext, NodeTemplate, OperatorRepresentation
+from Deeploy.DeeployTypes import NetworkContext, NodeTemplate, OperatorRepresentation, VariableBuffer


 class _ReshapeTemplate(NodeTemplate):
@@ -25,9 +25,14 @@ def alignToContext(self, ctxt: NetworkContext,
         ctxt.globalObjects[operatorRepresentation["shape"]]._deploy = False
         ctxt.globalObjects[operatorRepresentation["shape"]]._live = False

-        inBuffer = ctxt.lookup(operatorRepresentation['data_in'])
-        outBuffer = ctxt.lookup(operatorRepresentation['data_out'])
-        outBuffer._alias = inBuffer.name
+        bufferIn = ctxt.lookup(operatorRepresentation['data_in'])
+        assert isinstance(bufferIn, VariableBuffer)
+        bufferOut = ctxt.lookup(operatorRepresentation['data_out'])
+        assert isinstance(bufferOut, VariableBuffer)
+
+        # Link the two buffers as aliases of each other
+        bufferIn.aliases.add(bufferOut.name)
+        bufferOut.aliases.add(bufferIn.name)

         return ctxt, operatorRepresentation, []

From 7d04efc9a697854b6ca589560c64f04de6452652 Mon Sep 17 00:00:00 2001
From: Luka Macan
Date: Thu, 16 Oct 2025 23:25:26 +0200
Subject: [PATCH 2/5] Refactor hoistConstant

---
 Deeploy/DeeployTypes.py | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/Deeploy/DeeployTypes.py b/Deeploy/DeeployTypes.py
index c7b3555c0..1f930525e 100644
--- a/Deeploy/DeeployTypes.py
+++ b/Deeploy/DeeployTypes.py
@@ -968,12 +968,15 @@ def hoistReference(self,
         ref._instance = ref._type(name, ctxt = self)
         return ref

-    def hoistConstant(self, node: gs.Node, name: str = '', _type: Optional[Type[Pointer]] = None) -> str:
-        """Register a ConstantBuffer extracted directly from a graphsurgeon Node
+    def hoistConstant(self,
+                      constant: gs.Constant,
+                      name: Optional[str] = None,
+                      _type: Optional[Type[Pointer]] = None) -> str:
+        """Register a ConstantBuffer extracted directly from a graphsurgeon Constant

         Parameters
         ----------
-        node : gs.Node
+        constant : gs.Constant
             graphsurgeon.Constant containing the constant values
         name : str
             Name of the ConstantBuffer to be registered
@@ -986,21 +989,18 @@ def hoistConstant(self, node: gs.Node, name: str = '', _type: Optional[Type[Poin
         Returns
         -------
         str
             Returns the name of the newly registered ConstantBuffer

         """
+        assert len(constant.outputs) <= 1, f"Constant {constant.name} has more than one output"
-        assert len(node.outputs) <= 1, f"Constant {node.name} has more than one output"

-        if name == "":
-            name = node.name
+        name = name if name is not None else constant.name

-        # SCHEREMO: This is currently heuristic, but should be annotated in ONNX
-        localBuffer = self.VariableBuffer.fromNode(node = node)
-        globalBuffer = self.ConstantBuffer.fromVariableBuffer(localBuffer, values = node.values)
-        globalBuffer.name = name
-        globalBuffer._type = _type
+        # LMACAN: The shape needs to be copied into a tuple for pickling to work. Don't ask me why..
+        buffer = self.ConstantBuffer(name, tuple(constant.shape), constant.values)
+        self.add(buffer, 'global')

-        self.add(globalBuffer, 'global')
+        if _type is not None:
+            self.annotateType(name, _type)

-        return globalBuffer.name
+        return name

     def addUser(self, name: str, node: gs.Node):
         """Adds an operator's name to the _user list of a VariableBuffer in the context

From 633dcde7e5366453fd6b3354bd150898a1a728b3 Mon Sep 17 00:00:00 2001
From: Luka Macan
Date: Thu, 16 Oct 2025 23:44:48 +0200
Subject: [PATCH 3/5] Refactor TransientBuffer __init__ to reuse VariableBuffer __init__

---
 Deeploy/DeeployTypes.py | 19 ++-----------------
 1 file changed, 2 insertions(+), 17 deletions(-)

diff --git a/Deeploy/DeeployTypes.py b/Deeploy/DeeployTypes.py
index 1f930525e..3a5be7698 100644
--- a/Deeploy/DeeployTypes.py
+++ b/Deeploy/DeeployTypes.py
@@ -367,28 +367,13 @@ class TransientBuffer(VariableBuffer):
     """

     def __init__(self, name: str = '', size = 0):
-        self.name = name
-        self.size = size  #: int: Total BYTE size of this TransientBuffer
-
-        # Do not override - Should be written in the parsing passes
-        self._users = []
+        super().__init__(name, shape = (size,))

         # Do not override - Should be written in the parsing passes
         self._type: Type[Pointer] = PointerClass(VoidType)
-
-        # Do not override - Should be written in the deployment passes
-        self._live = False
-
-        # Do not override - Set in Templates depending on platform
-        self._deploy = True
-
-        self.is_input: bool = False
-        self.is_output: bool = False
-
-        self.alias_of: List[str] = []
+        self.size = size

     def __eq__(self, other):
-
         ret = all([self.name == other.name, self.size == other.size])
         return ret

From 806ebc6d1f5aa944870afb18490dd2d9b3083e5f Mon Sep 17 00:00:00 2001
From: Luka Macan
Date: Thu, 16 Oct 2025 23:45:24 +0200
Subject: [PATCH 4/5] Remove fromVariableBuffer

---
 Deeploy/DeeployTypes.py | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/Deeploy/DeeployTypes.py b/Deeploy/DeeployTypes.py
index 3a5be7698..936931c2e 100644
--- a/Deeploy/DeeployTypes.py
+++ b/Deeploy/DeeployTypes.py
@@ -386,10 +386,6 @@ def __str__(self) -> str:
     def __repr__(self) -> str:
         return f'TransientBuffer: name: {self.name}, size: {self.size}'

-    @classmethod
-    def fromVariableBuffer(cls, buffer: VariableBuffer):
-        ret = cls(name = buffer.name, size = np.prod(buffer.shape) * buffer._type.typeWidth // 8)
-
     def sizeInBytes(self) -> int:
         return int(self.size)

@@ -433,12 +429,6 @@ def __repr__(self) -> str:
     def _bufferRepresentation(self) -> Dict:
         return {"type": self._type, "name": self.name, "size": int(np.prod(self.shape)), "values": self._valueString()}

-    @classmethod
-    def fromVariableBuffer(cls, buffer: VariableBuffer, values):
-        ret = cls(name = buffer.name, shape = buffer.shape, values = values)
-
-        return ret
-

 class StructBuffer(VariableBuffer):
     """Class to represent Struct object needed by the generated C Code

From bbc544bf8f0ca4ddef711831f680d9ce2d53c7f8 Mon Sep 17 00:00:00 2001
From: Luka Macan
Date: Fri, 17 Oct 2025 08:14:48 +0200
Subject: [PATCH 5/5] Update changelog

---
 CHANGELOG.md | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 158138ccf..9c82f50bb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,7 @@ This file contains the changelog for the Deeploy project. The changelog is divid
 ## Unreleased (Planned Release Target: v0.2.1)

 ### List of Pull Requests
+- Fix aliasing [#125](https://github.com/pulp-platform/Deeploy/pull/125)
 - Support for 1D Autoencoder [#98](https://github.com/pulp-platform/Deeploy/pull/98)
 - Refactor Logging for Improved Debugging [#115](https://github.com/pulp-platform/Deeploy/pull/115)
 - Add reuse-tool as an SPDX license header linter [#113](https://github.com/pulp-platform/Deeploy/pull/113)
@@ -74,6 +75,9 @@ This file contains the changelog for the Deeploy project. The changelog is divid
 - Changed types and added correct casts to fix many compiler warnings in the PULP target library
 - Use [reuse-tool](https://github.com/fsfe/reuse-tool) in pre-commit, CI, and Makefile for SPDX license header linting
 - Deployer workflow now uses `prepare(...)` instead of `generateFunction(...)`.
+- Removed the unused `fromVariableBuffer` constructors from `TransientBuffer` and `ConstantBuffer`
+- Refactored `hoistConstant` to take a `gs.Constant` directly
+- Refactored `TransientBuffer.__init__` to reuse `VariableBuffer.__init__`

 ### Fixed
 - Prevent node duplication for graphs generated via GraphSurgeon
@@ -84,6 +88,7 @@ This file contains the changelog for the Deeploy project. The changelog is divid
 - Corrected method usage in `importDeeployState` to call `NetworkContext.importNetworkContext` instead of the incorrect method name
 - Correctly return `signProp` from `setupDeployer` instead of hardcoding the value to `False` in `testMVP.py`
 - Fixed `Unsqueeze` Op. when using ONNX opset 13 or higher (from attribute to input)
+- Fixed buffer aliasing so that a buffer is not deallocated while any buffer aliasing it is still live

 ### Removed
 - Delete outdated and unused `.gitlab-ci.yml` file