Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ This file contains the changelog for the Deeploy project. The changelog is divid
## Unreleased (Planned Release Target: v0.2.1)

### List of Pull Requests
- Fix aliasing [#125](https://github.com/pulp-platform/Deeploy/pull/125)
- Support for 1D Autoencoder [#98](https://github.com/pulp-platform/Deeploy/pull/98)
- Refactor Logging for Improved Debugging [#115](https://github.com/pulp-platform/Deeploy/pull/115)
- Add reuse-tool as an SPDX license header linter [#113](https://github.com/pulp-platform/Deeploy/pull/113)
Expand Down Expand Up @@ -74,6 +75,9 @@ This file contains the changelog for the Deeploy project. The changelog is divid
- Changed types and added correct casts to fix many compiler warnings in the PULP target library
- Use [reuse-tool](https://github.com/fsfe/reuse-tool) in pre-commit, CI, and Makefile for SPDX license header linting
- Deployer workflow now uses `prepare(...)` instead of `generateFunction(...)`.
- Removed `fromVariableBuffer`
- Refactored `hoistConstant`
- Refactored TransientBuffer's `__init__`

### Fixed
- Prevent node duplication for graphs generated via GraphSurgeon
Expand All @@ -84,6 +88,7 @@ This file contains the changelog for the Deeploy project. The changelog is divid
- Corrected method usage in `importDeeployState` to call `NetworkContext.importNetworkContext` instead of the incorrect method name
- Correctly return `signProp` from `setupDeployer` instead of hardcoding the value to `False` in `testMVP.py`
- Fixed `Unsqueeze` Op. when using ONNX opset 13 or higher (from attribute to input)
- Fixed aliasing: alias links are now stored symmetrically in an `aliases` set on each buffer, and liveness checks follow transitive aliases

### Removed
- Delete outdated and unused `.gitlab-ci.yml` file
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ def apply(self,
assert buffer._live == True, f"Tried to deallocate already dead buffer {buffer.name}"
buffer._live = False
# Don't deallocate if it's an alias of a live buffer
if not buffer.has_live_ancestors(ctxt = ctxt):
if not buffer.has_live_aliases(ctxt):
memoryLevel = "None" if not hasattr(buffer, "_memoryLevel") else buffer._memoryLevel
if memoryLevel not in ctxt._dynamicSize:
ctxt._dynamicSize[memoryLevel] = 0
Expand Down
118 changes: 31 additions & 87 deletions Deeploy/DeeployTypes.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,7 +238,7 @@ class VariableBuffer():
allocTemplate: NodeTemplate #: NodeTemplate: Holds the buffer's allocation code
deallocTemplate: NodeTemplate #: NodeTemplate: Holds the buffer's deallocation code

def __init__(self, name: str = '', shape = [1], alias_of: Optional[List[str]] = []):
def __init__(self, name: str = '', shape = [1], aliases: Optional[List[str]] = None):
self.name: str = name #: str: Canonical name that this buffer is registered as in the NetworkContext
self.shape: Sequence[
int] = shape #: Sequence[int]: Represents the dimensions of the underlying tensor as a sequence of dimension sizes
Expand All @@ -257,7 +257,7 @@ def __init__(self, name: str = '', shape = [1], alias_of: Optional[List[str]] =
self.is_input: bool = False
self.is_output: bool = False

self.alias_of: List[str] = alias_of if alias_of is not None else []
self.aliases: Set[str] = set(aliases) if aliases is not None else set()

def _bufferRepresentation(self) -> Dict:
return {"type": self._instance, "name": self.name, "size": int(np.prod(self.shape))}
Expand Down Expand Up @@ -324,42 +324,7 @@ def __getstate__(self):
def fromNode(cls, node: gs.Node):
return (cls(name = node.name, shape = node.shape if not isinstance(node, gs.Constant) else node.values.shape))

def add_aliases(self, aliases_to_add: List[str]):
"""
Adds list of aliases to the alias_of attribute.
Parameters
----------
alias_to_add : List[str]
List of names of aliases to add to the alias_of attribute.
Returns
-------
None
"""

if not hasattr(self, "alias_of"):
return None

for alias in aliases_to_add:
if alias not in self.alias_of:
self.alias_of.append(alias)

return None

def get_aliases_of(self):
"""
Getter function for the alias_of attribute.
Returns
-------
List[str]
List of names of all aliases of this VariableBuffer.
"""

if hasattr(self, "alias_of"):
return self.alias_of
else:
return list()

def has_live_ancestors(self, ctxt: NetworkContext) -> bool:
def has_live_aliases(self, ctxt: "NetworkContext") -> bool:
    """Checks whether this buffer, or any buffer transitively aliased with it,
    is still live.

    Aliases form an undirected graph (each buffer stores the names of its
    direct aliases in ``self.aliases``), so we breadth-first-search the whole
    connected alias component and OR together the ``_live`` flags.

    Parameters
    ----------
    ctxt : NetworkContext
        Context in which the aliased buffers are looked up by name

    Returns
    -------
    bool
        True if this buffer or any (transitive) alias of it is live,
        False otherwise
    """
    live = self._live
    queue = set(self.aliases)
    # BUGFIX: was `set(self.name)`, which builds a set of the name's
    # CHARACTERS, so the starting buffer itself was never marked visited.
    visited = {self.name}
    while queue:
        # `aliasName` instead of `next`, which shadowed the builtin
        aliasName = queue.pop()
        aliasBuffer = ctxt.lookup(aliasName)
        live |= aliasBuffer._live
        visited.add(aliasName)
        # Only enqueue aliases we have not seen yet; guarantees termination
        # even though alias links are symmetric (cycles in the graph).
        queue |= aliasBuffer.aliases - visited
    return live

def sizeInBytes(self) -> int:
"""Returns the size of this VariableBuffer in bytes
Expand All @@ -398,28 +367,13 @@ class TransientBuffer(VariableBuffer):
"""

def __init__(self, name: str = '', size = 0):
self.name = name
self.size = size #: int: Total BYTE size of this TransientBuffer

# Do not override - Should be written in the parsing passes
self._users = []
super().__init__(name, shape = (size,))

# Do not override - Should be written in the parsing passes
self._type: Type[Pointer] = PointerClass(VoidType)

# Do not override - Should be written in the deployment passes
self._live = False

# Do not override - Set in Templates depending on platform
self._deploy = True

self.is_input: bool = False
self.is_output: bool = False

self.alias_of: List[str] = []
self.size = size

def __eq__(self, other):

ret = all([self.name == other.name, self.size == other.size])
return ret

Expand All @@ -432,10 +386,6 @@ def __str__(self) -> str:
def __repr__(self) -> str:
return f'TransientBuffer: name: {self.name}, size: {self.size}'

@classmethod
def fromVariableBuffer(cls, buffer: VariableBuffer):
ret = cls(name = buffer.name, size = np.prod(buffer.shape) * buffer._type.typeWidth // 8)

def sizeInBytes(self) -> int:
return int(self.size)

Expand Down Expand Up @@ -479,12 +429,6 @@ def __repr__(self) -> str:
def _bufferRepresentation(self) -> Dict:
return {"type": self._type, "name": self.name, "size": int(np.prod(self.shape)), "values": self._valueString()}

@classmethod
def fromVariableBuffer(cls, buffer: VariableBuffer, values):
ret = cls(name = buffer.name, shape = buffer.shape, values = values)

return ret


class StructBuffer(VariableBuffer):
"""Class to represent Struct object needed by the generated C Code
Expand Down Expand Up @@ -999,12 +943,15 @@ def hoistReference(self,
ref._instance = ref._type(name, ctxt = self)
return ref

def hoistConstant(self, node: gs.Node, name: str = '', _type: Optional[Type[Pointer]] = None) -> str:
"""Register a ConstantBuffer extracted directly from a graphsurgeon Node
def hoistConstant(self,
constant: gs.Constant,
name: Optional[str] = None,
_type: Optional[Type[Pointer]] = None) -> str:
"""Register a ConstantBuffer extracted directly from a graphsurgeon Constant

Parameters
----------
node : gs.Node
constant : gs.Constant
graphsurgeon.Constant whose values should be registered as a ConstantBuffer
name : str
Name of the ConstantBuffer to be registered
Expand All @@ -1017,21 +964,18 @@ def hoistConstant(self, node: gs.Node, name: str = '', _type: Optional[Type[Poin
Returns the name of the newly registered ConstantBuffer

"""
assert len(constant.outputs) <= 1, f"Constant {constant.name} has more than one output"

assert len(node.outputs) <= 1, f"Constant {node.name} has more than one output"
name = name if name is not None else constant.name

if name == "":
name = node.name
# LMACAN: The shape needs to be copied into a tuple for pickling to work. Don't ask me why..
buffer = self.ConstantBuffer(name, tuple(constant.shape), constant.values)
self.add(buffer, 'global')

# SCHEREMO: This is currently heuristic, but should be annotated in ONNX
localBuffer = self.VariableBuffer.fromNode(node = node)
globalBuffer = self.ConstantBuffer.fromVariableBuffer(localBuffer, values = node.values)
globalBuffer.name = name
globalBuffer._type = _type
if _type is not None:
self.annotateType(name, _type)

self.add(globalBuffer, 'global')

return globalBuffer.name
return name

def addUser(self, name: str, node: gs.Node):
"""Adds an operator's name to the _user list of a VariableBuffer in the context
Expand Down
34 changes: 4 additions & 30 deletions Deeploy/Targets/Generic/Parsers.py
Original file line number Diff line number Diff line change
Expand Up @@ -1059,44 +1059,18 @@ def parseNodeCtxt(self,

class ReshapeParser(NodeParser):

def __init__(self):
super().__init__()

def parseNode(self, node: "gs.Node") -> bool:
    """A Reshape node is parseable iff it has exactly two inputs
    (data tensor and target shape) and exactly one output."""
    return len(node.inputs) == 2 and len(node.outputs) == 1

def parseNodeCtxt(self,
ctxt: NetworkContext,
node: gs.Node,
channels_first: bool = True) -> Tuple[NetworkContext, bool]:

# Define names of node inputs and outputs, according to the ONNX standard
inputs = ['data_in', 'shape']
outputs = ['data_out']

# Map inputs and outputs to their corresponding names in the operator representation
for idx, inputNode in enumerate(node.inputs):
self.operatorRepresentation[inputs[idx]] = ctxt.lookup(inputNode.name).name
for idx, outputNode in enumerate(node.outputs):
self.operatorRepresentation[outputs[idx]] = ctxt.lookup(outputNode.name).name

# Update alias_of parameter for the output node
output_node = ctxt.lookup(node.outputs[outputs.index("data_out")].name)
input_node = ctxt.lookup(node.inputs[inputs.index("data_in")].name)

# Prepare new aliases
new_output_node_aliases = input_node.get_aliases_of()
new_output_node_aliases.append(input_node.name)

# Add new aliases
output_node.add_aliases(aliases_to_add = new_output_node_aliases)

# Compute data size
self.operatorRepresentation['size'] = np.prod(ctxt.lookup(node.inputs[0].name).shape)

for tensor, symName in zip(node.inputs, ['data_in', 'shape']):
self.operatorRepresentation[symName] = ctxt.lookup(tensor.name).name
for tensor, symName in zip(node.outputs, ['data_out']):
self.operatorRepresentation[symName] = ctxt.lookup(tensor.name).name
return ctxt, True


Expand Down
13 changes: 9 additions & 4 deletions Deeploy/Targets/Generic/Templates/ReshapeTemplate.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from typing import Dict, List, Tuple

from Deeploy.DeeployTypes import NetworkContext, NodeTemplate, OperatorRepresentation
from Deeploy.DeeployTypes import NetworkContext, NodeTemplate, OperatorRepresentation, VariableBuffer


class _ReshapeTemplate(NodeTemplate):
Expand All @@ -25,9 +25,14 @@ def alignToContext(self, ctxt: NetworkContext,
ctxt.globalObjects[operatorRepresentation["shape"]]._deploy = False
ctxt.globalObjects[operatorRepresentation["shape"]]._live = False

inBuffer = ctxt.lookup(operatorRepresentation['data_in'])
outBuffer = ctxt.lookup(operatorRepresentation['data_out'])
outBuffer._alias = inBuffer.name
bufferIn = ctxt.lookup(operatorRepresentation['data_in'])
assert isinstance(bufferIn, VariableBuffer)
bufferOut = ctxt.lookup(operatorRepresentation['data_out'])
assert isinstance(bufferOut, VariableBuffer)

# Link aliases to each buffer
bufferIn.aliases.add(bufferOut.name)
bufferOut.aliases.add(bufferIn.name)

return ctxt, operatorRepresentation, []

Expand Down
Loading