v0.20.0 (#356)
aidemsined authored Feb 5, 2025
Parents: bf78e58 + 23d3b12 · Commit: 0e583da
Showing 41 changed files with 1,014 additions and 344 deletions.
backend/ttnn_visualizer/models.py (2 changes: 1 addition & 1 deletion)
@@ -114,11 +114,11 @@ class Tensor(SerializeableDataclass):
     device_id: int
     address: int
     buffer_type: BufferType
+    device_addresses: list[int]
 
     def __post_init__(self):
         self.memory_config = parse_memory_config(self.memory_config)
 
 
 @dataclasses.dataclass
 class InputTensor(SerializeableDataclass):
     operation_id: int
backend/ttnn_visualizer/queries.py (19 changes: 18 additions & 1 deletion)
@@ -290,7 +290,24 @@ def query_tensors(
     ) -> Generator[Tensor, None, None]:
         rows = self._query_table("tensors", filters)
         for row in rows:
-            yield Tensor(*row)
+            device_addresses = []
+
+            try:
+                device_tensors = self._query_table(
+                    "device_tensors", filters={"tensor_id": row[0]}
+                )
+            except sqlite3.OperationalError as err:
+                if str(err).startswith("no such table"):
+                    pass
+                else:
+                    raise err
+            else:
+                for device_tensor in sorted(device_tensors, key=lambda x: x[1]):
+                    while len(device_addresses) < device_tensor[1]:
+                        device_addresses.append(None)
+                    device_addresses.append(device_tensor[2])
+
+            yield Tensor(*row, device_addresses)
 
     def query_input_tensors(
         self, filters: Optional[Dict[str, Any]] = None
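For reference, here is a minimal standalone sketch of the gap-filling logic added above. It assumes, as the row indexing suggests but the diff does not spell out, that each device_tensors row looks like (tensor_id, device_id, address); device ids with no row are padded with None so that device_addresses can be indexed by device id. The helper name build_device_addresses is hypothetical and exists only for this illustration.

from typing import Any, List, Optional, Sequence, Tuple


def build_device_addresses(device_tensor_rows: Sequence[Tuple[Any, int, int]]) -> List[Optional[int]]:
    """Collect per-device addresses, leaving None where a device has no row."""
    device_addresses: List[Optional[int]] = []
    # Sort by device id (column 1) so each address lands at its matching index.
    for device_tensor in sorted(device_tensor_rows, key=lambda r: r[1]):
        device_id, address = device_tensor[1], device_tensor[2]
        while len(device_addresses) < device_id:
            device_addresses.append(None)  # pad gaps for devices without rows
        device_addresses.append(address)
    return device_addresses


# Example: tensor 7 has addresses on devices 0 and 2 but not on device 1.
rows = [(7, 2, 0x2000), (7, 0, 0x1000)]
assert build_device_addresses(rows) == [0x1000, None, 0x2000]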
backend/ttnn_visualizer/serializers.py (8 changes: 4 additions & 4 deletions)
@@ -1,6 +1,6 @@
 import dataclasses
 from collections import defaultdict
-from typing import List, Optional
+from typing import List
 # SPDX-License-Identifier: Apache-2.0
 #
 # SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC
@@ -191,7 +191,7 @@ def serialize_operation(
 
 
 def serialize_operation_buffers(operation: Operation, operation_buffers):
-    buffer_data = [dataclasses.asdict(b) for b in operation_buffers]
+    buffer_data = [b.to_dict() for b in operation_buffers]
     for b in buffer_data:
         b.pop("operation_id")
         b.update({"size": b.pop("max_size_per_bank")})
@@ -203,7 +203,7 @@ def serialize_operation_buffers(operation: Operation, operation_buffers):
 
 
 def serialize_devices(devices):
-    return [dataclasses.asdict(d) for d in devices]
+    return [d.to_dict() for d in devices]
 
 
 def serialize_operations_buffers(operations, buffers):
@@ -226,7 +226,7 @@ def serialize_tensors(
     results = []
     comparisons = comparisons_by_tensor_id(local_comparisons, global_comparisons)
     for tensor in tensors:
-        tensor_data = dataclasses.asdict(tensor)
+        tensor_data = tensor.to_dict()
         tensor_id = tensor_data.pop("tensor_id")
         tensor_data.update(
             {
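The serializer changes swap dataclasses.asdict() for a to_dict() call on each model instance. The to_dict() implementation itself is not part of this diff (it is presumably provided by the shared SerializeableDataclass base or the individual model classes); purely as a hypothetical sketch of why such a hook can be preferable to a plain asdict(), a base class could flatten non-JSON-friendly members such as enum fields (for example BufferType) before the data is handed to the API layer:

import dataclasses
import enum


@dataclasses.dataclass
class SerializableSketch:
    """Hypothetical stand-in for SerializeableDataclass (illustration only)."""

    def to_dict(self) -> dict:
        result = {}
        for field in dataclasses.fields(self):
            value = getattr(self, field.name)
            # Reduce enum members to their values so the dict is JSON-serializable.
            result[field.name] = value.value if isinstance(value, enum.Enum) else value
        return result


class BufferType(enum.Enum):  # stand-in enum for the example
    DRAM = 0


@dataclasses.dataclass
class DeviceSketch(SerializableSketch):
    device_id: int
    buffer_type: BufferType


print(DeviceSketch(0, BufferType.DRAM).to_dict())  # {'device_id': 0, 'buffer_type': 0}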