40 changes: 40 additions & 0 deletions .gitignore
@@ -0,0 +1,40 @@
# Python bytecode
__pycache__/
*.py[cod]
*$py.class

# Build artifacts / packaging
build/
dist/
.eggs/
*.egg-info/
wheels/
pip-wheel-metadata/
*.egg
MANIFEST

# Tests / coverage
.pytest_cache/
htmlcov/
.coverage
.coverage.*
nosetests.xml
coverage.xml
*.cover

# Virtual environments
.venv/
venv/

# Tool caches
.mypy_cache/
.pytype/
.pyre/
.ruff_cache/

# Editors / IDE
.vscode/

# OS files
.DS_Store
Thumbs.db
2 changes: 1 addition & 1 deletion README.md
@@ -181,7 +181,7 @@ visualkeras.layered_view(model, sizing_mode='accurate')
visualkeras.layered_view(model, sizing_mode='balanced')
```

<img src="https://raw.githubusercontent.com/paulgavrikov/visualkeras/master/figures/sizing_balanced.pn" height="400"/>
<img src="https://raw.githubusercontent.com/paulgavrikov/visualkeras/master/figures/sizing_balanced.png" height="400"/>

**Capped mode**: Caps dimensions at specified limits while preserving ratios

61 changes: 28 additions & 33 deletions visualkeras/graph.py
@@ -189,34 +189,15 @@ def graph_view(model, to_file: str = None,
id_to_num_mapping, adj_matrix = model_to_adj_matrix(model)
model_layers = model_to_hierarchy_lists(model, id_to_num_mapping, adj_matrix)

# Add fake output layers only when needed
# When inout_as_tensor=False, only add dummy layers if output-producing layers
# are not in the last hierarchy level (to avoid duplication)
should_add_dummy_outputs = inout_as_tensor

if not inout_as_tensor:
# Check if all output-producing layers are in the last hierarchy level
last_level_layers = model_layers[-1] if model_layers else []
layers_producing_outputs = []

for output_tensor in model.outputs:
for layer in model.layers:
if hasattr(layer, 'output') and layer.output is output_tensor:
layers_producing_outputs.append(layer)
break

# Only add dummy outputs if some output-producing layers are NOT in the last level
should_add_dummy_outputs = not all(layer in last_level_layers for layer in layers_producing_outputs)
# Add fake output layers to provide explicit sinks so connectors always end
# in a visible node column. This matches the reference renderer outputs and
# keeps tensor/neuron views consistent.
should_add_dummy_outputs = True

if should_add_dummy_outputs:
# Normalize output_shape to always be a list of tuples
if isinstance(model.output_shape, tuple):
# Single output model: output_shape is a tuple, convert to list of tuples
output_shapes = [model.output_shape]
else:
# Multi-output model: output_shape is already a list of tuples
output_shapes = model.output_shape

# Normalize output_shape using helper to handle Keras 3
output_shapes = get_model_output_shapes(model)

model_layers.append([
_DummyLayer(
output_names[i],
@@ -238,6 +219,7 @@
for layer in layer_list:
is_box = True
units = 1
node_scale_override = None # optional per-node scale for circles to normalize column height

if show_neurons:
if hasattr(layer, 'units'):
@@ -248,15 +230,21 @@
units = layer.filters
elif is_internal_input(layer) and not inout_as_tensor:
is_box = False
# Normalize input_shape to handle both tuple and list formats
input_shape = layer.input_shape
if isinstance(input_shape, tuple):
shape = input_shape
elif isinstance(input_shape, list) and len(input_shape) == 1:
# Normalize input shape using helper
input_shape = get_layer_input_shape(layer)
if isinstance(input_shape, (list, tuple)) and len(input_shape) > 0 and isinstance(input_shape[0], (list, tuple)):
shape = input_shape[0]
else:
raise RuntimeError(f"not supported input shape {input_shape}")
shape = input_shape
units = self_multiply(shape)
# Keep the overall column height similar to the default box height (3*node_size)
# Compute per-node scale so that: units * scale * node_size + (units-1)*node_spacing ≈ 3*node_size
if units and units > 0:
target = 3 * node_size
numerator = target - max(units - 1, 0) * node_spacing
denom = units * node_size
s = max(0.2, min(1.0, numerator / denom)) if denom > 0 else 1.0
node_scale_override = s
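
A quick numeric check of the clamping above (editorial sketch, not part of the diff; the `node_size` and `node_spacing` values are assumptions chosen for illustration, not necessarily the function's defaults):

```python
# Evaluate the per-node scale formula for a flattened input with many units.
node_size, node_spacing = 50, 10
units = 64                          # e.g. a flattened Dense input of 64 scalars

target = 3 * node_size                                   # desired column height: 150
numerator = target - max(units - 1, 0) * node_spacing    # 150 - 630 = -480
denom = units * node_size                                # 3200
s = max(0.2, min(1.0, numerator / denom)) if denom > 0 else 1.0
print(s)  # 0.2 -- clamped at the lower bound; circles shrink to 20% height
```

With many units the unclamped ratio goes negative, so the 0.2 floor is what actually bounds the column height in this case.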

n = min(units, ellipsize_after)
layer_nodes = list()
@@ -275,7 +263,9 @@
c.x1 = current_x
c.y1 = current_y
c.x2 = c.x1 + node_size
c.y2 = c.y1 + node_size * scale
# For neuron circles, allow per-layer scale override to normalize column height
eff_scale = node_scale_override if (node_scale_override is not None and not is_box) else scale
c.y2 = c.y1 + node_size * eff_scale

current_y = c.y2 + node_spacing

@@ -296,6 +286,11 @@

img_width = len(layers) * node_size + (len(layers) - 1) * layer_spacing + 2 * padding
img_height = max(*layer_y) + 2 * padding
# Keep height comparable between tensor and flattened views
if not inout_as_tensor and show_neurons:
baseline = 3 * node_size + 2 * padding
if img_height < baseline:
img_height = baseline
img = Image.new('RGBA', (int(ceil(img_width)), int(ceil(img_height))), background_fill)

draw = aggdraw.Draw(img)
138 changes: 136 additions & 2 deletions visualkeras/layer_utils.py
@@ -2,6 +2,7 @@
from .utils import get_keys_by_value
from collections.abc import Iterable
import warnings
from typing import List, Tuple, Union, Optional

try:
from tensorflow.keras.layers import Layer
@@ -74,9 +75,18 @@ def get_incoming_layers(layer):


def get_outgoing_layers(layer):
"""Yield outgoing (child) layers for a given layer."""
"""Yield outgoing (child) layers for a given layer.

Supports both legacy Node API (TF/Keras <= 2.15) and the new Node API
(TF >= 2.16 / Keras >= 3).
"""
for i, node in enumerate(layer._outbound_nodes):
yield node.outbound_layer
if hasattr(node, 'outbound_layer'):
# Old Node API
yield node.outbound_layer
else:
# New Node API (Keras 3): node.operation is the target layer
yield node.operation
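
A minimal usage sketch (editorial, not part of the diff; assumes TensorFlow/Keras and this branch of visualkeras are installed, and the layer names are made up):

```python
# The helper yields the same children whether the node exposes
# `outbound_layer` (Keras <= 2.15) or `operation` (Keras 3).
from tensorflow.keras import layers, models
from visualkeras.layer_utils import get_outgoing_layers

inputs = layers.Input(shape=(8,))
hidden = layers.Dense(4, name="hidden")(inputs)
outputs = layers.Dense(1, name="head")(hidden)
model = models.Model(inputs, outputs)

print([child.name for child in get_outgoing_layers(model.get_layer("hidden"))])
# -> ['head']
```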


def model_to_adj_matrix(model):
@@ -305,6 +315,130 @@ def is_internal_input(layer):

return False


# ----------------------------
# Shape utilities (Keras 2/3)
# ----------------------------

def _tensor_shape_to_tuple(shape_obj) -> Optional[Tuple]:
"""Convert TensorShape/KerasTensor.shape to a Python tuple of ints/None.

Returns None if conversion is not possible.
"""
if shape_obj is None:
return None
# TensorFlow TensorShape has as_list
if hasattr(shape_obj, 'as_list'):
try:
return tuple(shape_obj.as_list())
except Exception:
pass
# Otherwise assume iterable of dims
try:
dims = []
for d in shape_obj:
# Some dims are Dimension-like; try int() with fallback to None
if d is None:
dims.append(None)
else:
try:
dims.append(int(d))
except Exception:
dims.append(None)
return tuple(dims)
except TypeError:
return None
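
A small illustration of the three conversion paths (editorial sketch, not part of the diff; `_FakeTensorShape` is a hypothetical stand-in so the snippet runs without TensorFlow, and the underscore-prefixed helper is module-private):

```python
from visualkeras.layer_utils import _tensor_shape_to_tuple

class _FakeTensorShape:             # hypothetical stand-in for tf.TensorShape
    def as_list(self):
        return [None, 28, 28, 3]

print(_tensor_shape_to_tuple(_FakeTensorShape()))  # (None, 28, 28, 3)
print(_tensor_shape_to_tuple([None, 128]))         # (None, 128)
print(_tensor_shape_to_tuple(None))                # None
```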


def get_layer_output_shape(layer) -> Union[Tuple, List[Tuple], None]:
"""Best-effort retrieval of a layer's output shape as tuple(s).

Works across Keras/TF versions where `.output_shape` might not be present
(e.g., Keras 3 InputLayer).
"""
# 1) Direct attribute (older versions)
s = getattr(layer, 'output_shape', None)
if s is not None:
return s

# 2) From `output` tensor(s)
out = getattr(layer, 'output', None)
if out is not None:
if isinstance(out, (list, tuple)):
shapes = [_tensor_shape_to_tuple(t.shape) for t in out]
return shapes
else:
return _tensor_shape_to_tuple(out.shape)

# 3) Fallbacks for Input-like layers
for attr in ('batch_shape', 'batch_input_shape', 'input_shape', 'shape'):
s = getattr(layer, attr, None)
if s is not None:
# Ensure tuple(s)
if isinstance(s, (list, tuple)) and len(s) > 0 and isinstance(s[0], (list, tuple)):
return [tuple(x) for x in s]
if hasattr(s, 'as_list'):
try:
return tuple(s.as_list())
except Exception:
pass
# Single tuple
if isinstance(s, (list, tuple)):
return tuple(s)
# Unknown format
break
return None
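
To illustrate the attribute fallback the docstring mentions, an editorial sketch with a hypothetical stand-in object (not a real Keras class) that exposes only `batch_shape`, the attribute a Keras 3 InputLayer provides in place of `output_shape`:

```python
# With neither `output_shape` nor `output` present, the fallback branch
# still recovers a usable shape tuple.
from visualkeras.layer_utils import get_layer_output_shape

class _FakeInputLayer:              # hypothetical stand-in
    batch_shape = (None, 32, 32, 3)

print(get_layer_output_shape(_FakeInputLayer()))   # (None, 32, 32, 3)
```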


def get_layer_input_shape(layer) -> Union[Tuple, List[Tuple], None]:
"""Best-effort retrieval of a layer's input shape as tuple(s)."""
# 1) Direct attribute
s = getattr(layer, 'input_shape', None)
if s is not None:
return s

# 2) From `input` tensor(s)
inp = getattr(layer, 'input', None)
if inp is not None:
if isinstance(inp, (list, tuple)):
shapes = [_tensor_shape_to_tuple(t.shape) for t in inp]
return shapes
else:
return _tensor_shape_to_tuple(inp.shape)

# 3) Fallbacks common for InputLayer
for attr in ('batch_input_shape', 'batch_shape', 'shape'):
s = getattr(layer, attr, None)
if s is not None:
if isinstance(s, (list, tuple)) and len(s) > 0 and isinstance(s[0], (list, tuple)):
return [tuple(x) for x in s]
if hasattr(s, 'as_list'):
try:
return tuple(s.as_list())
except Exception:
pass
if isinstance(s, (list, tuple)):
return tuple(s)
break
return None


def get_model_output_shapes(model) -> List[Tuple]:
"""Return list of output shape tuples for a model across Keras versions."""
shapes = getattr(model, 'output_shape', None)
if shapes is not None:
if isinstance(shapes, tuple):
return [shapes]
# Assume already list-like of tuples
return list(shapes)
# Derive from model.outputs tensors
outputs = getattr(model, 'outputs', None) or []
result: List[Tuple] = []
for t in outputs:
result.append(_tensor_shape_to_tuple(getattr(t, 'shape', None)))
return result


def extract_primary_shape(layer_output_shape, layer_name: str = None) -> tuple:
"""
Extract the primary shape from a layer's output shape to handle multi-output scenarios.
47 changes: 1 addition & 46 deletions visualkeras/layered.py
@@ -17,52 +17,8 @@
from keras import layers
except:
warnings.warn("Could not import the 'layers' module from Keras. text_callable will not work.")

_BUILT_IN_TEXT_CALLABLES = tuple(LAYERED_TEXT_CALLABLES.values())


def _resolve_layer_output_shape(layer) -> Any:
"""
Attempt to retrieve a layer's output shape across keras/tensorflow versions.

Prefers an explicit ``output_shape`` attribute, falls back to the tensor's
shape, and finally tries ``compute_output_shape`` when available.
"""
shape = getattr(layer, "output_shape", None)
if shape is not None:
return _shape_to_tuple(shape)

output = getattr(layer, "output", None)
tensor_shape = getattr(output, "shape", None)
if tensor_shape is not None:
return _shape_to_tuple(tensor_shape)

compute_output_shape = getattr(layer, "compute_output_shape", None)
if callable(compute_output_shape):
input_shape = getattr(layer, "input_shape", None)
if input_shape is not None:
try:
return _shape_to_tuple(compute_output_shape(input_shape))
except Exception: # noqa: BLE001
pass

return None


def _shape_to_tuple(shape: Any) -> Any:
if shape is None:
return None
if isinstance(shape, tuple):
return shape
if hasattr(shape, "as_list"):
try:
return tuple(shape.as_list())
except Exception: # noqa: BLE001
return tuple(shape)
if isinstance(shape, list):
return tuple(shape)
return shape

def layered_view(model,
to_file: str = None,
min_z: int = 20,
@@ -415,9 +371,8 @@ def layered_view(model,
layer_name = f'unknown_layer_{index}'

# Get the primary shape of the layer's output
raw_shape = _resolve_layer_output_shape(layer)
raw_shape = get_layer_output_shape(layer)
shape = extract_primary_shape(raw_shape, layer_name)

# Calculate dimensions with flexible sizing
x, y, z = calculate_layer_dimensions(
shape, scale_z, scale_xy,
Expand Down