Harness the power of the Terrell-Penrose effect for novel information processing paradigms ⚡
torch-relativistic provides neural network modules that incorporate concepts from special relativity into machine learning. The key insight is that the Terrell-Penrose effect, where rapidly moving objects appear rotated rather than contracted, can inspire revolutionary information processing paradigms in neural networks.
- 🧠 Relativistic Graph Neural Networks (GNNs) - Process graphs with relativistic information propagation
- ⚡ Relativistic Spiking Neural Networks (SNNs) - Time dilation effects in spiking neurons
- 🔍 Relativistic Attention Mechanisms - Multi-reference frame attention heads
- 🔄 Relativistic Transformations - Lorentz boosts and Terrell-Penrose transforms
- 🔬 Physics-Inspired Architecture - Grounded in real relativistic physics
pip install torch-relativistic

Or install from source:

git clone https://github.com/yourusername/torch-relativistic.git
cd torch-relativistic
pip install -e .

Requirements:

- 🐍 Python ≥ 3.11
- 🔥 PyTorch ≥ 2.0.0
- 📊 PyTorch Geometric ≥ 2.6.1
- 🔢 NumPy ≥ 1.20.0
import torch
from torch_relativistic import RelativisticGraphConv
# Create a relativistic GNN layer
conv = RelativisticGraphConv(16, 32, max_relative_velocity=0.8)
x = torch.randn(10, 16)
edge_index = torch.tensor([[0, 1, 2], [1, 2, 0]], dtype=torch.long)
# Process with relativistic effects
output = conv(x, edge_index)  # Shape: [10, 32]

GNN modules that process information as if affected by relativistic phenomena:
import torch
from torch_relativistic.gnn import RelativisticGraphConv, MultiObserverGNN
# Create a simple graph
num_nodes = 10
feature_dim = 16
edge_index = torch.tensor([[0, 1, 1, 2, 2, 3, 3, 4, 4, 0],
[1, 0, 2, 1, 3, 2, 4, 3, 0, 4]], dtype=torch.long)
node_features = torch.randn(num_nodes, feature_dim)
# Create a relativistic GNN layer
conv = RelativisticGraphConv(
in_channels=feature_dim,
out_channels=32,
max_relative_velocity=0.8
)
# Process the graph
output_features = conv(node_features, edge_index)
print(f"Output shape: {output_features.shape}") # [10, 32]
# Multi-observer GNN processes the graph from multiple relativistic perspectives
multi_observer_gnn = MultiObserverGNN(
feature_dim=feature_dim,
hidden_dim=32,
output_dim=8,
num_observers=4
)
output = multi_observer_gnn(node_features, edge_index)
print(f"Multi-observer output shape: {output.shape}")  # [10, 8]

SNN components that incorporate relativistic time dilation:
import torch
from torch_relativistic.snn import RelativisticLIFNeuron, TerrellPenroseSNN
# Create input spikes (batch_size=32, input_size=10)
input_spikes = torch.bernoulli(torch.ones(32, 10) * 0.3)
# Create a relativistic LIF neuron
neuron = RelativisticLIFNeuron(
input_size=10,
threshold=1.0,
beta=0.9
)
# Initialize neuron state
initial_state = neuron.init_state(batch_size=32)
# Process input spikes
output_spikes, new_state = neuron(input_spikes, initial_state)
print(f"Output spikes shape: {output_spikes.shape}") # [32]
# Create a complete SNN
snn = TerrellPenroseSNN(
input_size=10,
hidden_size=20,
output_size=5,
simulation_steps=100
)
# Process input
output = snn(input_spikes)
print(f"SNN output shape: {output.shape}") # [32, 5]
# Get spike history for visualization
spike_history = snn.get_spike_history(input_spikes)
print(f"Hidden spike history shape: {spike_history['hidden_spikes'].shape}")  # [32, 100, 20]

Attention where different heads operate in different reference frames:
import torch
from torch_relativistic.attention import RelativisticSelfAttention
# Create input sequence (batch_size=16, seq_len=24, feature_dim=64)
seq = torch.randn(16, 24, 64)
# Create relativistic self-attention module
attention = RelativisticSelfAttention(
hidden_dim=64,
num_heads=8,
dropout=0.1,
max_velocity=0.9
)
# Optional: Create positions for spacetime distances
positions = torch.randn(16, 24, 3) # 3D positions for each token
# Process sequence
output = attention(seq, positions=positions)
print(f"Output shape: {output.shape}")  # [16, 24, 64]

Apply transformations inspired by special relativity to feature vectors:
import torch
from torch_relativistic.transforms import TerrellPenroseTransform, LorentzBoost
# Create feature vectors (batch_size=8, feature_dim=64)
features = torch.randn(8, 64)
# Apply Terrell-Penrose inspired transformation
transform = TerrellPenroseTransform(
feature_dim=64,
max_velocity=0.9,
mode="rotation"
)
transformed = transform(features)
print(f"Transformed shape: {transformed.shape}") # [8, 64]
# For spacetime features (batch_size=8, feature_dim=8 including 4D spacetime)
spacetime_features = torch.randn(8, 8)
# Apply Lorentz boost
boost = LorentzBoost(
feature_dim=8,
time_dim=0, # First dimension is time
max_velocity=0.8
)
boosted = boost(spacetime_features)
print(f"Boosted shape: {boosted.shape}")  # [8, 8]

# Install development dependencies
pip install -e ".[dev]"
# Run tests
pytest tests/ -v
# Run with coverage
pytest tests/ --cov=torch_relativistic

# Format code
black src/ tests/
# Check linting
ruff check src/ tests/
# Type checking
mypy src/

We welcome contributions! Please see our Contributing Guidelines for details.
- Fork the repository
- Clone your fork:
  git clone https://github.com/yourusername/torch-relativistic.git
- Install in development mode:
  pip install -e ".[dev]"
- Create a feature branch:
  git checkout -b feature/amazing-feature
- Make your changes and add tests
- Run tests:
  pytest tests/
- Submit a pull request
This project is licensed under the MIT License - see the LICENSE file for details.
- 🌌 Inspired by Einstein's Special Theory of Relativity
- 🔬 Built on the Terrell-Penrose effect from relativistic physics
- 🔥 Powered by PyTorch and PyTorch Geometric
- ⚡ Thanks to the open-source ML community