# ResNet
A ResNet adds shortcut (skip) connections that let data flow past a stack of layers and deeper into the network. This makes much deeper networks trainable. A ResNet can be built from residual blocks:
```python
class CNNResidual(Layer):
    """Residual block of `layers` stacked 3x3 Conv2D layers with a skip connection.

    Args:
        layers: number of Conv2D layers in the block.
        filters: number of filters in each Conv2D layer.
    """

    def __init__(self, layers, filters, **kwargs):
        super().__init__(**kwargs)
        # Bug fix: the original used the default padding='valid', which shrinks
        # the spatial dimensions on every conv, so the skip addition in call()
        # would fail with a shape mismatch. padding='same' preserves H and W.
        # NOTE(review): the input channel count must still equal `filters` for
        # inputs + x to broadcast — confirm at the call site.
        self.hidden = [
            Conv2D(filters, (3, 3), activation='relu', padding='same')
            for _ in range(layers)
        ]

    def call(self, inputs):
        x = inputs
        for layer in self.hidden:
            x = layer(x)
        # Skip connection: add the block's input to the transformed output.
        return inputs + x
class DenseResidual(Layer):
    """Residual block: `layers` Dense layers whose output is added to the input.

    Args:
        layers: number of Dense layers in the block.
        neurons: width of each Dense layer.

    NOTE(review): the skip addition requires `neurons` to equal the last
    dimension of the input — confirm at the call site.
    """

    def __init__(self, layers, neurons, **kwargs):
        super().__init__(**kwargs)
        self.hidden = [Dense(neurons, activation='relu') for _ in range(layers)]

    def call(self, inputs):
        out = inputs
        for dense in self.hidden:
            out = dense(out)
        # Skip connection around the whole stack.
        return inputs + out
class MyResnet(Model):
    """Small example model composed of the residual blocks defined above."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.hidden1 = Dense(30, activation='relu')
        self.block1 = CNNResidual(2, 32)
        # Bug fix: the original referenced an undefined `DNNResidual`; the
        # dense residual block defined in this file is `DenseResidual`.
        self.block2 = DenseResidual(2, 64)
        self.out = Dense(1)

    def call(self, inputs):
        x = self.hidden1(inputs)
        x = self.block1(x)
        # range(1, 4) yields three iterations, i.e. block2 is applied 3 times.
        for _ in range(1, 4):
            # Bug fix: `block2` was referenced without `self.`, which would
            # raise NameError at runtime.
            x = self.block2(x)
        return self.out(x)
```