DEV Community

Super Kai (Kazuya Ito)
Super Kai (Kazuya Ito)

Posted on

tile() and repeat_interleave() in PyTorch

tile() can repeat the zero or more elements of a 0D or more-D tensor a given number of times along each dimension, as shown below:

*Memos:

  • tile() can be called both from torch and a tensor.
  • The 2nd argument is one or more dimensions(dims) with torch.
  • The 1st argument (a tuple) or the 1st and subsequent arguments (separate integers) are one or more dimensions(dims) with a tensor.
  • If at least one dimension is 0, the returned tensor is empty.

1D tensor:

import torch

my_tensor = torch.tensor([3, 5, 1])

torch.tile(my_tensor, (0,))
my_tensor.tile((0,))
my_tensor.tile(0)
# tensor([], dtype=torch.int64)

torch.tile(my_tensor, (1,))
my_tensor.tile((1,))
my_tensor.tile(1)
# tensor([3, 5, 1])

torch.tile(my_tensor, (2,))
my_tensor.tile((2,))
my_tensor.tile(2)
# tensor([3, 5, 1, 3, 5, 1])

torch.tile(my_tensor, (3,))
my_tensor.tile((3,))
my_tensor.tile(3)
# tensor([3, 5, 1, 3, 5, 1, 3, 5, 1])
etc.

torch.tile(my_tensor, (1, 1))
my_tensor.tile((1, 1))
my_tensor.tile(1, 1)
# tensor([[3, 5, 1]])

torch.tile(my_tensor, (1, 2))
my_tensor.tile((1, 2))
my_tensor.tile(1, 2)
# tensor([[3, 5, 1, 3, 5, 1]])

torch.tile(my_tensor, (1, 3))
my_tensor.tile((1, 3))
my_tensor.tile(1, 3)
# tensor([[3, 5, 1, 3, 5, 1, 3, 5, 1]])
etc.

torch.tile(my_tensor, (2, 1))
my_tensor.tile((2, 1))
my_tensor.tile(2, 1)
# tensor([[3, 5, 1],
#         [3, 5, 1]])

torch.tile(my_tensor, (2, 2))
my_tensor.tile((2, 2))
my_tensor.tile(2, 2)
# tensor([[3, 5, 1, 3, 5, 1],
#         [3, 5, 1, 3, 5, 1]])

torch.tile(my_tensor, (2, 3))
my_tensor.tile((2, 3))
my_tensor.tile(2, 3)
# tensor([[3, 5, 1, 3, 5, 1, 3, 5, 1],
#         [3, 5, 1, 3, 5, 1, 3, 5, 1]])
etc.

torch.tile(my_tensor, (3, 1))
my_tensor.tile((3, 1))
my_tensor.tile(3, 1)
# tensor([[3, 5, 1],
#         [3, 5, 1],
#         [3, 5, 1]])
etc.

torch.tile(my_tensor, (1, 1, 1))
my_tensor.tile((1, 1, 1))
my_tensor.tile(1, 1, 1)
# tensor([[[3, 5, 1]]])
etc.
Enter fullscreen mode Exit fullscreen mode

2D tensor:

import torch

my_tensor = torch.tensor([[3, 5, 1], [6, 0, 5]])

torch.tile(my_tensor, (0,))
my_tensor.tile((0,))
my_tensor.tile(0)
# tensor([], size=(2, 0), dtype=torch.int64)

torch.tile(my_tensor, (1,))
my_tensor.tile((1,))
my_tensor.tile(1)
# tensor([[3, 5, 1], [6, 0, 5]])

torch.tile(my_tensor, (2,))
my_tensor.tile((2,))
my_tensor.tile(2)
torch.tile(my_tensor, (1, 2))
my_tensor.tile((1, 2))
my_tensor.tile(1, 2)
# tensor([[3, 5, 1, 3, 5, 1], [6, 0, 5, 6, 0, 5]])

torch.tile(my_tensor, (3,))
my_tensor.tile((3,))
my_tensor.tile(3)
torch.tile(my_tensor, (1, 3))
my_tensor.tile((1, 3))
my_tensor.tile(1, 3)
# tensor([[3, 5, 1, 3, 5, 1, 3, 5, 1], [6, 0, 5, 6, 0, 5, 6, 0, 5]])
etc.

torch.tile(my_tensor, (2, 1))
my_tensor.tile((2, 1))
my_tensor.tile(2, 1)
# tensor([[3, 5, 1],
#         [6, 0, 5],
#         [3, 5, 1],
#         [6, 0, 5]])

torch.tile(my_tensor, (2, 2))
my_tensor.tile((2, 2))
my_tensor.tile(2, 2)
# tensor([[3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5],
#         [3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5]])

torch.tile(my_tensor, (2, 3))
my_tensor.tile((2, 3))
my_tensor.tile(2, 3)
# tensor([[3, 5, 1, 3, 5, 1, 3, 5, 1], 
#         [6, 0, 5, 6, 0, 5, 6, 0, 5],
#         [3, 5, 1, 3, 5, 1, 3, 5, 1], 
#         [6, 0, 5, 6, 0, 5, 6, 0, 5]])
etc.

torch.tile(my_tensor, (3, 1))
my_tensor.tile((3, 1))
my_tensor.tile(3, 1)
# tensor([[3, 5, 1],
#         [6, 0, 5],
#         [3, 5, 1],
#         [6, 0, 5],
#         [3, 5, 1],
#         [6, 0, 5]])

torch.tile(my_tensor, (3, 2))
my_tensor.tile((3, 2))
my_tensor.tile(3, 2)
# tensor([[3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5],
#         [3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5],
#         [3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5]])

torch.tile(my_tensor, (3, 3))
my_tensor.tile((3, 3))
my_tensor.tile(3, 3)
# tensor([[3, 5, 1, 3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5, 6, 0, 5],
#         [3, 5, 1, 3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5, 6, 0, 5],
#         [3, 5, 1, 3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5, 6, 0, 5]])
etc.

torch.tile(my_tensor, (1, 1, 1))
my_tensor.tile((1, 1, 1))
my_tensor.tile(1, 1, 1)
# tensor([[[3, 5, 1], [6, 0, 5]]])
etc.
Enter fullscreen mode Exit fullscreen mode

repeat_interleave() can repeat each of the zero or more elements of a 0D or more-D tensor consecutively (element by element), as shown below:

*Memos:

  • repeat_interleave() can be called both from torch and a tensor.
  • The 2nd argument is repeats with torch.
  • The 1st argument is repeats with a tensor.
  • The 3rd argument is a dimension(dim) with torch.
  • The 2nd argument is a dimension(dim) with a tensor.
  • With torch, a 1D tensor can be passed as the only argument; it is then treated as repeats, returning a tensor of indices from 0 onwards, where each index i is repeated repeats[i] times.

1D tensor:

import torch

my_tensor = torch.tensor([3, 5, 1])

torch.repeat_interleave(my_tensor)
# tensor([0, 0, 0, 1, 1, 1, 1, 1, 2])

torch.repeat_interleave(my_tensor, 0)
torch.repeat_interleave(my_tensor, 0, 0)
torch.repeat_interleave(my_tensor, 0, -1)
my_tensor.repeat_interleave(0)
my_tensor.repeat_interleave(0, 0)
my_tensor.repeat_interleave(0, -1)
# tensor([], dtype=torch.int64)

torch.repeat_interleave(my_tensor, 1)
torch.repeat_interleave(my_tensor, 1, 0)
torch.repeat_interleave(my_tensor, 1, -1)
my_tensor.repeat_interleave(1)
my_tensor.repeat_interleave(1, 0)
my_tensor.repeat_interleave(1, -1)
# tensor([3, 5, 1])

my_tensor.repeat_interleave(2)
my_tensor.repeat_interleave(2, 0)
my_tensor.repeat_interleave(2, -1)
# tensor([3, 3, 5, 5, 1, 1])

my_tensor.repeat_interleave(3)
my_tensor.repeat_interleave(3, 0)
my_tensor.repeat_interleave(3, -1)
# tensor([3, 3, 3, 5, 5, 5, 1, 1, 1])
Enter fullscreen mode Exit fullscreen mode

2D tensor:

import torch

my_tensor = torch.tensor([[3, 5, 1], [6, 0, 5]])

torch.repeat_interleave(my_tensor, 0)
my_tensor.repeat_interleave(0)
# tensor([], dtype=torch.int64)

torch.repeat_interleave(my_tensor, 1)
my_tensor.repeat_interleave(1)
# tensor([3, 5, 1, 6, 0, 5])

torch.repeat_interleave(my_tensor, 2)
my_tensor.repeat_interleave(2)
# tensor([3, 3, 5, 5, 1, 1, 6, 6, 0, 0, 5, 5])

torch.repeat_interleave(my_tensor, 3)
my_tensor.repeat_interleave(3)
# tensor([3, 3, 3, 5, 5, 5, 1, 1, 1, 6, 6, 6, 0, 0, 0, 5, 5, 5])

torch.repeat_interleave(my_tensor, 0, 0)
my_tensor.repeat_interleave(0, 0)
torch.repeat_interleave(my_tensor, 0, -2)
my_tensor.repeat_interleave(0, -2)
# tensor([], size=(0, 3), dtype=torch.int64)

torch.repeat_interleave(my_tensor, 0, 1)
my_tensor.repeat_interleave(0, 1)
torch.repeat_interleave(my_tensor, 0, -1)
my_tensor.repeat_interleave(0, -1)
# tensor([], size=(2, 0), dtype=torch.int64)

torch.repeat_interleave(my_tensor, 1, 0)
my_tensor.repeat_interleave(1, 0)
torch.repeat_interleave(my_tensor, 1, 1)
my_tensor.repeat_interleave(1, 1)
torch.repeat_interleave(my_tensor, 1, -1)
my_tensor.repeat_interleave(1, -1)
torch.repeat_interleave(my_tensor, 1, -2)
my_tensor.repeat_interleave(1, -2)
# tensor([[3, 5, 1], [6, 0, 5]])

torch.repeat_interleave(my_tensor, 2, 0)
my_tensor.repeat_interleave(2, 0)
torch.repeat_interleave(my_tensor, 2, -2)
my_tensor.repeat_interleave(2, -2)
# tensor([[3, 5, 1], [3, 5, 1], [6, 0, 5], [6, 0, 5]])

torch.repeat_interleave(my_tensor, 2, 1)
my_tensor.repeat_interleave(2, 1)
torch.repeat_interleave(my_tensor, 2, -1)
my_tensor.repeat_interleave(2, -1)
# tensor([[3, 3, 5, 5, 1, 1], [6, 6, 0, 0, 5, 5]])

torch.repeat_interleave(my_tensor, 3, 0)
my_tensor.repeat_interleave(3, 0)
torch.repeat_interleave(my_tensor, 3, -2)
my_tensor.repeat_interleave(3, -2)
# tensor([[3, 5, 1], [3, 5, 1], [3, 5, 1],
#         [6, 0, 5], [6, 0, 5], [6, 0, 5]])

torch.repeat_interleave(my_tensor, 3, 1)
my_tensor.repeat_interleave(3, 1)
torch.repeat_interleave(my_tensor, 3, -1)
my_tensor.repeat_interleave(3, -1)
# tensor([[3, 3, 3, 5, 5, 5, 1, 1, 1],
#         [6, 6, 6, 0, 0, 0, 5, 5, 5]])
Enter fullscreen mode Exit fullscreen mode

Top comments (0)