# NOTE(review): the two lines below are impress.js web-page boilerplate that was
# pasted in by accident; kept as comments so the file remains valid Python.
# Your browser doesn't support the features required by impress.js, so you are presented with a simplified version of this presentation.
# For the best experience please use the latest Chrome, Safari or Firefox browser.
from PIL import Image # a library for image IO
import numpy as np # a library for linear algebra
import torch # a library for deep learning
# Load a demo image from disk with PIL.
file_path = "./cvpr.png"
im = Image.open(file_path)
# Open the platform's default viewer so the image can be inspected.
im.show()
# PIL image -> numpy array -> scale pixel values into [0, 1] -> float32 torch tensor.
pixels = np.array(im) / 255
image_tensor = torch.FloatTensor(pixels)
# Show the tensor's dimensions (presumably height x width x channels — the
# standard PIL-to-numpy layout; confirm for other image modes).
print(image_tensor.shape)
# Sample output:
# torch.Size([853, 1259, 3])
# Peek at the 3x3 upper-left patch of the first channel.
print(image_tensor[:3, :3, 0])
# Sample output:
# tensor([[0.4510, 0.4471, 0.4471],
# [0.4549, 0.4549, 0.4549],
# [0.4549, 0.4549, 0.4549]])
# Example 1: a square 3x3 random system A x = b.
# A random square matrix is invertible with probability 1, so the
# Moore-Penrose pseudo-inverse coincides with the true inverse and the
# solution is exact.
A = np.random.rand(3, 3)
b = np.random.rand(3, 1)
A_pinv = np.linalg.pinv(A)
x = A_pinv @ b
print('A=', A)
print('b=', b)
print('x=', x)
# The residual is at machine precision (~1e-16) because the system is consistent.
print('Ax-b', A @ x - b)
# Sample output:
# A= [[6.97446218e-01 3.47415656e-01 5.93783190e-01]
# [3.20606804e-01 2.51155615e-04 8.31997605e-01]
# [4.16578041e-01 4.96157978e-01 4.90465357e-01]]
# b= [[0.59153536]
# [0.92799786]
# [0.51953575]]
# x= [[-0.16647919]
# [ 0.02089704]
# [ 1.179531 ]]
# Ax-b [[3.33066907e-16]
# [1.11022302e-16]
# [3.33066907e-16]]
# Example 2: an underdetermined 2x3 system (fewer equations than unknowns).
# Infinitely many solutions exist; the pseudo-inverse picks the one with
# minimum Euclidean norm, which still satisfies A x = b exactly.
A = np.random.rand(2, 3)
b = np.random.rand(2, 1)
A_pinv = np.linalg.pinv(A)
x = A_pinv @ b
print('A=', A)
print('b=', b)
print('x=', x)
# Residual is again ~0: the min-norm solution is an exact solution here.
print('Ax-b', A @ x - b)
# Sample output:
# A= [[0.05871486 0.033507 0.94260723]
# [0.69295901 0.84740045 0.74196673]]
# b= [[0.63610879]
# [0.07260413]]
# x= [[-0.23861937]
# [-0.33346011]
# [ 0.70155682]]
# Ax-b [[4.44089210e-16]
# [2.77555756e-16]]
# Example 3: an overdetermined 3x2 system (more equations than unknowns).
# In general no exact solution exists; the pseudo-inverse returns the
# least-squares solution, i.e. the x minimizing ||A x - b||_2.
A = np.random.rand(3, 2)
b = np.random.rand(3, 1)
A_pinv = np.linalg.pinv(A)
x = A_pinv @ b
print('A=', A)
print('b=', b)
print('x=', x)
# Unlike the previous examples, the residual is NOT zero — it is the
# (orthogonal) part of b outside the column space of A.
print('Ax-b', A @ x - b)
# Sample output:
# A= [[0.26404115 0.29162541]
# [0.1664837 0.07120312]
# [0.87827123 0.31327209]]
# b= [[0.99762258]
# [0.54386582]
# [0.86558511]]
# x= [[-0.28797735]
# [ 3.74833987]]
# Ax-b [[ 0.01945071]
# [-0.32491585]
# [ 0.05574292]]