import torch
import numpy as np

# initialize a tensor directly from data
data = [[1, 2], [3, 4]]
x_data = torch.tensor(data).to('xpu')
print(x_data)

# from a NumPy array
arr = np.array(data)
x_np = torch.from_numpy(arr).to('xpu')  # .to('xpu') copies the data onto the device
print(x_np)

# from another tensor (can override the shape and datatype)
x_ones = torch.ones_like(x_data)  # retains the properties of x_data
print(f"Ones Tensor: \n {x_ones} \n")

x_rand = torch.rand_like(x_data, dtype=torch.float)  # overrides the datatype
print(f"Random Tensor: \n {x_rand} \n")

# use `shape` to define the dimensions
shape = (3, 5)
rand_tensor = torch.rand(shape)
ones_tensor = torch.ones(shape)
zeros_tensor = torch.zeros(shape)
print(f"Random: \n {rand_tensor} \n")
print(f"Ones: \n {ones_tensor} \n")
print(f"Zeros: \n {zeros_tensor} \n")

# tensors have attributes
tensor_attr = torch.rand(3, 4).to('xpu')
print(f"Shape: {tensor_attr.shape}")
print(f"Datatype: {tensor_attr.dtype}")
print(f"Device: {tensor_attr.device}")

# indexing & slicing
tensor = torch.ones(4, 4).to('xpu')
print(f"First row: {tensor[0]}")
print(f"First column: {tensor[:, 0]}")
print(f"Last column: {tensor[..., -1]}")
print(tensor)

# joining tensors with torch.cat
t1 = torch.cat([tensor, tensor, tensor], dim=1).to('xpu')
print(t1)

# different ways to do matrix multiplication (y1, y2, y3 will have the same values)
y1 = tensor @ tensor.T
y2 = tensor.matmul(tensor.T)
y3 = torch.rand_like(y1)  # create a new tensor with the same shape as y1
torch.matmul(tensor, tensor.T, out=y3)

# compute the element-wise product (z1, z2, z3 will have the same values)
z1 = tensor * tensor
z2 = tensor.mul(tensor)
z3 = torch.rand_like(z1)
torch.mul(tensor, tensor, out=z3)

# convert a one-element tensor into a Python number with `item()`
agg = tensor.sum()
agg_item = agg.item()
print(agg, type(agg))
print(agg_item, type(agg_item))

# in-place operations (suffixed with `_`) write the result back into the operand
tensor.add_(5)  # assigns the result to tensor (i.e. overrides the original tensor)
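
# Optional device-selection fallback: a minimal sketch, not part of the original
# walkthrough. The examples above hard-code 'xpu', which raises an error on
# machines without an Intel GPU. This assumes `torch.xpu.is_available()` is
# exposed by your PyTorch build (PyTorch with XPU support, or with
# intel_extension_for_pytorch imported); otherwise it falls back to CPU.
device = 'xpu' if hasattr(torch, 'xpu') and torch.xpu.is_available() else 'cpu'
fallback_tensor = torch.ones(4, 4).to(device)
print(f"Device in use: {fallback_tensor.device}")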