You cannot select more than 25 topics.
Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
16 lines
453 B
16 lines
453 B
import torch
|
|
|
|
|
|
def normalize(v: torch.Tensor, eps: float = 0.0) -> torch.Tensor:
    """Scale vectors along the last dimension to unit Euclidean (L2) length.

    Args:
        v: Tensor of shape ``(..., d)``; each vector along the trailing
            dimension is normalized independently.
        eps: Optional lower bound applied to the norm before dividing.
            The default of ``0.0`` preserves the original behavior, in
            which a zero vector produces NaN/inf; pass a small positive
            value (e.g. ``1e-12``) to make the division safe instead.

    Returns:
        A tensor of the same shape as ``v`` whose trailing-dimension
        vectors have unit L2 norm (where the input norm exceeds ``eps``).
    """
    norm = torch.linalg.norm(v, dim=-1, keepdim=True)
    if eps > 0.0:
        # Opt-in guard: clamp tiny norms so zero vectors map to zero
        # output rather than NaN.
        norm = norm.clamp_min(eps)
    return v / norm
|
|
|
|
|
|
def cross_product(v1: torch.Tensor, v2: torch.Tensor) -> torch.Tensor:
    """Compute the 3D cross product ``v1 x v2`` along the last dimension.

    Replaces the hand-rolled component formula with the equivalent
    ``torch.linalg.cross``, which computes the same three components
    and broadcasts the inputs the same way.

    Args:
        v1: Tensor of shape ``(..., 3)``.
        v2: Tensor of shape ``(..., 3)``, broadcastable against ``v1``.

    Returns:
        Tensor of shape ``(..., 3)`` holding the per-vector cross products.
    """
    # Both inputs must have size 3 in the last dimension, matching the
    # original implementation's implicit requirement.
    return torch.linalg.cross(v1, v2, dim=-1)
|
|
|