Skip to content

Commit

Permalink
Split torch ops into separate files
Browse files Browse the repository at this point in the history
Fixes sbrunk#29

Bonus additions

- Add torch.multinomial
- Add torch.oneHot
- Add torch.gradient
  • Loading branch information
davoclavo committed Jun 18, 2023
1 parent 45146d5 commit f2d6fcf
Show file tree
Hide file tree
Showing 16 changed files with 2,395 additions and 2,011 deletions.
28 changes: 28 additions & 0 deletions core/src/main/scala/torch/nn/functional/sparse.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
/*
* Copyright 2022 storch.dev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package torch
package nn
package functional

import org.bytedeco.pytorch.global.torch as torchNative

/** Takes a `LongTensor` of index values with shape `(*)` and returns a tensor of shape `(*,
  * numClasses)` that is zero everywhere except where the index of the last dimension matches
  * the corresponding value of the input tensor, in which case it will be 1.
  *
  * @param input
  *   class indices of any shape
  * @param numClasses
  *   total number of classes; `-1` means it is inferred as one greater than the largest index
  *   found in `input`
  * @see
  *   https://pytorch.org/docs/stable/generated/torch.nn.functional.one_hot.html
  */
def oneHot(input: Tensor[Int64], numClasses: Long = -1): Tensor[Int64] =
  val encoded = torchNative.one_hot(input.native, numClasses)
  Tensor(encoded)
25 changes: 25 additions & 0 deletions core/src/main/scala/torch/ops/BLASOps.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
/*
* Copyright 2022 storch.dev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package torch

/** BLAS and LAPACK Operations
*
* https://pytorch.org/docs/stable/torch.html#blas-and-lapack-operations
*/

/** Matrix product of two tensors.
  *
  * Delegates to [[Tensor.matmul]] on the first operand; the element type of the result is the
  * promotion of the two operand dtypes.
  *
  * @param t1
  *   the first tensor to be multiplied
  * @param t2
  *   the second tensor to be multiplied
  * @see
  *   https://pytorch.org/docs/stable/generated/torch.matmul.html
  */
def matmul[D1 <: DType, D2 <: DType](t1: Tensor[D1], t2: Tensor[D2]): Tensor[Promoted[D1, D2]] =
  val product = t1.matmul(t2)
  product
33 changes: 33 additions & 0 deletions core/src/main/scala/torch/ops/ComparisonOps.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
/*
* Copyright 2022 storch.dev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package torch

import org.bytedeco.pytorch.global.torch as torchNative

/** Comparison Ops
*
* https://pytorch.org/docs/stable/torch.html#comparison-ops
*/

/** Checks whether `input` and `other` are elementwise close, i.e. whether
  * `|input - other| <= atol + rtol * |other|` holds for every element.
  *
  * @param input
  *   first tensor to compare
  * @param other
  *   second tensor to compare
  * @param rtol
  *   relative tolerance
  * @param atol
  *   absolute tolerance
  * @param equalNan
  *   if `true`, two `NaN` values are considered equal
  * @return
  *   `true` if all elements are within the given tolerances
  * @see
  *   https://pytorch.org/docs/stable/generated/torch.allclose.html
  */
def allclose(
    input: Tensor[?],
    other: Tensor[?],
    rtol: Double = 1e-05,
    atol: Double = 1e-08,
    equalNan: Boolean = false
): Boolean =
  // Explicit Boolean result type: public API members should not rely on inference.
  torchNative.allclose(input.native, other.native, rtol, atol, equalNan)
Loading

0 comments on commit f2d6fcf

Please sign in to comment.