"""Module for handling ATen to ONNX functions registration."""
from __future__ import annotations
import dataclasses
from typing import Optional, TYPE_CHECKING, Union
import torch._ops
from torch.onnx._internal import _beartype
# We can only import onnx from this module in a type-checking context to ensure that
# 'import torch.onnx' continues to work without having 'onnx' installed. We fully
# 'import onnx' inside of dynamo_export (by way of _assert_dependencies).
if TYPE_CHECKING:
    import onnxscript  # type: ignore[import]


@dataclasses.dataclass(frozen=True, eq=True)
class ONNXFunction:
    """A wrapper of an onnx-script function.

    Attributes:
        op_full_name: The qualified name of the function, in the form
            '<namespace>::<op_name>.<overload>'.
        onnx_function: The onnx-script function from torchlib.
        is_custom: Whether the function is a custom function.
        is_complex: Whether the function handles complex-valued inputs.
    """

    onnx_function: Union["onnxscript.OnnxFunction", "onnxscript.TracedOnnxFunction"]
    op_full_name: str
    is_custom: bool = False
    is_complex: bool = False
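
# A minimal sketch (illustrative only) of how an ONNXFunction wrapper might be built.
# It assumes 'onnxscript' and its torchlib ops are installed; the specific torchlib
# function and overload name below are examples, not part of this module.
#
#     from onnxscript.function_libs.torch_lib.ops import core as torchlib_core
#
#     add_function = ONNXFunction(
#         onnx_function=torchlib_core.aten_add,
#         op_full_name="aten::add.Tensor",
#     )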


@dataclasses.dataclass(frozen=True, eq=True)
class OpName:
    """A class representing an operator name in the internal ONNX converter."""

    namespace: str
    op_name: str
    overload: str

    @classmethod
    @_beartype.beartype
    def from_name_parts(
        cls, namespace: str, op_name: str, overload: Optional[str] = None
    ) -> OpName:
        # NOTE: In PyTorch, the overload may be omitted to indicate the default overload.
        # TODO: This is slightly unsafe in that a developer could accidentally create an
        # illegal OpName by calling the initializer directly.
        # https://github.com/pytorch/pytorch/pull/103943#discussion_r1256511069
        if overload is None or overload == "":
            overload = "default"
        return cls(namespace, op_name, overload)
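
    # Example (illustrative): an empty or missing overload falls back to "default".
    #
    #     OpName.from_name_parts("aten", "add", "Tensor")
    #     # -> OpName(namespace="aten", op_name="add", overload="Tensor")
    #     OpName.from_name_parts("aten", "add")
    #     # -> OpName(namespace="aten", op_name="add", overload="default")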

    @classmethod
    @_beartype.beartype
    def from_qualified_name(cls, qualified_name: str) -> OpName:
        """Parse a qualified name of the form '<namespace>::<op_name>[.<overload>]'."""
        namespace, opname_overload = qualified_name.split("::")
        op_name, *overload = opname_overload.split(".", 1)
        overload = overload[0] if overload else "default"
        return cls(namespace, op_name, overload)
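
    # Example (illustrative): the overload part of the qualified name is optional.
    #
    #     OpName.from_qualified_name("aten::add.Tensor")
    #     # -> OpName(namespace="aten", op_name="add", overload="Tensor")
    #     OpName.from_qualified_name("aten::add")
    #     # -> OpName(namespace="aten", op_name="add", overload="default")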

    @classmethod
    @_beartype.beartype
    def from_op_overload(cls, op_overload: torch._ops.OpOverload) -> OpName:
        """Create an OpName from the qualified name of a torch OpOverload."""
        return cls.from_qualified_name(op_overload.name())
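
    # Example (illustrative; assumes the standard ATen op registry, where
    # torch.ops.aten.add.Tensor.name() is "aten::add.Tensor"):
    #
    #     OpName.from_op_overload(torch.ops.aten.add.Tensor)
    #     # -> OpName(namespace="aten", op_name="add", overload="Tensor")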

    @_beartype.beartype
    def qualified_name(self) -> str:
        """Return the qualified name '<namespace>::<op_name>.<overload>'."""
        return f"{self.namespace}::{self.op_name}.{self.overload}"