Learn more  » Push, build, and install  RubyGems npm packages Python packages Maven artifacts PHP packages Go Modules Bower components Debian packages RPM packages NuGet packages

edgify / torch   python

Repository URL to install this package:

/ optim / optimizer.pyi

from typing import Iterable, Union, Callable, Optional, List, Dict, Any
from .. import Tensor
from torch.utils.hooks import RemovableHandle

# Parameter specification accepted by Optimizer.__init__: either a plain
# iterable of Tensors, or an iterable of per-group option dicts
# (presumably each containing a "params" entry plus group-specific
# hyperparameters — confirm against the runtime implementation).
_params_t = Union[Iterable[Tensor], Iterable[Dict[str, Any]]]

# Registers a global hook presumably invoked before every Optimizer.step()
# call (NOTE(review): inferred from the name — the runtime module defines the
# actual semantics). The returned RemovableHandle can be used to detach it.
def register_optimizer_step_pre_hook(hook: Callable[..., None]) -> RemovableHandle: ...

# Registers a global hook presumably invoked after every Optimizer.step()
# call (NOTE(review): inferred from the name — the runtime module defines the
# actual semantics). The returned RemovableHandle can be used to detach it.
def register_optimizer_step_post_hook(hook: Callable[..., None]) -> RemovableHandle: ...

class Optimizer:
    """Type stub for the base class of all ``torch.optim`` optimizers.

    Declares the public attributes and method signatures only; the runtime
    implementation lives in ``torch/optim/optimizer.py``.
    """

    # Default hyperparameter values supplied at construction.
    defaults: Dict[str, Any]
    # Optimizer state (NOTE(review): presumably keyed per parameter, e.g.
    # momentum buffers — confirm against the runtime implementation).
    state: Dict[str, Any]
    # Parameter groups; each dict holds parameters plus group-level options.
    param_groups: List[Dict[str, Any]]

    def __init__(self, params: _params_t, defaults: Dict[str, Any]) -> None: ...
    def __setstate__(self, state: Dict[str, Any]) -> None: ...
    # Per-instance step hooks; both return a handle whose .remove() detaches the hook.
    def register_step_pre_hook(self, hook: Callable[..., None]) -> RemovableHandle: ...
    def register_step_post_hook(self, hook: Callable[..., None]) -> RemovableHandle: ...
    # Serialization of optimizer state (mirrors nn.Module's state_dict protocol).
    def state_dict(self) -> Dict[str, Any]: ...
    def load_state_dict(self, state_dict: Dict[str, Any]) -> None: ...
    # PEP 8: spaces around "=" when the parameter carries an annotation.
    def zero_grad(self, set_to_none: bool = ...) -> None: ...
    # step() optionally re-evaluates the loss via `closure` and returns it.
    def step(self, closure: Optional[Callable[[], float]] = ...) -> Optional[float]: ...
    def add_param_group(self, param_group: Dict[str, Any]) -> None: ...