# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
"""
:mod:`fairscale.optim` is a package implementing various torch optimization algorithms.
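
Example (a minimal sketch, not a full recipe): ``OSS`` shards optimizer state
across data-parallel workers behind the standard ``torch.optim.Optimizer``
interface, so the snippet below assumes a ``torch.distributed`` process group
has already been initialized and that ``torch`` is available::

    import torch
    from fairscale.optim import OSS

    model = torch.nn.Linear(4, 2)
    # Pass the base optimizer *class* plus its keyword arguments; OSS builds
    # one shard of that optimizer per rank and syncs updated parameters.
    optimizer = OSS(model.parameters(), optim=torch.optim.SGD, lr=0.01)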
"""

import logging
from typing import List

from .adascale import AdaScale, AdaScaleWrapper
from .oss import OSS

try:
    from .adam import Adam, Precision
except ImportError:  # pragma: no cover
    pass  # pragma: no cover

try:
    from .grad_scaler import GradScaler
except ImportError:
    logging.warning("Torch AMP is not available on this platform")

__all__: List[str] = []