# Copyright 2024 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

"""Op tuning interfaces."""
from mindspore.device_manager import _check_runtime_conf_env_valid


try:
    from mindspore._c_expression import AscendOpTuningConf
except ImportError:
    pass

def op_compile(value):
    """
    Whether to select online compilation. By default, the framework uses online compilation
    for static shapes and compiled operator binary files for dynamic shapes. The default
    settings may change in the future. For detailed information, please refer to
    `Ascend community <https://www.hiascend.com/document/detail/zh/canncommercial/80RC3/apiref/appdevgapi/aclcppdevg_03_1371.html/>`_ .

    Args:
        value (bool): Whether to select online compilation or not.

            - ``True``: online compilation is prioritized.
            - ``False``: compiled operator binary files are prioritized to improve compilation performance.

    Examples:
        >>> import mindspore as ms
        >>> ms.device_context.ascend.op_tuning.op_compile(True)
    """
    # Check whether the configuration environment is valid.
    _check_runtime_conf_env_valid()
    if AscendOpTuningConf.get_instance().is_jit_compile_configured():
        raise RuntimeError("The 'op_compile' cannot be set repeatedly.")
    supported_modes = [True, False]
    if value not in supported_modes:
        raise TypeError(f"For 'op_compile', the type of input value must be one of "
                        f"{supported_modes}, but got {value}.")
    is_enable = "1" if value else "0"
    AscendOpTuningConf.get_instance().set_jit_compile(is_enable)

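
# A minimal usage sketch (not part of this module): on dynamic-shape workloads,
# preferring pre-compiled operator binaries can shorten compilation time. The
# snippet assumes an Ascend backend is available and configured in the caller's
# script; it is kept as comments because this file is a module, not a script.
#
#     import mindspore as ms
#     # Prefer compiled operator binary files over online compilation.
#     ms.device_context.ascend.op_tuning.op_compile(False)
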
def aoe_tune_mode(tune_mode):
    """
    AOE tuning mode setting, which is not set by default.
    For detailed information, please refer to
    `Ascend Optimization Engine <https://www.mindspore.cn/docs/en/master/model_train/optimize/aoe.html>`_ .

    Args:
        tune_mode (str): AOE tuning mode setting.

            - ``"online"``: the online tuning function is turned on.
            - ``"offline"``: the GE graph will be saved for offline tuning.

    Examples:
        >>> import mindspore as ms
        >>> ms.device_context.ascend.op_tuning.aoe_tune_mode("online")
    """
    # Check whether the configuration environment is valid.
    _check_runtime_conf_env_valid()
    if AscendOpTuningConf.get_instance().is_aoe_tune_mode_configured():
        raise RuntimeError("The 'aoe_tune_mode' cannot be set repeatedly.")
    candidate = ["online", "offline"]
    if tune_mode not in candidate:
        raise ValueError(
            f"For 'device_context.ascend.op_tuning.aoe_tune_mode', the argument 'tune_mode' must be in "
            f"{candidate}, but got {tune_mode}."
        )
    AscendOpTuningConf.get_instance().set_aoe_tune_mode(tune_mode)

def aoe_job_type(config):
    """
    Set the parameters specific to the Ascend Optimization Engine. It must be used in
    conjunction with `mindspore.device_context.ascend.op_tuning.aoe_tune_mode`.
    The framework sets it to ``"2"`` by default. For detailed information, please refer to
    `Ascend Optimization Engine <https://www.mindspore.cn/docs/en/master/model_train/optimize/aoe.html>`_ .

    Args:
        config (str): Choose the tuning type.

            - ``"1"``: subgraph tuning.
            - ``"2"``: operator tuning.

    Examples:
        >>> import mindspore as ms
        >>> ms.device_context.ascend.op_tuning.aoe_job_type("1")
    """
    # Check whether the configuration environment is valid.
    _check_runtime_conf_env_valid()
    if AscendOpTuningConf.get_instance().is_aoe_job_type_configured():
        raise RuntimeError("The 'aoe_job_type' cannot be set repeatedly.")
    aoe_cfgs = ["1", "2"]
    if config not in aoe_cfgs:
        raise ValueError(
            f"For 'aoe_job_type', the config must be one of {aoe_cfgs}, but got {config}."
        )
    AscendOpTuningConf.get_instance().set_aoe_job_type(config)
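

# A minimal combined sketch (not part of this module): aoe_job_type only takes
# effect together with aoe_tune_mode, and each setter may be called at most once
# per process. The device setup line is an assumption about the surrounding
# script, not something this module requires.
#
#     import mindspore as ms
#     ms.set_context(device_target="Ascend")                      # assumed setup step
#     ms.device_context.ascend.op_tuning.aoe_tune_mode("online")  # turn on online AOE tuning
#     ms.device_context.ascend.op_tuning.aoe_job_type("2")        # "2" selects operator tuning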