FastDeploy/examples/multimodal/stable_diffusion/config_utils.py
Jack Zhou d4995e5468 [Model] Add stable diffusion model based on fastdeploy (#297)
* Add stable diffusion model based on fastdeploy

* Add sd infer

* pipelines->multimodal

* add create_ort_runtime

* use fp16 input

* fix pil

* Add optimize unet model

* add hf license

* Add workspace args

* Add profile func

* Add schedulers

* Replace torch.Tensor by np.ndarray

* Add readme

* Add trt shape setting

* add dynamic shape

* Add dynamic shape for stable diffusion

* fix max shape setting

* rename tensorrt file suffix

* update dynamic shape setting

* Add scheduler output

* Add inference_steps and benchmark steps

* add diffuser benchmark

* Add paddle infer script

* Rename 1

* Rename infer.py to torch_onnx_infer.py

* Add export torch to onnx model

* remove export model

* Add paddle export model for diffusion

* Fix export model

* mv torch onnx infer to infer

* Fix export model

* Fix infer

* Modify create_trt_runtime and create_ort_runtime

* update export torch

* update requirements

* add paddle inference backend

* Fix unet pp run

* remove print

* Add paddle model export and infer

* Add device id

* move profile to utils

* Add -1 device id

* Add safety checker args

* remove safety checker temporarily

* Add export model description

* Add predict description

* Fix readme

* Fix device_id description

* add timestep shape

* add use fp16 precision

* move use gpu

* Add EulerAncestralDiscreteScheduler

* Use EulerAncestralDiscreteScheduler with v1-5 model

* Add export model readme

* Add link of exported model

* Update scheduler on README

* Add stable-diffusion-v1-5
2022-11-10 14:59:07 +08:00

157 lines
5.8 KiB
Python

# Copyright 2022 The HuggingFace Inc. team.
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import inspect
import logging
from collections import OrderedDict
from typing import Any, Dict, Tuple, Union

# The code below references a module-level ``logger``; define one here so the
# logging calls resolve.
logger = logging.getLogger(__name__)

class ConfigMixin:
r"""
Base class for all configuration classes. Stores all configuration parameters under `self.config` Also handles all
methods for loading/downloading/saving classes inheriting from [`ConfigMixin`] with
- [`~ConfigMixin.from_config`]
- [`~ConfigMixin.save_config`]
Class attributes:
- **config_name** (`str`) -- A filename under which the config should stored when calling
[`~ConfigMixin.save_config`] (should be overridden by parent class).
- **ignore_for_config** (`List[str]`) -- A list of attributes that should not be saved in the config (should be
overridden by parent class).
"""
config_name = None
ignore_for_config = []
def register_to_config(self, **kwargs):
if self.config_name is None:
raise NotImplementedError(
f"Make sure that {self.__class__} has defined a class name `config_name`"
)
kwargs["_class_name"] = self.__class__.__name__
# Special case for `kwargs` used in deprecation warning added to schedulers
# TODO: remove this when we remove the deprecation warning, and the `kwargs` argument,
# or solve in a more general way.
kwargs.pop("kwargs", None)
for key, value in kwargs.items():
try:
setattr(self, key, value)
except AttributeError as err:
logger.error(f"Can't set {key} with value {value} for {self}")
raise err
if not hasattr(self, "_internal_dict"):
internal_dict = kwargs
else:
previous_dict = dict(self._internal_dict)
            internal_dict = {**self._internal_dict, **kwargs}
logger.debug(
f"Updating config from {previous_dict} to {internal_dict}")
self._internal_dict = FrozenDict(internal_dict)
@property
def config(self) -> Dict[str, Any]:
return self._internal_dict
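
# Usage sketch (illustrative only; ``ToyModel`` is a hypothetical name, not
# part of this module): a subclass sets ``config_name`` and forwards its init
# arguments to ``register_to_config``, after which they are exposed read-only
# under ``self.config``:
#
#     class ToyModel(ConfigMixin):
#         config_name = "toy_config.json"
#
#         def __init__(self, beta_start=0.0001, beta_end=0.02):
#             self.register_to_config(beta_start=beta_start, beta_end=beta_end)
#
#     toy = ToyModel()
#     toy.config["beta_start"]   # 0.0001
#     toy.config["_class_name"]  # "ToyModel"
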
class FrozenDict(OrderedDict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for key, value in self.items():
setattr(self, key, value)
self.__frozen = True
def __delitem__(self, *args, **kwargs):
raise Exception(
f"You cannot use ``__delitem__`` on a {self.__class__.__name__} instance."
)
def setdefault(self, *args, **kwargs):
raise Exception(
f"You cannot use ``setdefault`` on a {self.__class__.__name__} instance."
)
def pop(self, *args, **kwargs):
raise Exception(
f"You cannot use ``pop`` on a {self.__class__.__name__} instance.")
def update(self, *args, **kwargs):
raise Exception(
f"You cannot use ``update`` on a {self.__class__.__name__} instance."
)
    def __setattr__(self, name, value):
        # Note: ``self.__frozen`` is name-mangled to ``_FrozenDict__frozen``,
        # so the attribute must be looked up under its mangled name; checking
        # ``hasattr(self, "__frozen")`` would never find it.
        if getattr(self, "_FrozenDict__frozen", False):
            raise Exception(
                f"You cannot use ``__setattr__`` on a {self.__class__.__name__} instance."
            )
        super().__setattr__(name, value)

    def __setitem__(self, name, value):
        if getattr(self, "_FrozenDict__frozen", False):
            raise Exception(
                f"You cannot use ``__setitem__`` on a {self.__class__.__name__} instance."
            )
        super().__setitem__(name, value)
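
# Behavior sketch: once constructed, a FrozenDict can be read like a normal
# dict (or via attribute access), but every mutation path raises:
#
#     fd = FrozenDict({"beta_start": 0.0001})
#     fd["beta_start"]        # 0.0001
#     fd.beta_start           # 0.0001
#     fd["beta_start"] = 0.1  # raises Exception
#     fd.pop("beta_start")    # raises Exception
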
def register_to_config(init):
r"""
Decorator to apply on the init of classes inheriting from [`ConfigMixin`] so that all the arguments are
automatically sent to `self.register_for_config`. To ignore a specific argument accepted by the init but that
shouldn't be registered in the config, use the `ignore_for_config` class variable
Warning: Once decorated, all private arguments (beginning with an underscore) are trashed and not sent to the init!
"""
@functools.wraps(init)
def inner_init(self, *args, **kwargs):
# Ignore private kwargs in the init.
init_kwargs = {
k: v
for k, v in kwargs.items() if not k.startswith("_")
}
init(self, *args, **init_kwargs)
if not isinstance(self, ConfigMixin):
            raise RuntimeError(
                f"`@register_to_config` was applied to {self.__class__.__name__} init method, but this class does "
                "not inherit from `ConfigMixin`.")
ignore = getattr(self, "ignore_for_config", [])
# Get positional arguments aligned with kwargs
new_kwargs = {}
signature = inspect.signature(init)
parameters = {
name: p.default
for i, (name, p) in enumerate(signature.parameters.items())
if i > 0 and name not in ignore
}
for arg, name in zip(args, parameters.keys()):
new_kwargs[name] = arg
# Then add all kwargs
new_kwargs.update({
k: init_kwargs.get(k, default)
for k, default in parameters.items()
if k not in ignore and k not in new_kwargs
})
getattr(self, "register_to_config")(**new_kwargs)
return inner_init
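
# Minimal end-to-end sketch tying the pieces together (``ToyScheduler`` is a
# hypothetical class, only here to exercise the decorator): positional and
# keyword init arguments are captured and recorded in ``self.config``, while
# names listed in ``ignore_for_config`` are skipped.
if __name__ == "__main__":

    class ToyScheduler(ConfigMixin):
        config_name = "toy_scheduler_config.json"
        ignore_for_config = ["workspace"]

        @register_to_config
        def __init__(self, num_train_timesteps=1000, beta_start=0.0001,
                     workspace=None):
            # ``workspace`` is passed through to the init but never recorded,
            # because it appears in ``ignore_for_config``.
            self.timesteps = list(range(num_train_timesteps))

    sched = ToyScheduler(10, beta_start=0.001, workspace="/tmp")
    print(dict(sched.config))
    # -> {'num_train_timesteps': 10, 'beta_start': 0.001,
    #     '_class_name': 'ToyScheduler'}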