stable-diffusion-webui / Commits

Commit 92d7a138, authored Oct 10, 2022 by Martin Cairns; committed by AUTOMATIC1111 on Oct 11, 2022
Handle different parameters for DPM fast & adaptive
Parent: 9b8faefd
Showing 1 changed file with 18 additions and 7 deletions:

modules/sd_samplers.py (+18, -7)
modules/sd_samplers.py

@@ -57,7 +57,7 @@ def set_samplers():
     global samplers, samplers_for_img2img
 
     hidden = set(opts.hide_samplers)
-    hidden_img2img = set(opts.hide_samplers + ['PLMS', 'DPM fast', 'DPM adaptive'])
+    hidden_img2img = set(opts.hide_samplers + ['PLMS'])
 
     samplers = [x for x in all_samplers if x.name not in hidden]
     samplers_for_img2img = [x for x in all_samplers if x.name not in hidden_img2img]

@@ -365,16 +365,27 @@ class KDiffusionSampler:
         else:
             sigmas = self.model_wrap.get_sigmas(steps)
 
-        noise = noise * sigmas[steps - t_enc - 1]
-        xi = x + noise
-
-        extra_params_kwargs = self.initialize(p)
-
         sigma_sched = sigmas[steps - t_enc - 1:]
+        print('check values same', sigmas[steps - t_enc - 1], sigma_sched[0], sigmas[steps - t_enc - 1] - sigma_sched[0])
+        xi = x + noise * sigma_sched[0]
+
+        extra_params_kwargs = self.initialize(p)
+        if 'sigma_min' in inspect.signature(self.func).parameters:
+            ## last sigma is zero which is allowed by DPM Fast & Adaptive so taking value before last
+            extra_params_kwargs['sigma_min'] = sigma_sched[-2]
+        if 'sigma_max' in inspect.signature(self.func).parameters:
+            extra_params_kwargs['sigma_max'] = sigma_sched[0]
+        if 'n' in inspect.signature(self.func).parameters:
+            extra_params_kwargs['n'] = len(sigma_sched) - 1
+        if 'sigma_sched' in inspect.signature(self.func).parameters:
+            extra_params_kwargs['sigma_sched'] = sigma_sched
+        if 'sigmas' in inspect.signature(self.func).parameters:
+            extra_params_kwargs['sigmas'] = sigma_sched
 
         self.model_wrap_cfg.init_latent = x
 
-        return self.func(self.model_wrap_cfg, xi, sigma_sched, extra_args={'cond': conditioning, 'uncond': unconditional_conditioning, 'cond_scale': p.cfg_scale}, disable=False, callback=self.callback_state, **extra_params_kwargs)
+        return self.func(self.model_wrap_cfg, xi, extra_args={'cond': conditioning, 'uncond': unconditional_conditioning, 'cond_scale': p.cfg_scale}, disable=False, callback=self.callback_state, **extra_params_kwargs)
 
     def sample(self, p, x, conditioning, unconditional_conditioning, steps=None):
         steps = steps or p.steps
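The core of this change is how it copes with k-diffusion samplers that declare different keyword arguments: DPM fast and DPM adaptive want a sigma range (sigma_min, sigma_max) plus a step count n, while the other samplers take an explicit sigma schedule. Rather than special-casing each sampler, the code inspects the signature of self.func and fills extra_params_kwargs with only the parameters that function actually declares. Below is a minimal, self-contained sketch of that dispatch pattern under stated assumptions: range_sampler, schedule_sampler, and call_sampler are hypothetical stand-ins written for illustration, not the real k-diffusion functions or the webui's own code.

import inspect

def range_sampler(model, x, sigma_min, sigma_max, n):
    # Hypothetical stand-in for a DPM fast/adaptive style sampler:
    # it takes a sigma range and a step count rather than a schedule.
    return f"range sampler: {sigma_min} .. {sigma_max} in {n} steps"

def schedule_sampler(model, x, sigmas):
    # Hypothetical stand-in for a schedule-based sampler:
    # it takes the explicit list of sigmas.
    return f"schedule sampler: {len(sigmas)} sigmas"

def call_sampler(func, model, x, sigma_sched):
    # Mirror the inspect.signature checks in the commit above:
    # pass only the keyword arguments that func declares.
    kwargs = {}
    params = inspect.signature(func).parameters
    if 'sigma_min' in params:
        # The schedule ends at zero, so (as in the commit) use the
        # value before last rather than zero as the minimum sigma.
        kwargs['sigma_min'] = sigma_sched[-2]
    if 'sigma_max' in params:
        kwargs['sigma_max'] = sigma_sched[0]
    if 'n' in params:
        kwargs['n'] = len(sigma_sched) - 1
    if 'sigmas' in params:
        kwargs['sigmas'] = sigma_sched
    return func(model, x, **kwargs)

sigma_sched = [14.6, 9.7, 5.4, 2.2, 0.0]
print(call_sampler(range_sampler, None, None, sigma_sched))     # uses sigma_min/sigma_max/n
print(call_sampler(schedule_sampler, None, None, sigma_sched))  # uses sigmas

With this in place, one call site can drive both families of samplers, which is why the second hunk can drop the positional sigma_sched argument from the final self.func call and rely on **extra_params_kwargs instead.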