stable-diffusion-webui / Commits / 122d4268

Commit 122d4268, authored Oct 09, 2022 by Fampai, committed by AUTOMATIC1111 on Oct 09, 2022
Fix VRAM Issue by only loading in hypernetwork when selected in settings
parent e00b4df7
Showing 4 changed files with 23 additions and 16 deletions:
modules/hypernetwork.py             +15 -8
modules/sd_hijack_optimizations.py   +3 -3
modules/shared.py                    +2 -5
webui.py                             +3 -0
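Before this commit, startup deserialized every hypernetwork checkpoint found on disk, so all of them occupied memory whether or not one was selected. The commit splits that into a cheap enumeration step at startup and an on-demand load of the single network chosen via the sd_hypernetwork setting. A minimal sketch of the pattern, with invented names (list_checkpoints, select, and loaded are illustrative, not the repo's API):

import glob
import os
import torch

def list_checkpoints(path):
    # Cheap: map checkpoint names to file paths; nothing is deserialized.
    return {os.path.splitext(os.path.basename(fn))[0]: fn
            for fn in glob.iglob(os.path.join(path, '**/*.pt'), recursive=True)}

loaded = None  # at most one network's weights are resident at a time

def select(name, checkpoints):
    # Expensive: torch.load runs only for the one selected checkpoint.
    global loaded
    path = checkpoints.get(name)
    loaded = torch.load(path) if path is not None else None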
modules/hypernetwork.py
@@ -40,18 +40,25 @@ class Hypernetwork:
         self.layers[size] = (HypernetworkModule(size, sd[0]), HypernetworkModule(size, sd[1]))
 
 
-def load_hypernetworks(path):
+def list_hypernetworks(path):
     res = {}
-
     for filename in glob.iglob(os.path.join(path, '**/*.pt'), recursive=True):
+        name = os.path.splitext(os.path.basename(filename))[0]
+        res[name] = filename
+    return res
+
+
+def load_hypernetwork(filename):
+    print(f"Loading hypernetwork {filename}")
+    path = shared.hypernetworks.get(filename, None)
+    if (path is not None):
         try:
-            hn = Hypernetwork(filename)
-            res[hn.name] = hn
+            shared.loaded_hypernetwork = Hypernetwork(path)
         except Exception:
-            print(f"Error loading hypernetwork {filename}", file=sys.stderr)
+            print(f"Error loading hypernetwork {path}", file=sys.stderr)
             print(traceback.format_exc(), file=sys.stderr)
-
-    return res
+    else:
+        shared.loaded_hypernetwork = None
 
 
 def attention_CrossAttention_forward(self, x, context=None, mask=None):

@@ -60,7 +67,7 @@ def attention_CrossAttention_forward(self, x, context=None, mask=None):
     q = self.to_q(x)
     context = default(context, x)
 
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:
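With the split above, the module exposes two functions: list_hypernetworks, which only maps names to paths, and load_hypernetwork, which deserializes the selected checkpoint into shared.loaded_hypernetwork (or clears it). Roughly, assuming the default models directory and an illustrative checkpoint name:

from modules import hypernetwork, shared

# Startup: enumerate available .pt files without loading any weights.
shared.hypernetworks = hypernetwork.list_hypernetworks('models/hypernetworks')
# e.g. {'anime': 'models/hypernetworks/anime.pt'}

# When the user picks one in settings:
hypernetwork.load_hypernetwork('anime')    # sets shared.loaded_hypernetwork
hypernetwork.load_hypernetwork('unknown')  # no match -> shared.loaded_hypernetwork = None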
modules/sd_hijack_optimizations.py
@@ -28,7 +28,7 @@ def split_cross_attention_forward_v1(self, x, context=None, mask=None):
     q_in = self.to_q(x)
     context = default(context, x)
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:

@@ -68,7 +68,7 @@ def split_cross_attention_forward(self, x, context=None, mask=None):
     q_in = self.to_q(x)
     context = default(context, x)
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:

@@ -132,7 +132,7 @@ def xformers_attention_forward(self, x, context=None, mask=None):
     h = self.heads
     q_in = self.to_q(x)
     context = default(context, x)
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
     if hypernetwork_layers is not None:
         k_in = self.to_k(hypernetwork_layers[0](context))
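All three optimized attention paths make the same one-line change: instead of calling shared.selected_hypernetwork(), which re-resolved the network on every forward pass, they read the shared.loaded_hypernetwork slot. The loaded network carries a pair of modules per context width that rewrite the context before the key/value projections; a self-contained toy version of that lookup, with an invented class and sizes (ToyHypernetwork is not the repo's class):

import torch
import torch.nn as nn

class ToyHypernetwork:
    def __init__(self, width):
        # One (k, v) module pair, keyed by the context's feature width.
        self.layers = {width: (nn.Linear(width, width), nn.Linear(width, width))}

hn = ToyHypernetwork(768)            # stand-in for shared.loaded_hypernetwork
context = torch.randn(1, 77, 768)

layers = (hn.layers if hn is not None else {}).get(context.shape[2], None)
if layers is not None:
    k_context = layers[0](context)   # feeds self.to_k in the real forwards
    v_context = layers[1](context)   # feeds self.to_v (the v branch is outside this diff)
else:
    k_context = v_context = context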
modules/shared.py
@@ -79,11 +79,8 @@ parallel_processing_allowed = not cmd_opts.lowvram and not cmd_opts.medvram
 xformers_available = False
 config_filename = cmd_opts.ui_settings_file
 
-hypernetworks = hypernetwork.load_hypernetworks(os.path.join(models_path, 'hypernetworks'))
-
-
-def selected_hypernetwork():
-    return hypernetworks.get(opts.sd_hypernetwork, None)
+hypernetworks = hypernetwork.list_hypernetworks(os.path.join(models_path, 'hypernetworks'))
+loaded_hypernetwork = None
 
 
 class State:
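This is where the VRAM fix lands: the eager load_hypernetworks call that deserialized every checkpoint at import time becomes a list_hypernetworks call that only records paths, and the module-level loaded_hypernetwork slot (initially None) holds at most one network's weights.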
webui.py
@@ -82,6 +82,9 @@ modules.scripts.load_scripts(os.path.join(script_path, "scripts"))
 shared.sd_model = modules.sd_models.load_model()
 shared.opts.onchange("sd_model_checkpoint", wrap_queued_call(lambda: modules.sd_models.reload_model_weights(shared.sd_model)))
 
+loaded_hypernetwork = modules.hypernetwork.load_hypernetwork(shared.opts.sd_hypernetwork)
+shared.opts.onchange("sd_hypernetwork", wrap_queued_call(lambda: modules.hypernetwork.load_hypernetwork(shared.opts.sd_hypernetwork)))
+
 
 def webui():
     # make the program just exit at ctrl+c without waiting for anything
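The wiring in webui.py loads the currently selected hypernetwork once at startup and registers an onchange handler so that changing the sd_hypernetwork setting swaps networks without a restart. A minimal sketch of that observer pattern, using a simplified stand-in for the repo's Options class (the internals shown are assumptions, not the real implementation):

class Options:
    def __init__(self):
        self.data = {}
        self.handlers = {}

    def onchange(self, key, func):
        # Register a callback fired whenever `key` is assigned a new value.
        self.handlers[key] = func

    def set(self, key, value):
        self.data[key] = value
        if key in self.handlers:
            self.handlers[key]()

opts = Options()
opts.onchange("sd_hypernetwork", lambda: print("reload hypernetwork"))
opts.set("sd_hypernetwork", "anime")  # triggers the reload callback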