Administrator / stable-diffusion-webui · Commit c702d4d0 (unverified)
Authored Oct 26, 2022 by guaneec; committed by GitHub on Oct 26, 2022
Fix off-by-one
Parent: 2f4c9189
Showing 1 changed file with 2 additions and 2 deletions:
modules/hypernetworks/hypernetwork.py (+2 −2)
@@ -42,7 +42,7 @@ class HypernetworkModule(torch.nn.Module):
             linears.append(torch.nn.Linear(int(dim * layer_structure[i]), int(dim * layer_structure[i+1])))
 
             # Add an activation func except last layer
-            if activation_func == "linear" or activation_func is None or i >= len(layer_structure) - 3:
+            if activation_func == "linear" or activation_func is None or i >= len(layer_structure) - 2:
                 pass
             elif activation_func in self.activation_dict:
                 linears.append(self.activation_dict[activation_func]())
@@ -54,7 +54,7 @@ class HypernetworkModule(torch.nn.Module):
             linears.append(torch.nn.LayerNorm(int(dim * layer_structure[i+1])))
 
             # Add dropout except last layer
-            if use_dropout and i < len(layer_structure) - 3:
+            if use_dropout and i < len(layer_structure) - 2:
                 linears.append(torch.nn.Dropout(p=0.3))
 
         self.linear = torch.nn.Sequential(*linears)
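For context on the off-by-one: the layer_structure[i + 1] indexing implies the surrounding loop (not shown in the hunks) runs i over range(len(layer_structure) - 1), so the final Linear layer is built at i == len(layer_structure) - 2. The old bound of "- 3" therefore skipped the activation (and dropout) on the last two layers rather than just the last one. The sketch below illustrates that arithmetic; it is not the project's actual HypernetworkModule — build_layers, the fixed dim=4, and the ReLU stand-in for activation_dict are illustrative assumptions.

import torch

def build_layers(layer_structure, tail_offset, dim=4):
    # Mirrors the loop shape from the diff: one Linear per adjacent pair
    # in layer_structure, with an activation after each layer except the
    # tail. tail_offset is the constant under discussion (3 before the
    # fix, 2 after); ReLU stands in for the activation_dict lookup.
    linears = []
    for i in range(len(layer_structure) - 1):
        linears.append(torch.nn.Linear(int(dim * layer_structure[i]),
                                       int(dim * layer_structure[i + 1])))
        if i >= len(layer_structure) - tail_offset:
            pass  # meant to skip the activation on the last layer only
        else:
            linears.append(torch.nn.ReLU())
    return torch.nn.Sequential(*linears)

structure = [1, 2, 1]  # two Linear layers in total; the loop runs i = 0, 1
print(build_layers(structure, 3))  # old bound: i >= 0 always, no activation at all
print(build_layers(structure, 2))  # fixed bound: ReLU after the first Linear only

The same arithmetic applies to the dropout hunk: since the last layer sits at i == len(layer_structure) - 2, the condition i < len(layer_structure) - 2 adds dropout after every layer except the last, which is what the "except last layer" comments intend.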