novelai-storage / Stable Diffusion Webui

Commit 7912acef
Authored Oct 22, 2022 by discus0434

small fix

Parent: fccba472

Showing 2 changed files with 5 additions and 8 deletions

modules/hypernetworks/hypernetwork.py  +5 −7
modules/ui.py  +0 −1

modules/hypernetworks/hypernetwork.py

@@ -42,22 +42,20 @@ class HypernetworkModule(torch.nn.Module):
             # Add an activation func
             if activation_func == "linear" or activation_func is None:
                 pass
-            # If ReLU, Skip adding it to the first layer to avoid dying ReLU
-            elif activation_func == "relu" and i < 1:
-                pass
             elif activation_func in self.activation_dict:
                 linears.append(self.activation_dict[activation_func]())
             else:
                 raise RuntimeError(f'hypernetwork uses an unsupported activation function: {activation_func}')
 
-            # Add dropout
-            if use_dropout:
-                linears.append(torch.nn.Dropout(p=0.3))
-
             # Add layer normalization
             if add_layer_norm:
                 linears.append(torch.nn.LayerNorm(int(dim * layer_structure[i+1])))
 
+            # Add dropout
+            if use_dropout:
+                p = 0.5 if 0 <= i <= len(layer_structure) - 3 else 0.2
+                linears.append(torch.nn.Dropout(p=p))
+
         self.linear = torch.nn.Sequential(*linears)
 
         if state_dict is not None:
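
With this change the dropout layer is appended after layer normalization, and its probability is no longer a fixed 0.3 but depends on the layer index. A minimal sketch of how the added expression assigns probabilities (the layer_structure value [1, 2, 2, 1] below is a hypothetical example, not part of the commit):

# Illustration only: behavior of the added per-layer dropout probability.
# layer_structure here is an example value, not taken from the commit.
layer_structure = [1, 2, 2, 1]

for i in range(len(layer_structure) - 1):
    # Same expression as the added line: inner layers get p=0.5,
    # the final linear (feeding the output) gets a lighter p=0.2.
    p = 0.5 if 0 <= i <= len(layer_structure) - 3 else 0.2
    print(f"layer {i}: Dropout(p={p})")

# Prints:
# layer 0: Dropout(p=0.5)
# layer 1: Dropout(p=0.5)
# layer 2: Dropout(p=0.2)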

modules/ui.py

@@ -1244,7 +1244,6 @@ def create_ui(wrap_gradio_gpu_call):
                 new_hypernetwork_add_layer_norm = gr.Checkbox(label="Add layer normalization")
                 new_hypernetwork_use_dropout = gr.Checkbox(label="Use dropout")
                 overwrite_old_hypernetwork = gr.Checkbox(value=False, label="Overwrite Old Hypernetwork")
-                new_hypernetwork_activation_func = gr.Dropdown(value="relu", label="Select activation function of hypernetwork", choices=["linear", "relu", "leakyrelu"])
 
                 with gr.Row():
                     with gr.Column(scale=3):