novelai-storage / Stable Diffusion Webui / Commits

Commit a26fc283, authored Oct 21, 2022 by AUTOMATIC1111, committed by GitHub on Oct 21, 2022

Merge pull request #3199 from discus0434/master

Add features to insert activation functions to hypernetworks

Parents: 12a97c53, f8733ad0

Showing 3 changed files with 23 additions and 11 deletions:

- modules/hypernetworks/hypernetwork.py (+19, -10)
- modules/hypernetworks/ui.py (+2, -1)
- modules/ui.py (+2, -0)
modules/hypernetworks/hypernetwork.py (+19, -10)
```diff
@@ -22,16 +22,20 @@ from modules.textual_inversion.learn_schedule import LearnRateScheduler

 class HypernetworkModule(torch.nn.Module):
     multiplier = 1.0

-    def __init__(self, dim, state_dict=None, layer_structure=None, add_layer_norm=False):
+    def __init__(self, dim, state_dict=None, layer_structure=None, add_layer_norm=False, activation_func=None):
         super().__init__()

-        assert layer_structure is not None, "layer_structure mut not be None"
+        assert layer_structure is not None, "layer_structure must not be None"
         assert layer_structure[0] == 1, "Multiplier Sequence should start with size 1!"
         assert layer_structure[-1] == 1, "Multiplier Sequence should end with size 1!"

         linears = []
         for i in range(len(layer_structure) - 1):
             linears.append(torch.nn.Linear(int(dim * layer_structure[i]), int(dim * layer_structure[i+1])))
+            if activation_func == "relu":
+                linears.append(torch.nn.ReLU())
+            if activation_func == "leakyrelu":
+                linears.append(torch.nn.LeakyReLU())
             if add_layer_norm:
                 linears.append(torch.nn.LayerNorm(int(dim * layer_structure[i+1])))
```
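For orientation, here is a minimal sketch (not part of the diff) of the stack the amended loop builds; `dim = 768` and `layer_structure = [1, 2, 1]` are illustrative values taken from the UI defaults:

```python
import torch

# Sketch of the construction loop above for layer_structure=[1, 2, 1],
# activation_func="relu", add_layer_norm=False. dim is an illustrative value.
dim = 768
layer_structure = [1, 2, 1]

linears = []
for i in range(len(layer_structure) - 1):
    linears.append(torch.nn.Linear(int(dim * layer_structure[i]), int(dim * layer_structure[i + 1])))
    linears.append(torch.nn.ReLU())  # the "relu" branch fires on every iteration

print(torch.nn.Sequential(*linears))
# Sequential(
#   (0): Linear(in_features=768, out_features=1536, bias=True)
#   (1): ReLU()
#   (2): Linear(in_features=1536, out_features=768, bias=True)
#   (3): ReLU()
# )
```

Note that, as written, the branch also appends an activation after the final Linear layer, so the module's output passes through the nonlinearity as well.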
```diff
@@ -42,8 +46,9 @@ class HypernetworkModule(torch.nn.Module):
             self.load_state_dict(state_dict)
         else:
             for layer in self.linear:
-                layer.weight.data.normal_(mean=0.0, std=0.01)
-                layer.bias.data.zero_()
+                if not "ReLU" in layer.__str__():
+                    layer.weight.data.normal_(mean=0.0, std=0.01)
+                    layer.bias.data.zero_()

         self.to(devices.device)
```
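The guard works because both activation classes this commit can insert render with "ReLU" in their repr, while Linear and LayerNorm (which do carry weight and bias) do not. A quick check:

```python
import torch

# Activation modules have no weight/bias to initialize, so the diff skips
# them by inspecting each module's string form.
for layer in [torch.nn.Linear(4, 4), torch.nn.LayerNorm(4), torch.nn.ReLU(), torch.nn.LeakyReLU()]:
    print(str(layer), "->", "ReLU" in str(layer))
# Linear(in_features=4, out_features=4, bias=True) -> False
# LayerNorm((4,), eps=1e-05, elementwise_affine=True) -> False
# ReLU() -> True
# LeakyReLU(negative_slope=0.01) -> True
```

An `isinstance(layer, (torch.nn.ReLU, torch.nn.LeakyReLU))` check would express the same intent more directly; the string test is what the commit uses.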
```diff
@@ -69,7 +74,8 @@ class HypernetworkModule(torch.nn.Module):
     def trainables(self):
         layer_structure = []
         for layer in self.linear:
-            layer_structure += [layer.weight, layer.bias]
+            if not "ReLU" in layer.__str__():
+                layer_structure += [layer.weight, layer.bias]
         return layer_structure

@@ -81,7 +87,7 @@ class Hypernetwork:
     filename = None
     name = None

-    def __init__(self, name=None, enable_sizes=None, layer_structure=None, add_layer_norm=False):
+    def __init__(self, name=None, enable_sizes=None, layer_structure=None, add_layer_norm=False, activation_func=None):
         self.filename = None
         self.name = name
         self.layers = {}

@@ -90,11 +96,12 @@ class Hypernetwork:
         self.sd_checkpoint_name = None
         self.layer_structure = layer_structure
         self.add_layer_norm = add_layer_norm
+        self.activation_func = activation_func

         for size in enable_sizes or []:
             self.layers[size] = (
-                HypernetworkModule(size, None, self.layer_structure, self.add_layer_norm),
-                HypernetworkModule(size, None, self.layer_structure, self.add_layer_norm),
+                HypernetworkModule(size, None, self.layer_structure, self.add_layer_norm, self.activation_func),
+                HypernetworkModule(size, None, self.layer_structure, self.add_layer_norm, self.activation_func),
             )

     def weights(self):

@@ -117,6 +124,7 @@ class Hypernetwork:
         state_dict['name'] = self.name
         state_dict['layer_structure'] = self.layer_structure
         state_dict['is_layer_norm'] = self.add_layer_norm
+        state_dict['activation_func'] = self.activation_func
         state_dict['sd_checkpoint'] = self.sd_checkpoint
         state_dict['sd_checkpoint_name'] = self.sd_checkpoint_name
```
```diff
@@ -131,12 +139,13 @@ class Hypernetwork:
         self.layer_structure = state_dict.get('layer_structure', [1, 2, 1])
         self.add_layer_norm = state_dict.get('is_layer_norm', False)
+        self.activation_func = state_dict.get('activation_func', None)

         for size, sd in state_dict.items():
             if type(size) == int:
                 self.layers[size] = (
-                    HypernetworkModule(size, sd[0], self.layer_structure, self.add_layer_norm),
-                    HypernetworkModule(size, sd[1], self.layer_structure, self.add_layer_norm),
+                    HypernetworkModule(size, sd[0], self.layer_structure, self.add_layer_norm, self.activation_func),
+                    HypernetworkModule(size, sd[1], self.layer_structure, self.add_layer_norm, self.activation_func),
                 )

         self.name = state_dict.get('name', self.name)
```
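Because the load path falls back through `dict.get`, hypernetworks saved before this commit still load. A minimal sketch of that fallback (the filename is hypothetical; the keys mirror what `Hypernetwork.save` writes):

```python
import torch

# Hypothetical checkpoint path for illustration.
state_dict = torch.load("old_hypernetwork.pt", map_location="cpu")

# Checkpoints written before this commit lack 'activation_func', so the
# fallback of None reproduces the previous purely linear behaviour.
activation_func = state_dict.get('activation_func', None)
layer_structure = state_dict.get('layer_structure', [1, 2, 1])
print(activation_func, layer_structure)
```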
modules/hypernetworks/ui.py (+2, -1)
```diff
@@ -10,7 +10,7 @@ from modules import sd_hijack, shared, devices
 from modules.hypernetworks import hypernetwork


-def create_hypernetwork(name, enable_sizes, layer_structure=None, add_layer_norm=False):
+def create_hypernetwork(name, enable_sizes, layer_structure=None, add_layer_norm=False, activation_func=None):
     fn = os.path.join(shared.cmd_opts.hypernetwork_dir, f"{name}.pt")
     assert not os.path.exists(fn), f"file {fn} already exists"

@@ -22,6 +22,7 @@ def create_hypernetwork(name, enable_sizes, layer_structure=None, add_layer_norm=False):
         enable_sizes=[int(x) for x in enable_sizes],
         layer_structure=layer_structure,
         add_layer_norm=add_layer_norm,
+        activation_func=activation_func,
     )
     hypernet.save(fn)
```
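A hedged usage sketch of the widened entry point; the name is hypothetical, the other values mirror the UI defaults, and it assumes the webui environment is importable (so `shared.cmd_opts.hypernetwork_dir` is set):

```python
from modules.hypernetworks.ui import create_hypernetwork

# Illustrative call; in the webui these arguments arrive from the Gradio form.
create_hypernetwork(
    name="example-hypernet",                     # hypothetical; "<name>.pt" must not already exist
    enable_sizes=["768", "320", "640", "1280"],  # cast to int inside the function
    layer_structure=[1, 2, 1],
    add_layer_norm=False,
    activation_func="relu",                      # the new parameter added by this commit
)
```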
modules/ui.py (+2, -0)
```diff
@@ -1224,6 +1224,7 @@ def create_ui(wrap_gradio_gpu_call):
                 new_hypernetwork_sizes = gr.CheckboxGroup(label="Modules", value=["768", "320", "640", "1280"], choices=["768", "320", "640", "1280"])
                 new_hypernetwork_layer_structure = gr.Textbox("1, 2, 1", label="Enter hypernetwork layer structure", placeholder="1st and last digit must be 1. ex:'1, 2, 1'")
                 new_hypernetwork_add_layer_norm = gr.Checkbox(label="Add layer normalization")
+                new_hypernetwork_activation_func = gr.Dropdown(value="relu", label="Select activation function of hypernetwork", choices=["linear", "relu", "leakyrelu"])

                 with gr.Row():
                     with gr.Column(scale=3):

@@ -1308,6 +1309,7 @@ def create_ui(wrap_gradio_gpu_call):
                 new_hypernetwork_sizes,
                 new_hypernetwork_layer_structure,
                 new_hypernetwork_add_layer_norm,
+                new_hypernetwork_activation_func,
             ],
             outputs=[
                 train_hypernetwork_name,
```
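For reference, a standalone sketch of the new control, assuming the Gradio 3.x Blocks API the webui used at the time:

```python
import gradio as gr

# Mirrors the gr.Dropdown added above; in the webui it is created inside
# create_ui and forwarded to create_hypernetwork via the inputs list.
with gr.Blocks() as demo:
    new_hypernetwork_activation_func = gr.Dropdown(
        value="relu",
        label="Select activation function of hypernetwork",
        choices=["linear", "relu", "leakyrelu"],
    )
```

Selecting "linear" matches neither branch in `HypernetworkModule.__init__`, so no activation module is appended and the hypernetwork behaves as it did before this commit.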