novelai-storage / Stable Diffusion Webui / Commits

Commit 7cb31a27
authored Jan 28, 2023 by AUTOMATIC

initial work on SD2 Lora support
parents 91c8d0dc 2abd89ac
Showing 2 changed files with 24 additions and 2 deletions
extensions-builtin/Lora/lora.py                  +19  -2
extensions-builtin/Lora/scripts/lora_script.py    +5  -0
extensions-builtin/Lora/lora.py (view file @ 7cb31a27)
@@ -12,7 +12,7 @@ re_unet_up_blocks = re.compile(r"lora_unet_up_blocks_(\d+)_attentions_(\d+)_(.+)")
 re_text_block = re.compile(r"lora_te_text_model_encoder_layers_(\d+)_(.+)")


-def convert_diffusers_name_to_compvis(key):
+def convert_diffusers_name_to_compvis(key, is_sd2):
     def match(match_list, regex):
         r = re.match(regex, key)
         if not r:
@@ -34,6 +34,14 @@ def convert_diffusers_name_to_compvis(key):
         return f"diffusion_model_output_blocks_{m[0] * 3 + m[1]}_1_{m[2]}"

     if match(m, re_text_block):
+        if is_sd2:
+            if 'mlp_fc1' in m[1]:
+                return f"model_transformer_resblocks_{m[0]}_{m[1].replace('mlp_fc1', 'mlp_c_fc')}"
+            elif 'mlp_fc2' in m[1]:
+                return f"model_transformer_resblocks_{m[0]}_{m[1].replace('mlp_fc2', 'mlp_c_proj')}"
+            else:
+                return f"model_transformer_resblocks_{m[0]}_{m[1].replace('self_attn', 'attn')}"
+
         return f"transformer_text_model_encoder_layers_{m[0]}_{m[1]}"

     return key
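For SD2 checkpoints the text encoder follows the open_clip layout rather than SD1's CLIP transformer, so diffusers-style lora_te_text_model_encoder_layers_* keys are renamed to model_transformer_resblocks_*, with the submodules renamed as well (mlp_fc1 -> mlp_c_fc, mlp_fc2 -> mlp_c_proj, self_attn -> attn). A minimal standalone sketch of that translation, using a hypothetical example key (not part of the extension code):

    import re

    re_text_block = re.compile(r"lora_te_text_model_encoder_layers_(\d+)_(.+)")

    def text_block_to_compvis(key, is_sd2):
        # Illustrative re-implementation of the text-encoder branch above.
        m = re_text_block.match(key)
        if not m:
            return key
        idx, rest = m[1], m[2]
        if is_sd2:
            if 'mlp_fc1' in rest:
                return f"model_transformer_resblocks_{idx}_{rest.replace('mlp_fc1', 'mlp_c_fc')}"
            elif 'mlp_fc2' in rest:
                return f"model_transformer_resblocks_{idx}_{rest.replace('mlp_fc2', 'mlp_c_proj')}"
            return f"model_transformer_resblocks_{idx}_{rest.replace('self_attn', 'attn')}"
        return f"transformer_text_model_encoder_layers_{idx}_{rest}"

    key = "lora_te_text_model_encoder_layers_0_mlp_fc1.lora_up.weight"   # hypothetical key
    print(text_block_to_compvis(key, is_sd2=False))  # transformer_text_model_encoder_layers_0_mlp_fc1.lora_up.weight
    print(text_block_to_compvis(key, is_sd2=True))   # model_transformer_resblocks_0_mlp_c_fc.lora_up.weight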
@@ -83,9 +91,10 @@ def load_lora(name, filename):
     sd = sd_models.read_state_dict(filename)

     keys_failed_to_match = []
+    is_sd2 = 'model_transformer_resblocks' in shared.sd_model.lora_layer_mapping

     for key_diffusers, weight in sd.items():
-        fullkey = convert_diffusers_name_to_compvis(key_diffusers)
+        fullkey = convert_diffusers_name_to_compvis(key_diffusers, is_sd2)
         key, lora_key = fullkey.split(".", 1)

         sd_module = shared.sd_model.lora_layer_mapping.get(key, None)
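Whether the checkpoint is SD2 is inferred from the loaded model rather than from the LoRA file: lora_layer_mapping is a dict keyed by module names, and for an open_clip text encoder it includes an entry for the resblocks container itself ('model_transformer_resblocks'), so the plain key-membership test above suffices. A trivial sketch with hypothetical mappings (the real ones are built elsewhere in the extension from the model's named modules):

    # Hypothetical layer mappings; container modules get entries of their own.
    sd1_mapping = {"transformer_text_model_encoder_layers": None,
                   "transformer_text_model_encoder_layers_0_mlp_fc1": None}
    sd2_mapping = {"model_transformer_resblocks": None,
                   "model_transformer_resblocks_0_mlp_c_fc": None}

    # Same membership test as in load_lora above ('in' checks dict keys exactly).
    print('model_transformer_resblocks' in sd1_mapping)  # False
    print('model_transformer_resblocks' in sd2_mapping)  # True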
@@ -104,9 +113,13 @@ def load_lora(name, filename):

         if type(sd_module) == torch.nn.Linear:
             module = torch.nn.Linear(weight.shape[1], weight.shape[0], bias=False)
+        elif type(sd_module) == torch.nn.modules.linear.NonDynamicallyQuantizableLinear:
+            module = torch.nn.modules.linear.NonDynamicallyQuantizableLinear(weight.shape[1], weight.shape[0], bias=False)
         elif type(sd_module) == torch.nn.Conv2d:
             module = torch.nn.Conv2d(weight.shape[1], weight.shape[0], (1, 1), bias=False)
         else:
+            print(f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}')
+            continue
             assert False, f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}'

         with torch.no_grad():
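torch.nn.modules.linear.NonDynamicallyQuantizableLinear is the Linear subclass PyTorch uses for the out_proj of torch.nn.MultiheadAttention, which is how it ends up in SD2's open_clip text encoder layers. Because it is a subclass, the exact type(...) == torch.nn.Linear comparison above does not match it, hence the dedicated elif. A quick check (assuming a reasonably recent PyTorch):

    import torch

    mha = torch.nn.MultiheadAttention(embed_dim=64, num_heads=4)
    print(type(mha.out_proj))
    # <class 'torch.nn.modules.linear.NonDynamicallyQuantizableLinear'>
    print(isinstance(mha.out_proj, torch.nn.Linear))  # True: it subclasses Linear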
@@ -182,6 +195,10 @@ def lora_Conv2d_forward(self, input):
     return lora_forward(self, input, torch.nn.Conv2d_forward_before_lora(self, input))


+def lora_NonDynamicallyQuantizableLinear_forward(self, input):
+    return lora_forward(self, input, torch.nn.NonDynamicallyQuantizableLinear_forward_before_lora(self, input))
+
+
 def list_available_loras():
     available_loras.clear()
extensions-builtin/Lora/scripts/lora_script.py (view file @ 7cb31a27)
@@ -10,6 +10,7 @@ from modules import script_callbacks, ui_extra_networks, extra_networks, shared
 def unload():
     torch.nn.Linear.forward = torch.nn.Linear_forward_before_lora
     torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_lora
+    torch.nn.modules.linear.NonDynamicallyQuantizableLinear.forward = torch.nn.NonDynamicallyQuantizableLinear_forward_before_lora


 def before_ui():
@@ -23,8 +24,12 @@ if not hasattr(torch.nn, 'Linear_forward_before_lora'):
 if not hasattr(torch.nn, 'Conv2d_forward_before_lora'):
     torch.nn.Conv2d_forward_before_lora = torch.nn.Conv2d.forward

+if not hasattr(torch.nn, 'NonDynamicallyQuantizableLinear_forward_before_lora'):
+    torch.nn.NonDynamicallyQuantizableLinear_forward_before_lora = torch.nn.modules.linear.NonDynamicallyQuantizableLinear.forward
+
 torch.nn.Linear.forward = lora.lora_Linear_forward
 torch.nn.Conv2d.forward = lora.lora_Conv2d_forward
+torch.nn.modules.linear.NonDynamicallyQuantizableLinear.forward = lora.lora_NonDynamicallyQuantizableLinear_forward

 script_callbacks.on_model_loaded(lora.assign_lora_names_to_compvis_modules)
 script_callbacks.on_script_unloaded(unload)
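The script applies LoRA by monkey-patching the forward methods of the three layer classes, stashing the originals as extra attributes on torch.nn so unload() can restore them; the hasattr guards make sure a script reload does not overwrite the saved originals with already-patched versions. A minimal sketch of the same save/patch/restore pattern (attribute and function names here are illustrative, not the extension's):

    import torch

    def patched_linear_forward(self, input):
        # Call the saved original; the real extension then adds the LoRA delta on top.
        return torch.nn.Linear_forward_before_patch(self, input)

    # Save the original only once so repeated patching stays idempotent.
    if not hasattr(torch.nn, 'Linear_forward_before_patch'):
        torch.nn.Linear_forward_before_patch = torch.nn.Linear.forward
    torch.nn.Linear.forward = patched_linear_forward

    layer = torch.nn.Linear(4, 4)
    print(layer(torch.zeros(1, 4)).shape)  # still torch.Size([1, 4])

    # Restore, as unload() does.
    torch.nn.Linear.forward = torch.nn.Linear_forward_before_patch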