Stable Diffusion Webui
Commit 2aa485b5, authored Oct 09, 2023 by Kohaku-Blueleaf
Parent: 7d60076b

add lora bundle system
Showing 2 changed files with 49 additions and 0 deletions:

    extensions-builtin/Lora/network.py    +1  -0
    extensions-builtin/Lora/networks.py   +48 -0
extensions-builtin/Lora/network.py

@@ -93,6 +93,7 @@ class Network:  # LoraModule
         self.unet_multiplier = 1.0
         self.dyn_dim = None
         self.modules = {}
+        self.bundle_embeddings = {}
         self.mtime = None

         self.mentioned_name = None
extensions-builtin/Lora/networks.py

@@ -15,6 +15,7 @@ import torch
 from typing import Union

 from modules import shared, devices, sd_models, errors, scripts, sd_hijack
+from modules.textual_inversion.textual_inversion import Embedding

 module_types = [
     network_lora.ModuleTypeLora(),
@@ -149,9 +150,15 @@ def load_network(name, network_on_disk):
     is_sd2 = 'model_transformer_resblocks' in shared.sd_model.network_layer_mapping

     matched_networks = {}
+    bundle_embeddings = {}

     for key_network, weight in sd.items():
         key_network_without_network_parts, network_part = key_network.split(".", 1)
+        if key_network_without_network_parts == "bundle_emb":
+            emb_name, vec_name = network_part.split(".", 1)
+            emb_dict = bundle_embeddings.get(emb_name, {})
+            emb_dict[vec_name] = weight
+            bundle_embeddings[emb_name] = emb_dict

         key = convert_diffusers_name_to_compvis(key_network_without_network_parts, is_sd2)
         sd_module = shared.sd_model.network_layer_mapping.get(key, None)
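
For illustration, the key-grouping pass above behaves like this standalone sketch; the state-dict keys and tensor shapes here are hypothetical examples, not taken from the commit:

import torch

# Sketch of the "bundle_emb" grouping in load_network(); keys and shapes are made up.
sd = {
    "bundle_emb.my_style.emb_params": torch.zeros(4, 768),                   # bundled embedding
    "lora_unet_mid_block_attentions_0.lora_up.weight": torch.zeros(320, 8),  # ordinary LoRA key
}

bundle_embeddings = {}
for key_network, weight in sd.items():
    key_network_without_network_parts, network_part = key_network.split(".", 1)
    if key_network_without_network_parts == "bundle_emb":
        emb_name, vec_name = network_part.split(".", 1)
        emb_dict = bundle_embeddings.get(emb_name, {})
        emb_dict[vec_name] = weight
        bundle_embeddings[emb_name] = emb_dict

print(bundle_embeddings)   # {'my_style': {'emb_params': <tensor of shape (4, 768)>}}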
@@ -195,6 +202,8 @@ def load_network(name, network_on_disk):
         net.modules[key] = net_module

+    net.bundle_embeddings = bundle_embeddings
+
     if keys_failed_to_match:
         logging.debug(f"Network {network_on_disk.filename} didn't match keys: {keys_failed_to_match}")
@@ -210,11 +219,14 @@ def purge_networks_from_memory():
 def load_networks(names, te_multipliers=None, unet_multipliers=None, dyn_dims=None):
+    emb_db = sd_hijack.model_hijack.embedding_db
     already_loaded = {}

     for net in loaded_networks:
         if net.name in names:
             already_loaded[net.name] = net
+            for emb_name in net.bundle_embeddings:
+                emb_db.register_embedding_by_name(None, shared.sd_model, emb_name)

     loaded_networks.clear()
@@ -257,6 +269,41 @@ def load_networks(names, te_multipliers=None, unet_multipliers=None, dyn_dims=No
         net.dyn_dim = dyn_dims[i] if dyn_dims else 1.0
         loaded_networks.append(net)
+        for emb_name, data in net.bundle_embeddings.items():
+            # textual inversion embeddings
+            if 'string_to_param' in data:
+                param_dict = data['string_to_param']
+                param_dict = getattr(param_dict, '_parameters', param_dict)  # fix for torch 1.12.1 loading saved file from torch 1.11
+                assert len(param_dict) == 1, 'embedding file has multiple terms in it'
+                emb = next(iter(param_dict.items()))[1]
+                vec = emb.detach().to(devices.device, dtype=torch.float32)
+                shape = vec.shape[-1]
+                vectors = vec.shape[0]
+            elif type(data) == dict and 'clip_g' in data and 'clip_l' in data:
+                # SDXL embedding
+                vec = {k: v.detach().to(devices.device, dtype=torch.float32) for k, v in data.items()}
+                shape = data['clip_g'].shape[-1] + data['clip_l'].shape[-1]
+                vectors = data['clip_g'].shape[0]
+            elif type(data) == dict and type(next(iter(data.values()))) == torch.Tensor:
+                # diffuser concepts
+                assert len(data.keys()) == 1, 'embedding file has multiple terms in it'
+                emb = next(iter(data.values()))
+                if len(emb.shape) == 1:
+                    emb = emb.unsqueeze(0)
+                vec = emb.detach().to(devices.device, dtype=torch.float32)
+                shape = vec.shape[-1]
+                vectors = vec.shape[0]
+            else:
+                raise Exception(f"Couldn't identify {emb_name} in lora: {name} as neither textual inversion embedding nor diffuser concept.")
+
+            embedding = Embedding(vec, emb_name)
+            embedding.vectors = vectors
+            embedding.shape = shape
+
+            if emb_db.expected_shape == -1 or emb_db.expected_shape == embedding.shape:
+                emb_db.register_embedding(embedding, shared.sd_model)
+            else:
+                emb_db.skipped_embeddings[name] = embedding

     if failed_to_load_networks:
         sd_hijack.model_hijack.comments.append("Networks not found: " + ", ".join(failed_to_load_networks))
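
For reference, the three bundled-embedding layouts distinguished by the branch chain above look roughly as follows; the token names, vector counts, and channel widths are hypothetical, chosen only to match the shape arithmetic in the diff:

import torch

# 1. A1111 textual-inversion layout: a nested 'string_to_param' mapping.
ti_data = {"string_to_param": {"*": torch.zeros(4, 768)}}    # shape = 768, vectors = 4

# 2. SDXL layout: one tensor per text encoder; channel widths are summed.
sdxl_data = {"clip_g": torch.zeros(4, 1280), "clip_l": torch.zeros(4, 768)}
# shape = 1280 + 768 = 2048, vectors = 4

# 3. "Diffuser concept" layout: a single token-to-tensor entry.
concept_data = {"<my-token>": torch.zeros(768)}              # 1-D, unsqueezed to (1, 768)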
@@ -565,6 +612,7 @@ extra_network_lora = None
 available_networks = {}
 available_network_aliases = {}
 loaded_networks = []
+loaded_bundle_embeddings = {}
 networks_in_memory = {}
 available_network_hash_lookup = {}
 forbidden_network_aliases = {}
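
For context, a bundled file of this shape could plausibly be assembled as in the sketch below. Only the "bundle_emb." key prefix and the accepted tensor layouts come from the diff; the file names, the embedding name my_style, and the tensor contents are hypothetical.

import torch
from safetensors.torch import load_file, save_file

# Hypothetical sketch: attach an embedding to an existing LoRA file so that
# load_network() above collects it. A single flat "bundle_emb.<name>.<tensor>"
# key ends up in the "diffuser concepts" branch of load_networks().
lora_sd = load_file("my_lora.safetensors")                        # existing LoRA weights
lora_sd["bundle_emb.my_style.emb_params"] = torch.randn(4, 768)   # 4 vectors, 768 dims

save_file(lora_sd, "my_lora_bundled.safetensors")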