Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Sign in / Register
Toggle navigation
S
Stable Diffusion Webui
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Locked Files
Issues
0
Issues
0
List
Boards
Labels
Service Desk
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Security & Compliance
Security & Compliance
Dependency List
License Compliance
Packages
Packages
List
Container Registry
Analytics
Analytics
CI / CD
Code Review
Insights
Issues
Repository
Value Stream
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
novelai-storage
Stable Diffusion Webui
Commits
c5aa7b65
Commit
c5aa7b65
authored
Mar 16, 2024
by
AUTOMATIC1111
Committed by
GitHub
Mar 16, 2024
Browse files
Options
Browse Files
Download
Plain Diff
Merge pull request #15269 from AUTOMATIC1111/fix-Hires-prompt-Styles
fix issue with Styles when Hires prompt is used
parents
01ba5ad2
887a5122
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
29 additions
and
18 deletions
+29
-18
modules/infotext_utils.py
modules/infotext_utils.py
+20
-11
modules/infotext_versions.py
modules/infotext_versions.py
+1
-0
modules/processing.py
modules/processing.py
+8
-7
No files found.
modules/infotext_utils.py
View file @
c5aa7b65
...
...
@@ -265,17 +265,6 @@ Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512, Model
else
:
prompt
+=
(
""
if
prompt
==
""
else
"
\n
"
)
+
line
if
shared
.
opts
.
infotext_styles
!=
"Ignore"
:
found_styles
,
prompt
,
negative_prompt
=
shared
.
prompt_styles
.
extract_styles_from_prompt
(
prompt
,
negative_prompt
)
if
shared
.
opts
.
infotext_styles
==
"Apply"
:
res
[
"Styles array"
]
=
found_styles
elif
shared
.
opts
.
infotext_styles
==
"Apply if any"
and
found_styles
:
res
[
"Styles array"
]
=
found_styles
res
[
"Prompt"
]
=
prompt
res
[
"Negative prompt"
]
=
negative_prompt
for
k
,
v
in
re_param
.
findall
(
lastline
):
try
:
if
v
[
0
]
==
'"'
and
v
[
-
1
]
==
'"'
:
...
...
@@ -290,6 +279,26 @@ Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512, Model
except
Exception
:
print
(
f
"Error parsing
\"
{k}: {v}
\"
"
)
# Extract styles from prompt
if
shared
.
opts
.
infotext_styles
!=
"Ignore"
:
found_styles
,
prompt_no_styles
,
negative_prompt_no_styles
=
shared
.
prompt_styles
.
extract_styles_from_prompt
(
prompt
,
negative_prompt
)
same_hr_styles
=
True
if
(
"Hires prompt"
in
res
or
"Hires negative prompt"
in
res
)
and
(
infotext_ver
>
infotext_versions
.
v180_hr_styles
if
(
infotext_ver
:
=
infotext_versions
.
parse_version
(
res
.
get
(
"Version"
)))
else
True
):
hr_prompt
,
hr_negative_prompt
=
res
.
get
(
"Hires prompt"
,
prompt
),
res
.
get
(
"Hires negative prompt"
,
negative_prompt
)
hr_found_styles
,
hr_prompt_no_styles
,
hr_negative_prompt_no_styles
=
shared
.
prompt_styles
.
extract_styles_from_prompt
(
hr_prompt
,
hr_negative_prompt
)
if
same_hr_styles
:
=
found_styles
==
hr_found_styles
:
res
[
"Hires prompt"
]
=
''
if
hr_prompt_no_styles
==
prompt_no_styles
else
hr_prompt_no_styles
res
[
'Hires negative prompt'
]
=
''
if
hr_negative_prompt_no_styles
==
negative_prompt_no_styles
else
hr_negative_prompt_no_styles
if
same_hr_styles
:
prompt
,
negative_prompt
=
prompt_no_styles
,
negative_prompt_no_styles
if
(
shared
.
opts
.
infotext_styles
==
"Apply if any"
and
found_styles
)
or
shared
.
opts
.
infotext_styles
==
"Apply"
:
res
[
'Styles array'
]
=
found_styles
res
[
"Prompt"
]
=
prompt
res
[
"Negative prompt"
]
=
negative_prompt
# Missing CLIP skip means it was set to 1 (the default)
if
"Clip skip"
not
in
res
:
res
[
"Clip skip"
]
=
"1"
...
...
modules/infotext_versions.py
View file @
c5aa7b65
...
...
@@ -6,6 +6,7 @@ import re
# Version thresholds for infotext parsing: each constant marks the webui
# release in which the infotext format changed, so the parser can adapt
# its behavior based on the "Version" field embedded in the infotext.
v160 = version.parse("1.6.0")
v170_tsnr = version.parse("v1.7.0-225")
v180 = version.parse("1.8.0")
v180_hr_styles = version.parse("1.8.0-136")  # todo: change to the actual version number after merge
def
parse_version
(
text
):
...
...
modules/processing.py
View file @
c5aa7b65
...
...
@@ -702,7 +702,7 @@ def program_version():
return
res
def
create_infotext
(
p
,
all_prompts
,
all_seeds
,
all_subseeds
,
comments
=
None
,
iteration
=
0
,
position_in_batch
=
0
,
use_main_prompt
=
False
,
index
=
None
,
all_negative_prompts
=
None
):
def
create_infotext
(
p
,
all_prompts
,
all_seeds
,
all_subseeds
,
comments
=
None
,
iteration
=
0
,
position_in_batch
=
0
,
use_main_prompt
=
False
,
index
=
None
,
all_negative_prompts
=
None
,
all_hr_prompts
=
None
,
all_hr_negative_prompts
=
None
):
if
index
is
None
:
index
=
position_in_batch
+
iteration
*
p
.
batch_size
...
...
@@ -745,11 +745,18 @@ def create_infotext(p, all_prompts, all_seeds, all_subseeds, comments=None, iter
"RNG"
:
opts
.
randn_source
if
opts
.
randn_source
!=
"GPU"
else
None
,
"NGMS"
:
None
if
p
.
s_min_uncond
==
0
else
p
.
s_min_uncond
,
"Tiling"
:
"True"
if
p
.
tiling
else
None
,
"Hires prompt"
:
None
,
# This is set later, insert here to keep order
"Hires negative prompt"
:
None
,
# This is set later, insert here to keep order
**
p
.
extra_generation_params
,
"Version"
:
program_version
()
if
opts
.
add_version_to_infotext
else
None
,
"User"
:
p
.
user
if
opts
.
add_user_name_to_info
else
None
,
}
if
all_hr_prompts
:
=
all_hr_prompts
or
getattr
(
p
,
'all_hr_prompts'
,
None
):
generation_params
[
'Hires prompt'
]
=
all_hr_prompts
[
index
]
if
all_hr_prompts
[
index
]
!=
all_prompts
[
index
]
else
None
if
all_hr_negative_prompts
:
=
all_hr_negative_prompts
or
getattr
(
p
,
'all_hr_negative_prompts'
,
None
):
generation_params
[
'Hires negative prompt'
]
=
all_hr_negative_prompts
[
index
]
if
all_hr_negative_prompts
[
index
]
!=
all_negative_prompts
[
index
]
else
None
generation_params_text
=
", "
.
join
([
k
if
k
==
v
else
f
'{k}: {infotext_utils.quote(v)}'
for
k
,
v
in
generation_params
.
items
()
if
v
is
not
None
])
prompt_text
=
p
.
main_prompt
if
use_main_prompt
else
all_prompts
[
index
]
...
...
@@ -1194,12 +1201,6 @@ class StableDiffusionProcessingTxt2Img(StableDiffusionProcessing):
if
self
.
hr_sampler_name
is
not
None
and
self
.
hr_sampler_name
!=
self
.
sampler_name
:
self
.
extra_generation_params
[
"Hires sampler"
]
=
self
.
hr_sampler_name
if
tuple
(
self
.
hr_prompt
)
!=
tuple
(
self
.
prompt
):
self
.
extra_generation_params
[
"Hires prompt"
]
=
self
.
hr_prompt
if
tuple
(
self
.
hr_negative_prompt
)
!=
tuple
(
self
.
negative_prompt
):
self
.
extra_generation_params
[
"Hires negative prompt"
]
=
self
.
hr_negative_prompt
self
.
latent_scale_mode
=
shared
.
latent_upscale_modes
.
get
(
self
.
hr_upscaler
,
None
)
if
self
.
hr_upscaler
is
not
None
else
shared
.
latent_upscale_modes
.
get
(
shared
.
latent_upscale_default_mode
,
"nearest"
)
if
self
.
enable_hr
and
self
.
latent_scale_mode
is
None
:
if
not
any
(
x
.
name
==
self
.
hr_upscaler
for
x
in
shared
.
sd_upscalers
):
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment