Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Sign in / Register
Toggle navigation
S
Stable Diffusion Webui
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Locked Files
Issues
0
Issues
0
List
Boards
Labels
Service Desk
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Security & Compliance
Security & Compliance
Dependency List
License Compliance
Packages
Packages
List
Container Registry
Analytics
Analytics
CI / CD
Code Review
Insights
Issues
Repository
Value Stream
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
novelai-storage
Stable Diffusion Webui
Commits
f1b090e9
Commit
f1b090e9
authored
Mar 17, 2024
by
AUTOMATIC1111
Committed by
GitHub
Mar 17, 2024
Browse files
Options
Browse Files
Download
Plain Diff
Merge pull request #15287 from AUTOMATIC1111/diskcache
use diskcache library for caching
parents
611faadd
66355b47
Changes
4
Hide whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
42 additions
and
47 deletions
+42
-47
.gitignore
.gitignore
+1
-0
modules/cache.py
modules/cache.py
+39
-47
requirements.txt
requirements.txt
+1
-0
requirements_versions.txt
requirements_versions.txt
+1
-0
No files found.
.gitignore
View file @
f1b090e9
...
...
@@ -38,3 +38,4 @@ notification.mp3
/package-lock.json
/.coverage*
/test/test_outputs
/cache
modules/cache.py
View file @
f1b090e9
...
...
@@ -2,48 +2,47 @@ import json
import
os
import
os.path
import
threading
import
time
import
diskcache
import
tqdm
from
modules.paths
import
data_path
,
script_path
# Path of the legacy single-file JSON cache; only read once, to migrate its
# contents into the diskcache stores (see convert_old_cached_data).
cache_filename = os.environ.get('SD_WEBUI_CACHE_FILE', os.path.join(data_path, "cache.json"))

# Directory that holds one diskcache.Cache store per subsection.
cache_dir = os.environ.get('SD_WEBUI_CACHE_DIR', os.path.join(data_path, "cache"))

# subsection name -> diskcache.Cache, populated lazily by cache().
caches = {}

# Guards lazy creation of cache objects and the one-time legacy-cache conversion.
cache_lock = threading.Lock()
def dump_cache():
    """Old function for dumping cache to disk; does nothing since diskcache.

    Kept as a no-op for backward compatibility: callers elsewhere still flag
    the cache for writing, but diskcache persists entries itself.
    """
    pass
def convert_old_cached_data():
    """Migrate entries from the legacy cache.json into per-subsection diskcache stores.

    Reads the old JSON cache at ``cache_filename``; a missing file means there is
    nothing to convert. An unreadable/corrupt file is moved aside to tmp/cache.json
    and reported, rather than crashing startup. Each top-level subsection of the
    JSON becomes (or reuses) a diskcache.Cache under ``cache_dir``, with progress
    shown via tqdm. Caller is expected to hold ``cache_lock`` (cache() does).
    """
    try:
        with open(cache_filename, "r", encoding="utf8") as file:
            data = json.load(file)
    except FileNotFoundError:
        # no legacy cache -- nothing to migrate
        return
    except Exception:
        # best-effort: preserve the broken file for inspection instead of failing
        os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
        print('[ERROR] issue occurred while trying to read cache.json; old cache has been moved to tmp/cache.json')
        return

    total_count = sum(len(keyvalues) for keyvalues in data.values())

    with tqdm.tqdm(total=total_count, desc="converting cache") as progress:
        for subsection, keyvalues in data.items():
            cache_obj = caches.get(subsection)
            if cache_obj is None:
                cache_obj = diskcache.Cache(os.path.join(cache_dir, subsection))
                caches[subsection] = cache_obj

            for key, value in keyvalues.items():
                cache_obj[key] = value
                progress.update(1)
def cache(subsection):
    """
    Retrieves or initializes a cache for a specific subsection.

    Parameters:
        subsection (str): The subsection identifier for the cache.

    Returns:
        diskcache.Cache: The cache data for the specified subsection.
    """
    cache_obj = caches.get(subsection)
    if not cache_obj:
        with cache_lock:
            # one-time migration: a legacy cache.json exists but the diskcache
            # directory does not, so convert the old data before first use
            if not os.path.exists(cache_dir) and os.path.isfile(cache_filename):
                convert_old_cached_data()

            # double-checked under the lock: another thread may have created it
            cache_obj = caches.get(subsection)
            if not cache_obj:
                cache_obj = diskcache.Cache(os.path.join(cache_dir, subsection))
                caches[subsection] = cache_obj

    return cache_obj
def
cached_data_for_file
(
subsection
,
title
,
filename
,
func
):
...
...
requirements.txt
View file @
f1b090e9
...
...
@@ -4,6 +4,7 @@ accelerate
blendmodes
clean-fid
diskcache
einops
facexlib
fastapi
>=0.90.1
...
...
requirements_versions.txt
View file @
f1b090e9
...
...
@@ -3,6 +3,7 @@ Pillow==9.5.0
accelerate==0.21.0
blendmodes==2022
clean-fid==0.1.35
diskcache==5.6.3
einops==0.4.1
facexlib==0.3.0
fastapi==0.94.0
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment