Administrator / stable-diffusion-webui / Commits / aa6e55e0

Commit aa6e55e0, authored Jan 29, 2023 by AUTOMATIC (parent 8d7382ab)

do not display the message for TI unless the list of loaded embeddings changed

Showing 1 changed file with 7 additions and 3 deletions.
modules/textual_inversion/textual_inversion.py (+7 -3)

@@ -112,6 +112,7 @@ class EmbeddingDatabase:
         self.skipped_embeddings = {}
         self.expected_shape = -1
         self.embedding_dirs = {}
+        self.previously_displayed_embeddings = ()

     def add_embedding_dir(self, path):
         self.embedding_dirs[path] = DirWithTextualInversionEmbeddings(path)
@@ -228,9 +229,12 @@ class EmbeddingDatabase:
             self.load_from_dir(embdir)
             embdir.update()

-        print(f"Textual inversion embeddings loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings.keys())}")
-        if len(self.skipped_embeddings) > 0:
-            print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings.keys())}")
+        displayed_embeddings = (tuple(self.word_embeddings.keys()), tuple(self.skipped_embeddings.keys()))
+        if self.previously_displayed_embeddings != displayed_embeddings:
+            self.previously_displayed_embeddings = displayed_embeddings
+            print(f"Textual inversion embeddings loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings.keys())}")
+            if len(self.skipped_embeddings) > 0:
+                print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings.keys())}")

     def find_embedding_at_position(self, tokens, offset):
         token = tokens[offset]
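The pattern in the diff: snapshot the names that would be printed as a pair of tuples, compare against the last snapshot stored on the database, and only print (and update the snapshot) when the snapshot changed. A minimal, self-contained sketch of the same pattern; the class body here is a stripped-down stand-in for illustration, not the real module:

# Sketch of the commit's change-detection pattern. Only the
# tuple-snapshot comparison mirrors the actual code; the class body
# and messages are simplified stand-ins.
class EmbeddingDatabase:
    def __init__(self):
        self.word_embeddings = {}
        self.skipped_embeddings = {}
        self.previously_displayed_embeddings = ()  # empty tuple: nothing printed yet

    def report(self):
        # Tuples of the dict keys are cheap, immutable snapshots; dicts keep
        # insertion order, so the comparison also notices reordering.
        displayed = (tuple(self.word_embeddings.keys()),
                     tuple(self.skipped_embeddings.keys()))
        if self.previously_displayed_embeddings != displayed:
            self.previously_displayed_embeddings = displayed
            print(f"loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings)}")
            if self.skipped_embeddings:
                print(f"skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings)}")

db = EmbeddingDatabase()
db.word_embeddings["style-a"] = object()
db.report()   # prints: loaded(1): style-a
db.report()   # prints nothing; the lists did not change
db.word_embeddings["style-b"] = object()
db.report()   # prints: loaded(2): style-a, style-b

Tuples are snapshotted rather than storing the dicts themselves because a stored reference to a dict that is later mutated in place would always compare equal to itself. Note also that the diff moves the "skipped" message behind the same change check, so repeated reloads with unchanged skipped embeddings stay quiet too.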