Commit 81e94de3 authored Oct 10, 2023 by Kohaku-Blueleaf
Add warning when embedding names conflict
Choose the standalone embedding (in the /embeddings folder) first
parent 2282eb8d
Showing 2 changed files with 81 additions and 32 deletions:
extensions-builtin/Lora/lora_logger.py    +33 −0
extensions-builtin/Lora/networks.py       +48 −32
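Context for the diffs below: an embedding bundled inside a LoRA file can share a name with a standalone embedding that was already loaded from the /embeddings folder. After this commit the standalone embedding keeps priority and the bundled copy is skipped with a warning. A minimal sketch of that precedence rule, with hypothetical helper and variable names (not the webui API):

# Hypothetical illustration of the precedence rule introduced by this commit.
# `standalone_names` stands in for names already registered from /embeddings.
def resolve_bundle_embedding(emb_name, bundled_embedding, standalone_names, log):
    if emb_name in standalone_names:
        # The standalone embedding wins; the bundled copy is not registered.
        log(f'Skip bundle embedding: "{emb_name}" as it was already loaded from embeddings folder')
        return None
    return bundled_embedding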
extensions-builtin/Lora/lora_logger.py    0 → 100644 (new file)

import sys
import copy
import logging


class ColoredFormatter(logging.Formatter):
    COLORS = {
        "DEBUG": "\033[0;36m",        # CYAN
        "INFO": "\033[0;32m",         # GREEN
        "WARNING": "\033[0;33m",      # YELLOW
        "ERROR": "\033[0;31m",        # RED
        "CRITICAL": "\033[0;37;41m",  # WHITE ON RED
        "RESET": "\033[0m",           # RESET COLOR
    }

    def format(self, record):
        colored_record = copy.copy(record)
        levelname = colored_record.levelname
        seq = self.COLORS.get(levelname, self.COLORS["RESET"])
        colored_record.levelname = f"{seq}{levelname}{self.COLORS['RESET']}"
        return super().format(colored_record)


logger = logging.getLogger("lora")
logger.propagate = False


if not logger.handlers:
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(
        ColoredFormatter("[%(name)s]-%(levelname)s: %(message)s")
    )
    logger.addHandler(handler)

(no newline at end of file)
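A minimal usage sketch of the new module, imported the same way networks.py does in the diff below; the message text here is made up for illustration:

from lora_logger import logger

# With the module's default configuration, warnings and above are printed to
# stdout as "[lora]-LEVEL: message", with the level name colored by
# ColoredFormatter (WARNING yellow, ERROR red, ...).
logger.warning('Skip bundle embedding: "my_style" as it was already loaded from embeddings folder')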
extensions-builtin/Lora/networks.py

@@ -17,6 +17,8 @@ from typing import Union
 from modules import shared, devices, sd_models, errors, scripts, sd_hijack
 from modules.textual_inversion.textual_inversion import Embedding
+
+from lora_logger import logger
 
 module_types = [
     network_lora.ModuleTypeLora(),
     network_hada.ModuleTypeHada(),
@@ -206,7 +208,40 @@ def load_network(name, network_on_disk):
         net.modules[key] = net_module
 
-    net.bundle_embeddings = bundle_embeddings
+    embeddings = {}
+    for emb_name, data in bundle_embeddings.items():
+        # textual inversion embeddings
+        if 'string_to_param' in data:
+            param_dict = data['string_to_param']
+            param_dict = getattr(param_dict, '_parameters', param_dict)  # fix for torch 1.12.1 loading saved file from torch 1.11
+            assert len(param_dict) == 1, 'embedding file has multiple terms in it'
+            emb = next(iter(param_dict.items()))[1]
+            vec = emb.detach().to(devices.device, dtype=torch.float32)
+            shape = vec.shape[-1]
+            vectors = vec.shape[0]
+        elif type(data) == dict and 'clip_g' in data and 'clip_l' in data:  # SDXL embedding
+            vec = {k: v.detach().to(devices.device, dtype=torch.float32) for k, v in data.items()}
+            shape = data['clip_g'].shape[-1] + data['clip_l'].shape[-1]
+            vectors = data['clip_g'].shape[0]
+        elif type(data) == dict and type(next(iter(data.values()))) == torch.Tensor:  # diffuser concepts
+            assert len(data.keys()) == 1, 'embedding file has multiple terms in it'
+
+            emb = next(iter(data.values()))
+            if len(emb.shape) == 1:
+                emb = emb.unsqueeze(0)
+            vec = emb.detach().to(devices.device, dtype=torch.float32)
+            shape = vec.shape[-1]
+            vectors = vec.shape[0]
+        else:
+            raise Exception(f"Couldn't identify {emb_name} in lora: {name} as neither textual inversion embedding nor diffuser concept.")
+
+        embedding = Embedding(vec, emb_name)
+        embedding.vectors = vectors
+        embedding.shape = shape
+        embedding.loaded = None
+        embeddings[emb_name] = embedding
+
+    net.bundle_embeddings = embeddings
 
     if keys_failed_to_match:
         logging.debug(f"Network {network_on_disk.filename} didn't match keys: {keys_failed_to_match}")
@@ -229,8 +264,9 @@ def load_networks(names, te_multipliers=None, unet_multipliers=None, dyn_dims=None):
     for net in loaded_networks:
         if net.name in names:
             already_loaded[net.name] = net
-            for emb_name in net.bundle_embeddings:
-                emb_db.register_embedding_by_name(None, shared.sd_model, emb_name)
+            for emb_name, embedding in net.bundle_embeddings.items():
+                if embedding.loaded:
+                    emb_db.register_embedding_by_name(None, shared.sd_model, emb_name)
 
     loaded_networks.clear()
@@ -273,37 +309,17 @@ def load_networks(names, te_multipliers=None, unet_multipliers=None, dyn_dims=None):
         net.dyn_dim = dyn_dims[i] if dyn_dims else 1.0
         loaded_networks.append(net)
 
-        for emb_name, data in net.bundle_embeddings.items():
-            # textual inversion embeddings
-            if 'string_to_param' in data:
-                param_dict = data['string_to_param']
-                param_dict = getattr(param_dict, '_parameters', param_dict)  # fix for torch 1.12.1 loading saved file from torch 1.11
-                assert len(param_dict) == 1, 'embedding file has multiple terms in it'
-                emb = next(iter(param_dict.items()))[1]
-                vec = emb.detach().to(devices.device, dtype=torch.float32)
-                shape = vec.shape[-1]
-                vectors = vec.shape[0]
-            elif type(data) == dict and 'clip_g' in data and 'clip_l' in data:  # SDXL embedding
-                vec = {k: v.detach().to(devices.device, dtype=torch.float32) for k, v in data.items()}
-                shape = data['clip_g'].shape[-1] + data['clip_l'].shape[-1]
-                vectors = data['clip_g'].shape[0]
-            elif type(data) == dict and type(next(iter(data.values()))) == torch.Tensor:  # diffuser concepts
-                assert len(data.keys()) == 1, 'embedding file has multiple terms in it'
-
-                emb = next(iter(data.values()))
-                if len(emb.shape) == 1:
-                    emb = emb.unsqueeze(0)
-                vec = emb.detach().to(devices.device, dtype=torch.float32)
-                shape = vec.shape[-1]
-                vectors = vec.shape[0]
-            else:
-                raise Exception(f"Couldn't identify {emb_name} in lora: {name} as neither textual inversion embedding nor diffuser concept.")
-
-            embedding = Embedding(vec, emb_name)
-            embedding.vectors = vectors
-            embedding.shape = shape
+        for emb_name, embedding in net.bundle_embeddings.items():
+            if embedding.loaded is None and emb_name in emb_db.word_embeddings:
+                logger.warning(
+                    f'Skip bundle embedding: "{emb_name}"'
+                    ' as it was already loaded from embeddings folder'
+                )
+                continue
 
+            embedding.loaded = False
             if emb_db.expected_shape == -1 or emb_db.expected_shape == embedding.shape:
+                embedding.loaded = True
                 emb_db.register_embedding(embedding, shared.sd_model)
             else:
                 emb_db.skipped_embeddings[name] = embedding
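Taken together, the hunks in load_network and load_networks give each bundled embedding a three-state `loaded` flag: None after parsing, True once registered, False when considered but rejected. A short summary sketch of that lifecycle, written as a standalone helper rather than the actual webui code:

# Summary sketch of the `loaded` flag lifecycle implied by the diff above.
# `emb_db` and `embedding` mirror the names used in networks.py; the function
# itself is illustrative only and does not exist in the repository.
def describe_bundle_embedding(embedding, emb_name, emb_db):
    if embedding.loaded is None and emb_name in emb_db.word_embeddings:
        # Parsed from the LoRA file, but a standalone /embeddings entry with the
        # same name is already registered: warn and leave it unregistered.
        return "skipped: standalone embedding wins"
    if emb_db.expected_shape == -1 or emb_db.expected_shape == embedding.shape:
        # Shape matches the loaded model: loaded becomes True and the embedding
        # is registered into the embedding database.
        return "registered"
    # Shape mismatch: loaded stays False and the embedding lands in
    # emb_db.skipped_embeddings.
    return "skipped: shape mismatch"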