Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Sign in / Register
Toggle navigation
S
Stable Diffusion Webui
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Locked Files
Issues
0
Issues
0
List
Boards
Labels
Service Desk
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Security & Compliance
Security & Compliance
Dependency List
License Compliance
Packages
Packages
List
Container Registry
Analytics
Analytics
CI / CD
Code Review
Insights
Issues
Repository
Value Stream
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
novelai-storage
Stable Diffusion Webui
Commits
2d8c894b
Commit
2d8c894b
authored
Oct 21, 2023
by
v0xie
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
refactor: use forward hook instead of custom forward
parent
0550659c
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
24 additions
and
9 deletions
+24
-9
extensions-builtin/Lora/network_oft.py
extensions-builtin/Lora/network_oft.py
+24
-9
No files found.
extensions-builtin/Lora/network_oft.py
View file @
2d8c894b
...
...
@@ -36,9 +36,11 @@ class NetworkModuleOFT(network.NetworkModule):
# how do we revert this to unload the weights?
def apply_to(self):
    """Attach this OFT network module to its wrapped layer.

    Saves a reference to the wrapped layer's original ``forward`` and
    registers ``self.forward_hook`` as a forward hook, so the OFT
    transform is applied to the layer's output without replacing
    ``forward`` itself.

    Fix: ``register_forward_hook`` returns a ``RemovableHandle``; the
    original code discarded it, leaving no way to detach the hook when
    the network is unloaded (the open "how do we revert this to unload
    the weights?" question). The handle is now kept on the instance so
    a future unload path can call ``self._forward_hook_handle.remove()``.
    """
    # NOTE(review): assumes self.org_module is a one-element container
    # holding the wrapped torch.nn.Module -- confirm against the base
    # network.NetworkModule class.
    self.org_forward = self.org_module[0].forward
    # Keep the handle so the hook can be removed later.
    self._forward_hook_handle = self.org_module[0].register_forward_hook(self.forward_hook)
def
get_weight
(
self
,
oft_blocks
,
multiplier
=
None
):
self
.
constraint
=
self
.
constraint
.
to
(
oft_blocks
.
device
,
dtype
=
oft_blocks
.
dtype
)
block_Q
=
oft_blocks
-
oft_blocks
.
transpose
(
1
,
2
)
norm_Q
=
torch
.
norm
(
block_Q
.
flatten
())
new_norm_Q
=
torch
.
clamp
(
norm_Q
,
max
=
self
.
constraint
)
...
...
@@ -66,14 +68,10 @@ class NetworkModuleOFT(network.NetworkModule):
output_shape
=
self
.
oft_blocks
.
shape
return
self
.
finalize_updown
(
updown
,
orig_weight
,
output_shape
)
def
forward
(
self
,
x
,
y
=
None
):
x
=
self
.
org_forward
(
x
)
if
self
.
multiplier
()
==
0.0
:
return
x
# calculating R here is excruciatingly slow
#R = self.get_weight().to(x.device, dtype=x.dtype)
def
forward_hook
(
self
,
module
,
args
,
output
):
#print(f'Forward hook in {self.network_key} called')
x
=
output
R
=
self
.
R
.
to
(
x
.
device
,
dtype
=
x
.
dtype
)
if
x
.
dim
()
==
4
:
...
...
@@ -83,3 +81,20 @@ class NetworkModuleOFT(network.NetworkModule):
else
:
x
=
torch
.
matmul
(
x
,
R
)
return
x
# def forward(self, x, y=None):
# x = self.org_forward(x)
# if self.multiplier() == 0.0:
# return x
# # calculating R here is excruciatingly slow
# #R = self.get_weight().to(x.device, dtype=x.dtype)
# R = self.R.to(x.device, dtype=x.dtype)
# if x.dim() == 4:
# x = x.permute(0, 2, 3, 1)
# x = torch.matmul(x, R)
# x = x.permute(0, 3, 1, 2)
# else:
# x = torch.matmul(x, R)
# return x
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment