fix strip

This commit is contained in:
Achazwl 2022-06-10 17:09:45 +08:00
parent b0e1b3715d
commit e2635545e7
2 changed files with 10 additions and 3 deletions

View File

@@ -2,8 +2,15 @@ from transformers import BertForMaskedLM
model = BertForMaskedLM.from_pretrained("bert-base-cased")
# suppose we load BERT
import sys
if len(sys.argv) == 1:
port=True
else:
port=int(sys.argv[1])
from opendelta import LoraModel
delta_model = LoraModel(backbone_model=model, interactive_modify=True)
delta_model = LoraModel(backbone_model=model, interactive_modify=port)
# This will visualize the backbone after modification and other information.
delta_model.freeze_module(exclude=["deltas", "layernorm_embedding"], set_state_dict=True)

View File

@@ -107,7 +107,7 @@ class hello:
class submit:
def GET(self, _):
global names
names = [name.strip("root.") for name in web.input(name=[]).name]
names = [name[5:] for name in web.input(name=[]).name]
app.stop()
def interactive(model, port=8888):
@@ -120,7 +120,7 @@ def interactive(model, port=8888):
print("If on your machine, open the link below for interactive modification.\n "
"If on remote host, you could use port mapping, "
"or run in vscode terminal, which automatically do port mapping for you.")
app.run()
app.run(port)
global names
print("modified_modules:")
print(names)