hubconf.py
"""Torch Hub entrypoints for pretrained PaLM (palm_rlhf_pytorch) checkpoints hosted on the Hugging Face Hub."""

import torch

from palm_rlhf_pytorch import PaLM


def palm_150m_8k_v0():
    # PaLM 150M configuration; loads the v0 checkpoint from the Hugging Face Hub.
    num_tokens = 50304
    dim = 768
    depth = 12
    dim_head = 128
    heads = 8
    flash_attn = True

    model = PaLM(
        num_tokens=num_tokens, dim=dim, depth=depth, dim_head=dim_head, heads=heads, flash_attn=flash_attn
    )

    hf_url = 'https://huggingface.co/conceptofmind/palm-150m/resolve/main/palm_150m_8k_v0.pt'
    state_dict = torch.hub.load_state_dict_from_url(hf_url)
    model.load_state_dict(state_dict)

    return model


def palm_410m_8k_v0():
    # PaLM 410M configuration; loads the v0 checkpoint from the Hugging Face Hub.
    num_tokens = 50304
    dim = 1024
    depth = 24
    dim_head = 128
    heads = 8
    flash_attn = True

    model = PaLM(
        num_tokens=num_tokens, dim=dim, depth=depth, dim_head=dim_head, heads=heads, flash_attn=flash_attn
    )

    hf_url = 'https://huggingface.co/conceptofmind/palm-410m/resolve/main/palm_410m_8k_v0.pt'
    state_dict = torch.hub.load_state_dict_from_url(hf_url)
    model.load_state_dict(state_dict)

    return model


def palm_1b_8k_v0():
    # PaLM 1B configuration; loads the v0 checkpoint from the Hugging Face Hub.
    num_tokens = 50304
    dim = 2048
    depth = 16
    dim_head = 128
    heads = 8
    flash_attn = True

    model = PaLM(
        num_tokens=num_tokens, dim=dim, depth=depth, dim_head=dim_head, heads=heads, flash_attn=flash_attn
    )

    hf_url = 'https://huggingface.co/conceptofmind/palm-1b/resolve/main/palm_1b_8k_v0.pt'
    state_dict = torch.hub.load_state_dict_from_url(hf_url)
    model.load_state_dict(state_dict)

    return model


def palm_2b_8k_v0():
    # PaLM 2B configuration; loads the v0 checkpoint from the Hugging Face Hub.
    num_tokens = 50304
    dim = 2560
    depth = 24
    dim_head = 128
    heads = 8
    flash_attn = True

    model = PaLM(
        num_tokens=num_tokens, dim=dim, depth=depth, dim_head=dim_head, heads=heads, flash_attn=flash_attn
    )

    hf_url = 'https://huggingface.co/conceptofmind/palm-2b/resolve/main/palm_2b_8k_v0.pt'
    state_dict = torch.hub.load_state_dict_from_url(hf_url)
    model.load_state_dict(state_dict)

    return model
# def palm_3b_8k_v0():
#     num_tokens = 50304
#     dim = 2560
#     depth = 32
#     dim_head = 128
#     heads = 24
#     flash_attn = True
#
#     model = PaLM(
#         num_tokens=num_tokens, dim=dim, depth=depth, dim_head=dim_head, heads=heads, flash_attn=flash_attn
#     )
#
#     hf_url = 'https://huggingface.co/conceptofmind/palm-1b/resolve/main/palm_3b_8k_v0.pt'
#     state_dict = torch.hub.load_state_dict_from_url(hf_url)
#     state_dict = torch.hub.load_state_dict_from_url(hf_url)
#     model.load_state_dict(state_dict)
#     return model
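

# Minimal usage sketch, not part of the original entrypoints. It assumes network
# access to download a checkpoint and that PaLM's forward pass takes a batch of
# token ids and returns per-token logits. Through torch.hub the entrypoints would
# instead be loaded as torch.hub.load('<owner>/<repo>', 'palm_150m_8k_v0'), where
# '<owner>/<repo>' is the GitHub path hosting this hubconf.py (left as a placeholder).
if __name__ == '__main__':
    model = palm_150m_8k_v0()
    tokens = torch.randint(0, 50304, (1, 128))  # dummy batch of token ids
    logits = model(tokens)
    print(logits.shape)  # expected: (1, 128, 50304)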