from transformers.configuration_utils import PretrainedConfig


class DakitariInstructConfig(PretrainedConfig):
    model_type = "dakitari_instruct"

    def __init__(
        self,
        vocab_size=30522,
        n_positions=512,
        n_embd=768,  # increased embedding dimension
        n_layer=24,  # increased number of layers
        n_head=8,  # number of attention heads (increase if desired)
        n_inner=3072,  # increased feed-forward dimension
        pad_token_id=0,
        bos_token_id=1,
        eos_token_id=2,
        activation_function="gelu",
        resid_pdrop=0.1,
        embd_pdrop=0.1,
        attn_pdrop=0.1,
        layer_norm_epsilon=1e-5,
        initializer_range=0.02,
        adapter_bottleneck=128,  # optionally increase adapter capacity
        model_name="DakitariInstruct-v1.1",
        creator="Quantum Leap AI company",
        country="Kenya, Africa",
        healthcare_purpose="Assist healthcare professionals and patients with accurate medical information",
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.n_positions = n_positions
        self.n_embd = n_embd
        self.n_layer = n_layer
        self.n_head = n_head
        self.n_inner = n_inner
        self.pad_token_id = pad_token_id
        self.bos_token_id = bos_token_id
        self.eos_token_id = eos_token_id
        self.activation_function = activation_function
        self.resid_pdrop = resid_pdrop
        self.embd_pdrop = embd_pdrop
        self.attn_pdrop = attn_pdrop
        self.layer_norm_epsilon = layer_norm_epsilon
        self.initializer_range = initializer_range
        self.adapter_bottleneck = adapter_bottleneck
        self.model_name = model_name
        self.creator = creator
        self.country = country
        self.healthcare_purpose = healthcare_purpose
        # Forward the special token ids to PretrainedConfig; otherwise the
        # parent initializer pops them from kwargs and resets them to None,
        # overwriting the values assigned above.
        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            **kwargs,
        )
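
# Minimal usage sketch (assumes only the `transformers` library is installed):
# register the config with AutoConfig under its `model_type` and round-trip it
# through save_pretrained/from_pretrained. The directory name and the n_layer
# override below are illustrative, not part of the model's published defaults.
if __name__ == "__main__":
    from transformers import AutoConfig

    # Let AutoConfig.from_pretrained resolve the "dakitari_instruct" model type.
    AutoConfig.register("dakitari_instruct", DakitariInstructConfig)

    config = DakitariInstructConfig(n_layer=12)  # any default can be overridden
    config.save_pretrained("./dakitari_instruct")  # writes config.json

    reloaded = DakitariInstructConfig.from_pretrained("./dakitari_instruct")
    assert reloaded.n_layer == 12
    assert reloaded.model_type == "dakitari_instruct"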