from typing import List

from transformers import PretrainedConfig


class MetaLoRAConfig(PretrainedConfig):
    """Configuration for MetaLoRA models, serialized through the standard
    Hugging Face ``PretrainedConfig`` machinery."""

    model_type = "metalora"

    def __init__(
        self,
        # Defaults are placeholders so the config can be constructed with no
        # arguments, which Hugging Face's config serialization utilities
        # (e.g. ``to_diff_dict`` during ``save_pretrained``) rely on;
        # override them when instantiating.
        mlora_layers: List[int] = None,
        base_size: int = None,
        embd_model: str = None,
        llm_tokenizer: str = None,
        **kwargs,
    ):
        # Indices of the layers that receive MetaLoRA modules.
        self.mlora_layers = mlora_layers
        # Base size hyperparameter consumed by the MetaLoRA model code.
        self.base_size = base_size
        # Name or path of the embedding model.
        self.embd_model = embd_model
        # Name or path of the tokenizer used by the underlying LLM.
        self.llm_tokenizer = llm_tokenizer
        super().__init__(**kwargs)
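

if __name__ == "__main__":
    # Minimal usage sketch. The argument values below are illustrative
    # placeholders (not values taken from the MetaLoRA code or paper): build
    # a config, round-trip it through save_pretrained / from_pretrained, and
    # check that the custom fields survive serialization.
    config = MetaLoRAConfig(
        mlora_layers=[0, 4, 8],
        base_size=768,
        embd_model="sentence-transformers/all-MiniLM-L6-v2",
        llm_tokenizer="gpt2",
    )

    # Writes config.json into the given directory (created if missing).
    config.save_pretrained("metalora-config")

    # Reload and verify the custom fields round-trip correctly.
    reloaded = MetaLoRAConfig.from_pretrained("metalora-config")
    assert reloaded.mlora_layers == [0, 4, 8]
    assert reloaded.base_size == 768
    print(reloaded)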