fix config saving compatibility issue for transformers 4.45+ (#7)
- fix config saving compatibility issue for transformers 4.45+ (31518f90dd48ca33b673de3e4eb24b0155f9cf98)
Co-authored-by: Ekaterina Aidova <[email protected]>
configuration_internvl_chat.py
CHANGED
@@ -46,9 +46,9 @@ class InternVLChatConfig(PretrainedConfig):
             logger.info('llm_config is None. Initializing the LlamaConfig config with default values (`LlamaConfig`).')
 
         self.vision_config = InternVisionConfig(**vision_config)
-        if llm_config['architectures'][0] == 'LlamaForCausalLM':
+        if llm_config.get('architectures', ["Qwen2ForCausalLM"])[0] == 'LlamaForCausalLM':
             self.llm_config = LlamaConfig(**llm_config)
-        elif llm_config['architectures'][0] == 'Qwen2ForCausalLM':
+        elif llm_config.get('architectures', ["Qwen2ForCausalLM"])[0] == 'Qwen2ForCausalLM':
             self.llm_config = Qwen2Config(**llm_config)
         else:
             raise ValueError('Unsupported architecture: {}'.format(llm_config['architectures'][0]))
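For context, the guarded lookup can be exercised on its own. The sketch below is not part of the repository; the helper name pick_llm_config is hypothetical. It shows how dict.get with a "Qwen2ForCausalLM" fallback keeps the config loadable when the saved llm_config dict lacks an 'architectures' key, which is the situation this commit guards against for configs written by transformers 4.45+.

from transformers import LlamaConfig, Qwen2Config

def pick_llm_config(llm_config: dict):
    # Configs re-saved under transformers 4.45+ may omit the 'architectures'
    # key in the nested llm_config, so indexing it directly would raise
    # KeyError on reload; .get() with a Qwen2ForCausalLM default mirrors the
    # patched branch selection above.
    arch = llm_config.get('architectures', ['Qwen2ForCausalLM'])[0]
    if arch == 'LlamaForCausalLM':
        return LlamaConfig(**llm_config)
    elif arch == 'Qwen2ForCausalLM':
        return Qwen2Config(**llm_config)
    raise ValueError('Unsupported architecture: {}'.format(arch))

# A config dict saved without 'architectures' now falls back to Qwen2Config:
print(type(pick_llm_config({'hidden_size': 1024})).__name__)  # Qwen2Config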