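# Factory helpers for the LLaVA multimodal pipelines: resolve a pipeline either
# by its explicit name or from the name of the loaded model.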
from typing import Optional

from extensions.multimodal.abstract_pipeline import AbstractMultimodalPipeline

available_pipelines = ['llava-7b', 'llava-13b', 'llava-llama-2-13b', 'llava-v1.5-13b', 'llava-v1.5-7b']


def get_pipeline(name: str, params: dict) -> Optional[AbstractMultimodalPipeline]:
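    # Pipeline classes are imported lazily, only inside the branch that needs
    # them, so selecting one variant never imports the others.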
    if name == 'llava-7b':
        from .llava import LLaVA_v0_7B_Pipeline
        return LLaVA_v0_7B_Pipeline(params)
    if name == 'llava-13b':
        from .llava import LLaVA_v0_13B_Pipeline
        return LLaVA_v0_13B_Pipeline(params)
    if name == 'llava-llama-2-13b':
        from .llava import LLaVA_LLaMA_2_13B_Pipeline
        return LLaVA_LLaMA_2_13B_Pipeline(params)
    if name == 'llava-v1.5-7b':
        from .llava import LLaVA_v1_5_7B_Pipeline
        return LLaVA_v1_5_7B_Pipeline(params)
    if name == 'llava-v1.5-13b':
        from .llava import LLaVA_v1_5_13B_Pipeline
        return LLaVA_v1_5_13B_Pipeline(params)
    return None


def get_pipeline_from_model_name(model_name: str, params: dict) -> Optional[AbstractMultimodalPipeline]:
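    # Pick a pipeline from substrings of the lowercased model name
    # ('llama-2', 'llava-v1.5', '7b'/'13b'); names that match no known
    # variant fall through to the final `return None`.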
    if 'llava' not in model_name.lower():
        return None
    if 'llama-2' in model_name.lower():
        if '13b' in model_name.lower():
            from .llava import LLaVA_LLaMA_2_13B_Pipeline
            return LLaVA_LLaMA_2_13B_Pipeline(params)
    elif 'llava-v1.5' in model_name.lower():
        if '13b' in model_name.lower():
            from .llava import LLaVA_v1_5_13B_Pipeline
            return LLaVA_v1_5_13B_Pipeline(params)
        if '7b' in model_name.lower():
            from .llava import LLaVA_v1_5_7B_Pipeline
            return LLaVA_v1_5_7B_Pipeline(params)
    else:
        if '7b' in model_name.lower():
            from .llava import LLaVA_v0_7B_Pipeline
            return LLaVA_v0_7B_Pipeline(params)
        if '13b' in model_name.lower():
            from .llava import LLaVA_v0_13B_Pipeline
            return LLaVA_v0_13B_Pipeline(params)
    return None
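
# Usage sketch (illustrative; the import path and the params value are
# assumptions, not taken from this file):
#
#     from extensions.multimodal.pipelines.llava.pipelines import get_pipeline_from_model_name
#
#     pipeline = get_pipeline_from_model_name('llava-v1.5-13b', params={})
#     if pipeline is None:
#         ...  # model name did not match any known LLaVA variant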