import os

import torch
from peft import PeftModel
from transformers import AutoProcessor, BitsAndBytesConfig, Qwen2AudioForConditionalGeneration

model_id = "Qwen/Qwen2-Audio-7B-Instruct"

# 4-bit NF4 quantization so the 7B base model fits in limited GPU memory
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16
)

# Load the quantized base Qwen2-Audio model
base_model = Qwen2AudioForConditionalGeneration.from_pretrained(
    model_id,
    device_map="auto",
    torch_dtype=torch.bfloat16,
    quantization_config=bnb_config,
    cache_dir=os.getenv("CACHE_DIR"),
)

# Attach the fine-tuned LoRA adapter to the base model
model = PeftModel.from_pretrained(base_model, "binhquoc/alm-add-qwen-non")

processor = AutoProcessor.from_pretrained(model_id, cache_dir=os.getenv("CACHE_DIR"))
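
A minimal inference sketch built on the loaded model and processor, following the standard Qwen2-Audio chat-template flow (requires librosa). The audio path and prompt below are placeholders, not part of this repository.

import librosa

audio_path = "example.wav"  # hypothetical placeholder, replace with a real audio file
conversation = [
    {"role": "user", "content": [
        {"type": "audio", "audio_url": audio_path},
        {"type": "text", "text": "Describe this audio."},
    ]},
]

# Build the chat prompt and load the waveform at the processor's expected sampling rate
text = processor.apply_chat_template(conversation, add_generation_prompt=True, tokenize=False)
audio, _ = librosa.load(audio_path, sr=processor.feature_extractor.sampling_rate)

inputs = processor(text=text, audios=[audio], return_tensors="pt", padding=True)
inputs = inputs.to(model.device)

generate_ids = model.generate(**inputs, max_new_tokens=256)
generate_ids = generate_ids[:, inputs.input_ids.size(1):]  # strip the prompt tokens from the output

response = processor.batch_decode(generate_ids, skip_special_tokens=True)[0]
print(response)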