{
"base_model": "black-forest-labs/FLUX.2-dev",
"lora_path": "fal/FLUX.2-dev-Turbo",
"lora_nickname": "default",
"lora_strength": 1.0,
"is_prequantized": true,
"quantization": {
"type": "fp4",
"weight_block_size": 16,
"quantize_activations": true
},
"components": {
"text_encoder": {
"file": "text_encoder.safetensors",
"fp4_layers": 280,
"regular_layers": 82,
"size_gb": 12.896
},
"vae": {
"file": "vae.safetensors",
"fp4_layers": 0,
"regular_layers": 124,
"size_gb": 0.313
},
"transformer": {
"file": "transformer.safetensors",
"fp4_layers": 198,
"regular_layers": 133,
"size_gb": 17.087
}
},
"total_fp4_layers": 478,
"total_regular_layers": 339
}