CODE:
```python
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-dev")
prompt = "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k"
image = pipe(prompt).images[0]
```

ERROR:
```text
Traceback (most recent call last):
  File "/tmp/.cache/uv/environments-v2/b90b3a1935bc74f7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2343, in _from_pretrained
    tokenizer = cls(*init_inputs, **init_kwargs)
  File "/tmp/.cache/uv/environments-v2/b90b3a1935bc74f7/lib/python3.13/site-packages/transformers/models/t5/tokenization_t5_fast.py", line 119, in __init__
    super().__init__(
    ~~~~~~~~~~~~~~~~^
        vocab_file=vocab_file,
        ^^^^^^^^^^^^^^^^^^^^^^
    ...<7 lines>...
        **kwargs,
        ^^^^^^^^^
    )
    ^
  File "/tmp/.cache/uv/environments-v2/b90b3a1935bc74f7/lib/python3.13/site-packages/transformers/tokenization_utils_fast.py", line 108, in __init__
    raise ValueError(
    ...<2 lines>...
    )
ValueError: Cannot instantiate this tokenizer from a slow version. If it's based on sentencepiece, make sure you have sentencepiece installed.

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/tmp/black-forest-labs_FLUX.1-dev_1xD8pZN.py", line 18, in <module>
    pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-dev")
  File "/tmp/.cache/uv/environments-v2/b90b3a1935bc74f7/lib/python3.13/site-packages/huggingface_hub/utils/_validators.py", line 114, in _inner_fn
    return fn(*args, **kwargs)
  File "/tmp/.cache/uv/environments-v2/b90b3a1935bc74f7/lib/python3.13/site-packages/diffusers/pipelines/pipeline_utils.py", line 1025, in from_pretrained
    loaded_sub_model = load_sub_model(
        library_name=library_name,
        ...<21 lines>...
        quantization_config=quantization_config,
    )
  File "/tmp/.cache/uv/environments-v2/b90b3a1935bc74f7/lib/python3.13/site-packages/diffusers/pipelines/pipeline_loading_utils.py", line 860, in load_sub_model
    loaded_sub_model = load_method(os.path.join(cached_folder, name), **loading_kwargs)
  File "/tmp/.cache/uv/environments-v2/b90b3a1935bc74f7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2097, in from_pretrained
    return cls._from_pretrained(
    ~~~~~~~~~~~~~~~~~~~~^
        resolved_vocab_files,
        ^^^^^^^^^^^^^^^^^^^^^
    ...<9 lines>...
        **kwargs,
        ^^^^^^^^^
    )
    ^
  File "/tmp/.cache/uv/environments-v2/b90b3a1935bc74f7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2344, in _from_pretrained
    except import_protobuf_decode_error():
           ~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^
  File "/tmp/.cache/uv/environments-v2/b90b3a1935bc74f7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 87, in import_protobuf_decode_error
    raise ImportError(PROTOBUF_IMPORT_ERROR.format(error_message))
ImportError: requires the protobuf library but it was not found in your environment. Check out the instructions on the installation page of its repo: https://github.com/protocolbuffers/protobuf/tree/master/python#installation and follow the ones that match your environment. Please note that you may need to restart your runtime after installation.
```
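
FIX (suggested): The failure is an environment issue, not a bug in the snippet. FLUX.1-dev's T5 text encoder needs a SentencePiece-based tokenizer, and loading it requires the `sentencepiece` and `protobuf` packages, neither of which is installed here (the first traceback reports the missing `sentencepiece`, the second the missing `protobuf`). Below is a minimal sketch of a working setup after installing those packages; the `torch.bfloat16` dtype and `enable_model_cpu_offload()` call are optional memory-saving assumptions (a CUDA GPU with bf16 support), not part of the fix itself.

```python
# Assumed fix: install the missing tokenizer dependencies first:
#   pip install sentencepiece protobuf

import torch
from diffusers import DiffusionPipeline

# bfloat16 roughly halves memory use; assumes a GPU with bf16 support.
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
)
pipe.enable_model_cpu_offload()  # optional: park idle submodels in CPU RAM

prompt = "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k"
image = pipe(prompt).images[0]
image.save("astronaut.png")
```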