# pip install unsloth transformers gradio pillow torch bitsandbytes
"""Gradio app: upload a plant photo, get a disease diagnosis from a
Hugging Face image-text-to-text model (EpistemeAI/PD_gemma-3n-E4B-v2)."""

import os

# 1) Read & validate the Hugging Face token before doing anything else,
#    so a missing token fails fast with a clear message.
hf_token = os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_HUB_TOKEN")
if not hf_token:
    raise ValueError("Please set HF_TOKEN (or HUGGINGFACE_HUB_TOKEN) in your environment")

# 2) Make the token available to huggingface_hub / transformers.
from huggingface_hub import login

login(token=hf_token)
os.environ["HUGGINGFACE_HUB_TOKEN"] = hf_token

# 3) Workaround for a libgomp thread issue. NOTE: this must be set BEFORE
#    torch (and anything that imports it) is loaded, which is why the
#    heavy imports below intentionally come after this line.
os.environ["OMP_NUM_THREADS"] = "1"

import gradio as gr
import torch
from PIL import Image
from transformers import pipeline

# 4) Initialize the pipeline. `use_auth_token` is deprecated (removed in
#    transformers v5); pass the token explicitly via `token=`.
pipe = pipeline(
    task="image-text-to-text",
    model="EpistemeAI/PD_gemma-3n-E4B-v2",
    token=hf_token,
)


def convert_image_to_text(image: Image.Image) -> str:
    """Run the plant-disease model on *image* and return its text analysis.

    Args:
        image: PIL image supplied by the Gradio ``Image`` component; may be
            ``None`` if the user submits without uploading.

    Returns:
        The model's generated diagnostic text (prompt excluded, since
        ``return_full_text=False``), or a short prompt-the-user message
        when no image was provided.
    """
    if image is None:
        return "Please upload an image first."

    # Chat-style message: one user turn containing the image plus the
    # fixed diagnostic instruction.
    messages = [
        {
            "role": "user",
            "content": [
                {"type": "image", "image": image},
                {"type": "text", "text": "Analyze the plant disease and provide solution"},
            ],
        }
    ]
    response = pipe(
        text=messages,
        return_full_text=False,
        max_new_tokens=700,
    )
    return response[0]["generated_text"]


# 5) Gradio app wiring: single image in, single text report out.
demo = gr.Interface(
    fn=convert_image_to_text,
    inputs=gr.Image(type="pil", label="Upload an image"),
    outputs=gr.Textbox(label="Analysis"),
    title="Plant Disease Scientific Analyzer",
    description="Upload a plant image and receive a diagnostic report.",
)

if __name__ == "__main__":
    demo.launch()