brucewayne0459 committed on
Commit 74236c9
1 Parent(s): 706d7cb

Update README.md

Files changed (1)
  1. README.md +24 -1
README.md CHANGED
@@ -43,11 +43,34 @@ The model can be directly used for analyzing dermatology images, providing insig
 
 ```python
 
+import torch
 from transformers import AutoProcessor, PaliGemmaForConditionalGeneration
+from PIL import Image
 
+# Load the model and processor
 model_id = "brucewayne0459/paligemma_derm"
 processor = AutoProcessor.from_pretrained(model_id)
-model = PaliGemmaForConditionalGeneration.from_pretrained(model_id)
+model = PaliGemmaForConditionalGeneration.from_pretrained(model_id, device_map={"": 0})
+model.eval()
+
+# Load a sample image and text input
+input_text = "Identify the skin condition?"
+input_image_path = " Replace with your actual image path"
+input_image = Image.open(input_image_path).convert("RGB")
+
+# Process the input
+inputs = processor(text=input_text, images=input_image, return_tensors="pt", padding="longest").to("cuda" if torch.cuda.is_available() else "cpu")
+
+# Set the maximum length for generation
+max_new_tokens = 50
+
+# Run inference
+with torch.no_grad():
+    outputs = model.generate(**inputs, max_new_tokens=max_new_tokens)
+
+# Decode the output
+decoded_output = processor.decode(outputs[0], skip_special_tokens=True)
+print("Model Output:", decoded_output)
 ```
 ## Training Details
 
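One nuance in the added snippet: the model is placed on GPU 0 via `device_map={"": 0}`, while the inputs fall back to CPU when CUDA is unavailable, so model and inputs can end up on different devices. The sketch below is a device-agnostic variant of the same load-and-generate flow, not the committed code: the image path is a placeholder, and trimming the echoed prompt tokens before decoding is an optional extra step.

```python
import torch
from PIL import Image
from transformers import AutoProcessor, PaliGemmaForConditionalGeneration

model_id = "brucewayne0459/paligemma_derm"
device = "cuda" if torch.cuda.is_available() else "cpu"

# Load the processor and model, then move the model to whichever device is available
processor = AutoProcessor.from_pretrained(model_id)
model = PaliGemmaForConditionalGeneration.from_pretrained(model_id).to(device)
model.eval()

# "path/to/skin_image.jpg" is a placeholder; substitute a real dermatology image
image = Image.open("path/to/skin_image.jpg").convert("RGB")
inputs = processor(text="Identify the skin condition?", images=image, return_tensors="pt").to(device)

# Generate, then decode only the newly produced tokens (the prompt is echoed in the output)
with torch.no_grad():
    outputs = model.generate(**inputs, max_new_tokens=50)
generated = outputs[0][inputs["input_ids"].shape[-1]:]
print("Model Output:", processor.decode(generated, skip_special_tokens=True))
```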