Spaces: Running on Zero
Update inference.py
inference.py +12 -9
inference.py CHANGED

@@ -51,20 +51,23 @@ def sam_preprocess(
     '''
     assert img_size==1024, \
         "both SAM and Effi-SAM receive images of size 1024^2, don't change this setting unless you're sure that your employed model works well with another size."
-
-    resize_shape = x.shape[:2]
-    x = torch.from_numpy(x).permute(2,0,1).contiguous()
-
+
     # Normalize colors
-
-
-
-
+    if model_type=="ori":
+        x = ResizeLongestSide(img_size).apply_image(x)
+        h, w = resize_shape = x.shape[:2]
+        x = torch.from_numpy(x).permute(2,0,1).contiguous()
+        x = (x - pixel_mean) / pixel_std
         # Pad
-    h, w = x.shape[-2:]
         padh = img_size - h
         padw = img_size - w
         x = F.pad(x, (0, padw, 0, padh))
+    else:
+        x = torch.from_numpy(x).permute(2,0,1).contiguous()
+        x = F.interpolate(x.unsqueeze(0), (img_size, img_size), mode="bilinear", align_corners=False).squeeze(0)
+        x = (x - pixel_mean) / pixel_std
+        resize_shape = None
+
     return x, resize_shape
 
 def beit3_preprocess(x: np.ndarray, img_size=224) -> torch.Tensor:
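For reference, here is a minimal usage sketch of sam_preprocess after this change. It is an illustration only: it assumes inference.py is importable as a module, that model_type is a keyword parameter of the function (it is referenced in the added branch), that pixel_mean, pixel_std, and img_size keep their defaults, and that Pillow and an example image are available; the "effi" string is hypothetical, standing in for any model_type other than "ori".

# Usage sketch under the assumptions stated above; "example.jpg" and the
# "effi" model_type value are placeholders, not names taken from the repo.
import numpy as np
from PIL import Image

from inference import sam_preprocess  # assumes inference.py is on the path

image = np.array(Image.open("example.jpg").convert("RGB"))  # HWC uint8 array

# "ori" branch: longest side resized to 1024, channels-first, normalized,
# then zero-padded to 1024x1024; resize_shape keeps the pre-padding (h, w).
x_ori, resize_shape = sam_preprocess(image, model_type="ori")
print(x_ori.shape, resize_shape)    # torch.Size([3, 1024, 1024]) (h, w)

# Any other model_type: direct bilinear resize to 1024x1024, normalized, no
# padding, so resize_shape comes back as None. F.interpolate(mode="bilinear")
# needs a floating-point tensor, hence the float32 cast here.
x_effi, no_shape = sam_preprocess(image.astype(np.float32), model_type="effi")
print(x_effi.shape, no_shape)       # torch.Size([3, 1024, 1024]) None

Returning None in the second branch presumably signals to downstream postprocessing that no padding has to be cropped away before masks are mapped back to the original image size, whereas the "ori" branch needs resize_shape for exactly that step.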