Fix FastSAM text prompt results (#6510)
parent 830037e0ce
commit 0c4e97443b
1 changed file with 1 addition and 1 deletion
@@ -342,7 +342,7 @@ class FastSAMPrompt:
         max_idx = scores.argsort()
         max_idx = max_idx[-1]
         max_idx += sum(np.array(filter_id) <= int(max_idx))
-        self.results[0].masks.data = torch.tensor(np.array([ann['segmentation'] for ann in annotations]))
+        self.results[0].masks.data = torch.tensor(np.array([annotations[max_idx]['segmentation']]))
         return self.results

     def everything_prompt(self):
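Before this fix, text_prompt wrote every candidate segmentation back into self.results[0].masks.data, so the CLIP text score had no effect on the masks that were returned; the one-line change keeps only annotations[max_idx], the single mask that best matches the text query. A minimal usage sketch of the text-prompt path follows (import paths, weights file, image path, and prompt string follow the Ultralytics FastSAM docs of this era and are placeholder assumptions, not part of this commit):

from ultralytics import FastSAM
from ultralytics.models.fastsam import FastSAMPrompt

# Stage 1: "segment everything" inference (weights file and image path are placeholders).
model = FastSAM('FastSAM-s.pt')
everything_results = model('image.jpg', device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9)

# Stage 2: rank the candidate masks against a text query with CLIP.
prompt_process = FastSAMPrompt('image.jpg', everything_results, device='cpu')
results = prompt_process.text_prompt(text='a photo of a dog')

# With the fix, masks.data holds a single mask (the best CLIP match)
# instead of one mask per candidate annotation.
print(results[0].masks.data.shape)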