Remove dill package from Ultralytics (#16574)
Signed-off-by: UltralyticsAssistant <web@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
parent 060214d066
commit c327b0aae1
5 changed files with 5 additions and 18 deletions
```diff
@@ -377,7 +377,7 @@ class Model(nn.Module):
             self.model.load(weights)
         return self
 
-    def save(self, filename: Union[str, Path] = "saved_model.pt", use_dill=True) -> None:
+    def save(self, filename: Union[str, Path] = "saved_model.pt") -> None:
         """
         Saves the current model state to a file.
 
```
```diff
@@ -386,7 +386,6 @@ class Model(nn.Module):
 
         Args:
             filename (Union[str, Path]): The name of the file to save the model to.
-            use_dill (bool): Whether to try using dill for serialization if available.
 
         Raises:
             AssertionError: If the model is not a PyTorch model.
```
```diff
@@ -408,7 +407,7 @@ class Model(nn.Module):
             "license": "AGPL-3.0 License (https://ultralytics.com/license)",
             "docs": "https://docs.ultralytics.com",
         }
-        torch.save({**self.ckpt, **updates}, filename, use_dill=use_dill)
+        torch.save({**self.ckpt, **updates}, filename)
 
     def info(self, detailed: bool = False, verbose: bool = True):
         """
```
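Taken together, the three hunks above drop the `use_dill` keyword from `Model.save()`, which now always serializes through plain `torch.save`. A minimal usage sketch after this change (model and file names are illustrative):

```python
from ultralytics import YOLO

model = YOLO("yolov8n.pt")
model.save("saved_model.pt")  # serialized via torch.save; the use_dill keyword no longer exists
```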
```diff
@@ -210,8 +210,6 @@ def _build_sam(
             state_dict = torch.load(f)
         sam.load_state_dict(state_dict)
     sam.eval()
-    # sam.load_state_dict(torch.load(checkpoint), strict=True)
-    # sam.eval()
     return sam
 
 
```
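This hunk only deletes commented-out dead code; the live checkpoint path (`torch.load` → `load_state_dict` → `eval`) is unchanged. For reference, the same path is exercised through the public entry point (checkpoint name illustrative, assuming the usual `SAM` export):

```python
from ultralytics import SAM

model = SAM("sam_b.pt")  # _build_sam loads the checkpoint and calls eval() internally
```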
```diff
@@ -219,4 +219,4 @@ def update_models(model_names=("yolov8n.pt",), source_dir=Path("."), update_name
 
             # Save model using model.save()
             print(f"Re-saving {model_name} model to {save_path}")
-            model.save(save_path, use_dill=False)
+            model.save(save_path)
```
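A sketch of what one iteration of this re-save loop now does, with names and paths invented for illustration rather than taken from the diff:

```python
from pathlib import Path

from ultralytics import YOLO

# Hypothetical single-file version of the update_models loop above.
model = YOLO("yolov8n.pt")
save_path = Path("updated_models") / "yolov8n.pt"
save_path.parent.mkdir(parents=True, exist_ok=True)
print(f"Re-saving yolov8n.pt model to {save_path}")
model.save(save_path)  # no use_dill keyword after this commit
```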
```diff
@@ -86,25 +86,15 @@ def torch_load(*args, **kwargs):
     return _torch_load(*args, **kwargs)
 
 
-def torch_save(*args, use_dill=True, **kwargs):
+def torch_save(*args, **kwargs):
     """
     Optionally use dill to serialize lambda functions where pickle does not, adding robustness with 3 retries and
     exponential standoff in case of save failure.
 
     Args:
         *args (tuple): Positional arguments to pass to torch.save.
-        use_dill (bool): Whether to try using dill for serialization if available. Defaults to True.
         **kwargs (Any): Keyword arguments to pass to torch.save.
     """
-    try:
-        assert use_dill
-        import dill as pickle
-    except (AssertionError, ImportError):
-        import pickle
-
-    if "pickle_module" not in kwargs:
-        kwargs["pickle_module"] = pickle
-
     for i in range(4):  # 3 retries
         try:
             return _torch_save(*args, **kwargs)
```
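The hunk cuts off before the exception branch, so here is a self-contained sketch of the retry-with-backoff pattern the surviving loop implements (in the patch, `_torch_save` is a saved reference to the original `torch.save`; the helper name and exact sleep schedule below are illustrative, not from the diff):

```python
import time


def save_with_retries(save_fn, *args, retries=3, **kwargs):
    """Hypothetical helper mirroring the torch_save retry pattern above."""
    for i in range(retries + 1):  # initial attempt plus `retries` retries
        try:
            return save_fn(*args, **kwargs)
        except RuntimeError:  # e.g. device still flushing or file temporarily locked
            if i == retries:
                raise  # out of retries, surface the error
            time.sleep((2**i) / 2)  # exponential backoff: 0.5s, 1s, 2s
```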
```diff
@@ -595,7 +595,7 @@ def strip_optimizer(f: Union[str, Path] = "best.pt", s: str = "", updates: dict
 
     # Save
     combined = {**metadata, **x, **(updates or {})}
-    torch.save(combined, s or f, use_dill=False)  # combine dicts (prefer to the right)
+    torch.save(combined, s or f)  # combine dicts (prefer to the right)
     mb = os.path.getsize(s or f) / 1e6  # file size
     LOGGER.info(f"Optimizer stripped from {f},{f' saved as {s},' if s else ''} {mb:.1f}MB")
     return combined
```
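With the keyword gone, callers of `strip_optimizer` are unaffected; a typical call looks like the sketch below (import path assumed from the function's home in `ultralytics.utils.torch_utils`, file path illustrative):

```python
from ultralytics.utils.torch_utils import strip_optimizer

strip_optimizer("best.pt")  # rewrites best.pt in place without optimizer state
```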