Skip to content

Commit

Permalink
cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
dakinggg committed Sep 29, 2023
1 parent ed4eaf4 commit d2f88b7
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions llmfoundry/callbacks/hf_checkpointer.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,9 +39,10 @@ class HuggingFaceCheckpointer(Callback):
huggingface_folder_name (str): Folder to save each checkpoint under (can be a format string). Default is ``ba{batch}``.
precision: The precision to save the model in. Default is ``float32``. Options are ``bfloat16``, ``float16``, or ``float32``.
overwrite (bool): Whether to overwrite previous checkpoints.
log_to_mlflow (bool): Whether to log and register the checkpoint to MLflow. Default is ``False``.
mlflow_logging_config (Optional[dict]): A dictionary of config arguments that will get passed along to the MLflow ``log_model`` call.
log_to_mlflow (bool): Whether to register the model to MLflow. This will only register one model at the end of training. Default is ``False``.
mlflow_logging_config (Optional[dict]): A dictionary of config arguments that will get passed along to the MLflow ``save_model`` call.
        Expected to contain ``metadata`` and ``task`` keys. If either is unspecified, the defaults are ``{'task': 'llm/v1/completions'}`` and
            ``'text-generation'`` respectively.
"""

def __init__(
Expand Down Expand Up @@ -120,7 +121,6 @@ def run_event(self, event: Event, state: State, logger: Logger) -> None:
import mlflow
mlflow.environment_variables.MLFLOW_HUGGINGFACE_MODEL_MAX_SHARD_SIZE.set(
'5GB')
# mlflow.set_registry_uri('databricks-uc')

def _save_checkpoint(self, state: State, logger: Logger):
del logger # unused
Expand Down

0 comments on commit d2f88b7

Please sign in to comment.