I am running the example notebooks in Databricks to copy an MLflow run to another Databricks workspace.

Databricks cluster: Databricks Runtime 13.3 LTS ML
mlflow.__version__: 2.5.0

Code
from mlflow_export_import.copy.copy_run import copy
dst_run = copy(src_run_id, dst_experiment_name, "databricks", dst_run_workspace)
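For reference, here is the same call in self-contained form with the inputs spelled out (values are the ones listed in the Inputs section below; going by the copy() signature shown in the traceback, the third argument is the source tracking URI and the fourth is the destination):

from mlflow_export_import.copy.copy_run import copy

# Run ID taken from the export log below; names match the Inputs section.
src_run_id = "7b9511eea1e64cfab0c8d8b61dac6cea"
dst_experiment_name = "/mlflow_experiments/test"
dst_run_workspace = "databricks://us_test:mlflow_central"

# "databricks" = source (current workspace), dst_run_workspace = destination.
dst_run = copy(src_run_id, dst_experiment_name, "databricks", dst_run_workspace)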
Inputs

dst_experiment_name = /mlflow_experiments/test
dst_run_workspace = databricks://us_test:mlflow_central
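As a sanity check on the destination URI (my understanding is that the databricks://<scope>:<prefix> form resolves the host and token from the secrets <prefix>-host and <prefix>-token in scope <scope>), a read-only call through the same URI should show whether the credentials authenticate at all:

from mlflow import MlflowClient

# Same destination tracking URI as dst_run_workspace above.
dst_client = MlflowClient(tracking_uri="databricks://us_test:mlflow_central")
# Read-only call exercising the same credentials as create_experiment.
print(dst_client.search_experiments(max_results=1))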
Logs
20-Feb-24 18:08:52 - INFO - Exporting run: {'run_id': '7b9511eea1e64cfab0c8d8b61dac6cea', 'lifecycle_stage': 'active', 'experiment_id': '3355151502358774'}
20-Feb-24 18:08:53 - WARNING - No notebooks to export for run '7b9511eea1e64cfab0c8d8b61dac6cea' since tag 'mlflow.databricks.notebookPath' is not set.
20-Feb-24 18:08:53 - INFO - Importing run from '/tmp/tmpwvk_29vl'
20-Feb-24 18:08:53 - INFO - Creating Databricks workspace directory '/mlflow_experiments'
Error
ValueError: Enum ErrorCode has no value defined for name 403
ValueError                                Traceback (most recent call last)
File, line 3
      1 from mlflow_export_import.copy.copy_run import copy
----> 3 dst_run = copy(src_run_id, dst_experiment_name, "databricks", dst_run_workspace)
File /Workspace/Repos/****/mlflow-export-import/mlflow_export_import/copy/copy_run.py:32, in copy(src_run_id, dst_experiment_name, src_mlflow_uri, dst_mlflow_uri)
     15 def copy(
     16     src_run_id,
     17     dst_experiment_name,
     18     src_mlflow_uri = None,
     19     dst_mlflow_uri = None
     20 ):
     21     """
     22     Copies a run to another tracking server (workspace).
    (...)
     29     :return: Destination Run object.
     30     """
---> 32 return _copy(src_run_id, dst_experiment_name,
     33     copy_utils.mk_client(src_mlflow_uri),
     34     copy_utils.mk_client(dst_mlflow_uri)
     35 )

File /Workspace/Repos/**/mlflow-export-import/mlflow_export_import/copy/copy_run.py:48, in _copy(src_run_id, dst_experiment_name, src_client, dst_client)
     41 with tempfile.TemporaryDirectory() as download_dir:
     42     export_run(
     43         src_run_id,
     44         download_dir,
     45         notebook_formats = [ "SOURCE" ],
     46         mlflow_client = src_client
     47     )
---> 48 dst_run, _ = import_run(
     49     download_dir,
     50     dst_experiment_name,
     51     mlflow_client = dst_client
     52 )
     53 return dst_run

File /Workspace/Repos/****/mlflow-export-import/mlflow_export_import/run/import_run.py:67, in import_run(input_dir, experiment_name, import_source_tags, dst_notebook_dir, use_src_user_id, mlmodel_fix, mlflow_client)
     63 dbx_client = create_dbx_client(mlflow_client)
     65 _logger.info(f"Importing run from '{input_dir}'")
---> 67 exp = mlflow_utils.set_experiment(mlflow_client, dbx_client, experiment_name)
     68 src_run_path = os.path.join(input_dir, "run.json")
     69 src_run_dct = io_utils.read_file_mlflow(src_run_path)

File /Workspace/Repos/*****/mlflow-export-import/mlflow_export_import/common/mlflow_utils.py:35, in set_experiment(mlflow_client, dbx_client, exp_name, tags)
     33 if not tags: tags = {}
     34 tags = utils.create_mlflow_tags_for_databricks_import(tags)
---> 35 exp_id = mlflow_client.create_experiment(exp_name, tags=tags)
     36 exp = mlflow_client.get_experiment(exp_id)
     37 _logger.info(f"Created experiment '{exp.name}' with location '{exp.artifact_location}'")

File /databricks/python/lib/python3.10/site-packages/mlflow/tracking/client.py:557, in MlflowClient.create_experiment(self, name, artifact_location, tags)
    509 def create_experiment(
    510     self,
    511     name: str,
    512     artifact_location: Optional[str] = None,
    513     tags: Optional[Dict[str, Any]] = None,
    514 ) -> str:
    515     """Create an experiment.
    516
    517     :param name: The experiment name. Must be unique.
    (...)
    555     Lifecycle_stage: active
    556     """
---> 557 return self._tracking_client.create_experiment(name, artifact_location, tags)

File /databricks/python/lib/python3.10/site-packages/mlflow/tracking/_tracking_service/client.py:236, in TrackingServiceClient.create_experiment(self, name, artifact_location, tags)
    225 """Create an experiment.
    226
    227 :param name: The experiment name. Must be unique.
    (...)
    232 :return: Integer ID of the created experiment.
    233 """
    234 _validate_experiment_artifact_location(artifact_location)
---> 236 return self.store.create_experiment(
    237     name=name,
    238     artifact_location=artifact_location,
    239     tags=[ExperimentTag(key, value) for (key, value) in tags.items()] if tags else [],
    240 )

File /databricks/python/lib/python3.10/site-packages/mlflow/store/tracking/rest_store.py:98, in RestStore.create_experiment(self, name, artifact_location, tags)
     94 tag_protos = [tag.to_proto() for tag in tags] if tags else []
     95 req_body = message_to_json(
     96     CreateExperiment(name=name, artifact_location=artifact_location, tags=tag_protos)
     97 )
---> 98 response_proto = self._call_endpoint(CreateExperiment, req_body)
     99 return response_proto.experiment_id

File /databricks/python/lib/python3.10/site-packages/mlflow/store/tracking/rest_store.py:59, in RestStore._call_endpoint(self, api, json_body)
     57 endpoint, method = _METHOD_TO_INFO[api]
     58 response_proto = api.Response()
---> 59 return call_endpoint(self.get_host_creds(), endpoint, method, json_body, response_proto)

File /databricks/python/lib/python3.10/site-packages/mlflow/utils/rest_utils.py:203, in call_endpoint(host_creds, endpoint, method, json_body, response_proto, extra_headers)
    201     call_kwargs["json"] = json_body
    202 response = http_request(**call_kwargs)
---> 203 response = verify_rest_response(response, endpoint)
    204 js_dict = json.loads(response.text)
    205 parse_dict(js_dict=js_dict, message=response_proto)

File /databricks/python/lib/python3.10/site-packages/mlflow/utils/rest_utils.py:135, in verify_rest_response(response, endpoint)
    133 if response.status_code != 200:
    134     if _can_parse_as_json_object(response.text):
---> 135         raise RestException(json.loads(response.text))
    136     else:
    137         base_msg = (
    138             f"API request to endpoint {endpoint} "
    139             f"failed with error code {response.status_code} != 200"
    140         )

File /databricks/python/lib/python3.10/site-packages/mlflow/exceptions.py:122, in RestException.__init__(self, json)
    117 error_code = json.get("error_code", ErrorCode.Name(INTERNAL_ERROR))
    118 message = "{}: {}".format(
    119     error_code,
    120     json["message"] if "message" in json else "Response: " + str(json),
    121 )
---> 122 super().__init__(message, error_code=ErrorCode.Value(error_code))
    123 self.json = json

File /databricks/python/lib/python3.10/site-packages/google/protobuf/internal/enum_type_wrapper.py:73, in EnumTypeWrapper.Value(self, name)
     71 except KeyError:
     72     pass  # fall out to break exception chaining
---> 73 raise ValueError('Enum {} has no value defined for name {!r}'.format(
     74     self._enum_type.name, name))
ValueError: Enum ErrorCode has no value defined for name 403
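If I read the last two frames correctly, the destination workspace returned an HTTP 403 whose JSON error_code field is the literal string "403" rather than an MLflow error name such as PERMISSION_DENIED, so ErrorCode.Value("403") raises and masks what looks like an underlying permission/credentials failure on create_experiment. Here is a sketch I could use to inspect the raw response directly (it assumes the secret-name convention mentioned above, and runs in a Databricks notebook where dbutils is available):

import requests

# Assumed secret names per the databricks://<scope>:<prefix> convention.
host = dbutils.secrets.get(scope="us_test", key="mlflow_central-host")
token = dbutils.secrets.get(scope="us_test", key="mlflow_central-token")

# Call the same MLflow REST endpoint that create_experiment uses.
resp = requests.post(
    f"{host.rstrip('/')}/api/2.0/mlflow/experiments/create",
    headers={"Authorization": f"Bearer {token}"},
    json={"name": "/mlflow_experiments/test"},
)
# Expect a 403 whose JSON body carries error_code "403".
print(resp.status_code, resp.text)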
Can anyone help with this error?