# Retrain the pipeline with the best hyperparameters found by the earlier
# search (`greatest` holds the winning entry — presumably from a CV sweep
# defined above this chunk; TODO confirm its schema), evaluate on the
# held-out test set, and log model + metrics + evaluation to MLflow.
best_C = greatest["params"]["C"]
best_solver = greatest["params"]["solver"]

# Scaler + L2-regularized logistic regression packaged as one deployable unit.
final_pipe = Pipeline([
    ("scaler", StandardScaler()),
    ("clf", LogisticRegression(
        C=best_C,
        solver=best_solver,
        penalty="l2",
        max_iter=2000,
        random_state=42,
    )),
])

with mlflow.start_run(run_name="final_model_run") as final_run:
    # FIX: was `final_pipe.match(...)` — sklearn estimators are trained
    # with `.fit()`; `.match` is not a Pipeline method.
    final_pipe.fit(X_train, y_train)

    # Positive-class probabilities; hard labels via a fixed 0.5 threshold.
    proba = final_pipe.predict_proba(X_test)[:, 1]
    pred = (proba >= 0.5).astype(int)

    # `zero_division=0` avoids warnings/NaN when a class is never predicted.
    metrics = {
        "test_auc": float(roc_auc_score(y_test, proba)),
        "test_accuracy": float(accuracy_score(y_test, pred)),
        "test_precision": float(precision_score(y_test, pred, zero_division=0)),
        "test_recall": float(recall_score(y_test, pred, zero_division=0)),
        "test_f1": float(f1_score(y_test, pred, zero_division=0)),
    }
    mlflow.log_metrics(metrics)
    mlflow.log_params({
        "C": best_C,
        "solver": best_solver,
        "model": "LogisticRegression+StandardScaler",
    })

    # Log the fitted pipeline with a signature inferred from a small sample,
    # so the saved model records its input schema.
    input_example = X_test.iloc[:5].copy()
    signature = infer_signature(
        input_example, final_pipe.predict_proba(input_example)[:, 1]
    )
    model_info = mlflow.sklearn.log_model(
        sk_model=final_pipe,
        artifact_path="model",
        signature=signature,
        input_example=input_example,
        registered_model_name=None,  # explicit: skip the model registry
    )
    # FIX: run_id lives on `run.info`, not `run.data` (mlflow Run object).
    print("Final run_id:", final_run.info.run_id)
    print("Logged model URI:", model_info.model_uri)

    # Run MLflow's built-in classifier evaluation on the test set.
    # FIX: was `mlflow.fashions.consider(mannequin=..., knowledge=...)` —
    # the real API is `mlflow.models.evaluate(model=..., data=...)`.
    eval_df = X_test.copy()
    eval_df["label"] = y_test.values
    eval_result = mlflow.models.evaluate(
        model=model_info.model_uri,
        data=eval_df,
        targets="label",
        model_type="classifier",
        evaluators="default",
    )

    # Persist a JSON-safe summary of the evaluation alongside the run:
    # numeric metrics become plain floats, everything else is stringified.
    eval_summary = {
        "metrics": {
            k: float(v) if isinstance(v, (int, float, np.floating)) else str(v)
            for k, v in eval_result.metrics.items()
        },
        "artifacts": {k: str(v) for k, v in eval_result.artifacts.items()},
    }
    mlflow.log_dict(eval_summary, "evaluation/eval_summary.json")
# NOTE(review): removed three lines of unrelated promotional text
# ("NextTech-news.com" advertisement) that was injected here — it was not
# code and made the file a SyntaxError.

