from core.shap_analysis import SHAPAnalyzer


class SHAPService:
    """SHAP explainability analysis service.

    Thin facade over :class:`SHAPAnalyzer`. The analyzer is constructed
    lazily on first use so that importing this module stays cheap and
    no model/data loading happens until an explanation is requested.
    """

    def __init__(self):
        # Underlying analyzer; created on demand by _ensure_analyzer().
        self._analyzer = None

    def _ensure_analyzer(self):
        """Create the underlying :class:`SHAPAnalyzer` on first use."""
        if self._analyzer is None:
            self._analyzer = SHAPAnalyzer()

    def get_global_importance(self, model_type='random_forest'):
        """Return global SHAP values for the given model type.

        Delegates to ``SHAPAnalyzer.global_shap_values``.
        """
        self._ensure_analyzer()
        return self._analyzer.global_shap_values(model_type)

    def get_local_explanation(self, data, model_type='random_forest'):
        """Return a local (per-sample) SHAP explanation for *data*.

        Delegates to ``SHAPAnalyzer.local_shap_values``.
        """
        self._ensure_analyzer()
        return self._analyzer.local_shap_values(data, model_type)

    def get_interactions(self, model_type='random_forest', top_n=10):
        """Return SHAP interaction values (top *top_n* pairs).

        Delegates to ``SHAPAnalyzer.shap_interaction``.
        """
        self._ensure_analyzer()
        return self._analyzer.shap_interaction(model_type, top_n)

    def get_dependence(self, feature_name, model_type='random_forest'):
        """Return SHAP dependence data for *feature_name*.

        Delegates to ``SHAPAnalyzer.shap_dependence``.
        """
        self._ensure_analyzer()
        return self._analyzer.shap_dependence(feature_name, model_type)


# Module-level singleton shared by callers of this service.
shap_service = SHAPService()