Merge pull request #152 from haraoka-screen/cbs
Fix some violations of coding rules
ikeuchi-screen authored Oct 28, 2024
2 parents 844c9ee + 0594bc8 commit 9816e23
Showing 2 changed files with 88 additions and 76 deletions.
58 changes: 29 additions & 29 deletions lingam/causal_based_simulator.py
@@ -2,16 +2,14 @@
from .ica_lingam import ICALiNGAM
from .bottom_up_parce_lingam import BottomUpParceLiNGAM
from .var_lingam import VARLiNGAM
from .longitudinal_lingam import LongitudinalLiNGAM

from abc import ABCMeta, abstractmethod
import numbers

import numpy as np
import pandas as pd
from scipy.special import expit

from sklearn.utils import check_array, check_scalar, check_random_state
from sklearn.utils import check_array, check_random_state
from sklearn.linear_model import LinearRegression, LogisticRegression
from sklearn.base import clone, is_regressor, is_classifier
from sklearn.pipeline import Pipeline
@@ -104,7 +102,7 @@ def run(
The key is the name of the target variable and the value is
a dictionary for that model. This dictionary contains three keys:
parent_names, coef and model. parent_names is the mandatory key and
its value is the list of parent names. coef and model are optional.
coef is the list of the coefficients of the parent variables, which
must be the same length as parent_names. The value of model must be
a trained machine learning instance.
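
A minimal sketch of the dictionary shape this docstring describes, assuming it documents the changing_models argument of run(); the variable names, coefficient values and the pre-trained model are illustrative, not taken from the repository:

import numpy as np
from sklearn.linear_model import LinearRegression

# Hypothetical pre-trained model used for the "model" path below.
rng = np.random.default_rng(0)
X_parents = rng.normal(size=(100, 2))
y = X_parents @ np.array([1.5, -0.5]) + rng.normal(size=100)
trained_regressor = LinearRegression().fit(X_parents, y)

changing_models = {
    # x1 is regenerated from x0 with a fixed coefficient (coef path).
    "x1": {"parent_names": ["x0"], "coef": [2.0]},
    # x2 is regenerated from x0 and x1 with an already-trained model (model path).
    "x2": {"parent_names": ["x0", "x1"], "model": trained_regressor},
}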
@@ -163,32 +161,29 @@ def _check_model_instance(self, model, var_name, discrete_endog_names):

if isinstance(model, Pipeline):
if not check_model_type(model.steps[-1][-1]):
raise RuntimeError(
"The last step in Pipeline should be an "
+ "instance of a regression/classification model."
)
mes = "The last step in Pipeline should be an "
mes += "instance of a regression/classification model."
raise RuntimeError(mes)
elif isinstance(model, BaseSearchCV):
if not check_model_type(model.get_params()["estimator"]):
raise RuntimeError(
"The type of the estimator shall be an "
+ "instance of a regression/classification model."
)
mes = "The type of the estimator shall be an "
mes += "instance of a regression/classification model."
raise RuntimeError(mes)
else:
if not check_model_type(model):
raise RuntimeError(
"The type of the estimator shall be an "
+ "instance of a regression/classification model."
)
mes = "The type of the estimator shall be an "
mes += "instance of a regression/classification model."
raise RuntimeError(mes)

if check_model_type == is_classifier:
try:
func = getattr(model, "predict_proba")
if not callable(func):
raise Exception
except Exception:
raise RuntimeError(
"Classification models shall have " + "predict_proba()."
)
mes = "Classification models shall have "
mes += "predict_proba()."
raise RuntimeError(mes)

def _check_models(self, models, endog_names, discrete_endog_names):
if models is None:
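
For context on what these checks accept, a sketch of model objects that would pass _check_model_instance, assuming standard scikit-learn estimators; this snippet is illustrative and not part of the commit:

from sklearn.linear_model import LinearRegression, LogisticRegression
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler

# A Pipeline is accepted only if its last step is a regression/classification model.
pipeline_model = Pipeline([("scale", StandardScaler()), ("reg", LinearRegression())])

# A BaseSearchCV object is accepted if its wrapped estimator is a
# regression/classification model; classifiers used for discrete variables
# must also expose predict_proba(), as LogisticRegression does.
search_model = GridSearchCV(LogisticRegression(), param_grid={"C": [0.1, 1.0]})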
@@ -258,7 +253,7 @@ def _check_changing_models(self, changing_models, endog_names, discrete_endog_na

for parent_name in model_info["parent_names"]:
if parent_name not in endog_names:
raise RuntimeError(f"Unknown name. ({name})")
raise RuntimeError(f"Unknown name. ({parent_name})")

if len(parent_names) == 0:
changing_models_[target_name] = {"parent_names": []}
@@ -278,10 +273,10 @@ def _check_changing_models(self, changing_models, endog_names, discrete_endog_na
model = _LinearRegression(coef)
else:
model = _LogisticRegression(coef)

changing_models_[target_name] = {"parent_names": parent_names, "model": model}
continue

# model key
if "model" not in model_info.keys() or model_info["model"] is None:
raise KeyError("model must be set when coef isn't set.")
@@ -574,7 +569,7 @@ def get_data(self, var_names):
index = self._endog_names.index(var_name)
var_indices.append(index)
data = self._X[:, var_indices]

return data

def get_causal_order(self, changing_edges=None):
@@ -592,8 +587,9 @@ def get_causal_order(self, changing_edges=None):

causal_order = self._calc_causal_order(causal_graph)
if causal_order is None:
raise ValueError("causal_graph updated by changing_models is cyclic."
+ "changing_models must be set so that causal graph does not cycle.")
mes = "causal_graph updated by changing_models is cyclic."
mes += " changing_models must be set so that causal graph does not cycle."
raise ValueError(mes)
causal_order = [self._endog_names[n] for n in causal_order]

return causal_order
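
As an illustration of the cycle handling above, a minimal sketch of the kind of ordering _calc_causal_order performs; the actual implementation is not shown in this hunk, and the orientation of the adjacency matrix (rows as effects, columns as causes) is an assumption:

import numpy as np

def calc_causal_order_sketch(causal_graph):
    # Kahn-style topological ordering; returns None when the graph is cyclic.
    B = np.asarray(causal_graph, dtype=float)
    remaining = list(range(B.shape[0]))
    order = []
    while remaining:
        # Pick a variable with no remaining parents.
        roots = [i for i in remaining if not np.any(B[i, remaining])]
        if not roots:
            return None  # cycle detected
        order.append(roots[0])
        remaining.remove(roots[0])
    return order

print(calc_causal_order_sketch([[0, 0], [1, 0]]))  # [0, 1]
print(calc_causal_order_sketch([[0, 1], [1, 0]]))  # None (cyclic)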
@@ -665,6 +661,7 @@ def _calc_causal_order(self, causal_graph):

return causal_order


class CBSIUnobsCommonCauseLiNGAM(CBSILiNGAM):
""" Class for data handling for BottomUpParceLiNGAM. """

@@ -676,13 +673,14 @@ def _check_causal_graph(self, causal_graph, X):
n_features = X.shape[1]
if causal_graph.shape != (n_features, n_features):
raise RuntimeError("The shape of causal_graph must be (n_features, n_features)")

causal_graph[np.isnan(causal_graph)] = 0
except Exception as e:
raise ValueError("causal_graph has an error: " + str(e))

return causal_graph


class CBSITimeSeriesLiNGAM(CBSILiNGAM):
""" Class for data handling for VARLiNGAM. """

@@ -743,7 +741,7 @@ def _make_var_names(self, causal_graph, X, is_discrete):
endog_names = []
for i in range(self._n_lags + 1):
if i == 0:
index_format = f"[t]"
index_format = "[t]"
else:
index_format = f"[t-{i}]"
endog_names += [name + index_format for name in endog_names_]
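
To make the naming scheme in this loop concrete, a small standalone sketch; the feature names x0 and x1 and the single lag are assumed for illustration:

endog_names_ = ["x0", "x1"]
n_lags = 1

endog_names = []
for i in range(n_lags + 1):
    # Contemporaneous variables get "[t]", lagged variables get "[t-1]", "[t-2]", ...
    index_format = "[t]" if i == 0 else f"[t-{i}]"
    endog_names += [name + index_format for name in endog_names_]

print(endog_names)  # ['x0[t]', 'x1[t]', 'x0[t-1]', 'x1[t-1]']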
@@ -758,6 +756,7 @@ def _make_var_names(self, causal_graph, X, is_discrete):

return endog_names, discrete_endog_names


class _LinearRegression():
""" Linear regression model with configurable coefficients """

@@ -771,7 +770,8 @@ def predict(self, X):

@property
def coef_(self):
return coef_
return self._coef


class _LogisticRegression():
""" Logistic regression model with configurable coefficients """
@@ -786,4 +786,4 @@ def predict(self, X):

@property
def coef_(self):
return coef_
return self._coef
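
Finally, a hedged reconstruction of what a fixed-coefficient helper looks like once the coef_ fix above is applied; the predict behavior is an assumption based on the class docstring, not something shown in this diff:

import numpy as np

class FixedCoefLinearRegression:
    """Hypothetical stand-in for _LinearRegression: user-supplied coefficients."""

    def __init__(self, coef):
        self._coef = np.asarray(coef)

    def predict(self, X):
        # Assumed behavior: a plain linear combination of the parent columns.
        return np.asarray(X) @ self._coef

    @property
    def coef_(self):
        # The commit's fix: return the stored attribute, not an undefined name.
        return self._coef

# A logistic counterpart would pass the same linear combination through
# scipy.special.expit, mirroring _LogisticRegression.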