import numpy as np


class MyLogisticRegression02:
    """Binary logistic regression trained with full-batch gradient descent."""
 
    def __init__(self, learning_rate=0.01, max_iter=10000):
        self.learning_rate = learning_rate
        self.max_iter = max_iter
        self.weights = None  # initialized lazily once the feature count is known
        self.bias = 0
 
    def initialize_weights(self, n_features):
        # Start from zeros; any constant start works since the loss is convex.
        self.weights = np.zeros(n_features)
        self.bias = 0
 
    def sigmoid(self, z):
        # Clip z so np.exp(-z) cannot overflow for large-magnitude inputs.
        z = np.clip(z, -500, 500)
        return 1 / (1 + np.exp(-z))
 
    def get_gradient(self, x, y):
        # Gradient of the mean cross-entropy loss:
        #   dL/dw = (1/m) * X^T (sigmoid(Xw + b) - y),  dL/db = (1/m) * sum(errors)
        m = x.shape[0]
        scores = np.matmul(x, self.weights) + self.bias  # linear logits, not yet probabilities

        errors = self.sigmoid(scores) - y

        weight_gradient = np.matmul(x.T, errors)
        bias_gradient = np.sum(errors)

        return weight_gradient / m, bias_gradient / m
 
 
    def fit(self, x_train, y_train):
        n_features = x_train.shape[1]

        if self.weights is None:
            self.initialize_weights(n_features)

        # Full-batch gradient descent for a fixed number of iterations.
        for _ in range(self.max_iter):
            gradient_weights, gradient_bias = self.get_gradient(x_train, y_train)
            self.weights = self.weights - self.learning_rate * gradient_weights
            self.bias = self.bias - self.learning_rate * gradient_bias
 
    def predict(self, x_test):
        scores = np.matmul(x_test, self.weights) + self.bias
        probabilities = self.sigmoid(scores)
        # Threshold the probabilities at 0.5 to get hard 0/1 class labels.
        return (probabilities >= 0.5).reshape(-1).astype(int)
 
    def get_weights(self):
        # Report the learned parameters.
        print("Bias (intercept):", self.bias)
        print("Weights (coefficients):", self.weights)