class MyLogisticRegression:
    """One-vs-rest multiclass logistic regression trained with batch gradient descent.

    One binary classifier (weight vector + bias) is fitted per distinct class
    label; `predict` returns the label whose classifier assigns the highest
    probability.

    Parameters
    ----------
    learning_rate : float
        Gradient-descent step size.
    max_iter : int
        Number of full-batch gradient-descent iterations per binary classifier.
    """

    def __init__(self, learning_rate=0.01, max_iter=1000):
        self.learning_rate = learning_rate
        self.max_iter = max_iter
        self.weights = None       # parameters of the binary classifier currently being trained
        self.bias = 0
        self.all_weights = []     # one weight vector per class (filled by fit)
        self.all_biases = []      # one bias per class (filled by fit)
        self.unique_classes = 0   # number of distinct labels seen in fit
        self.classes_ = None      # the distinct label values themselves, sorted

    def initialize_weights(self, n_features):
        """Reset the current binary classifier's parameters to zero."""
        self.weights = np.zeros(n_features)
        self.bias = 0

    def sigmoid(self, z):
        """Logistic function, clipped to avoid overflow in np.exp for large |z|."""
        z = np.clip(z, -500, 500)
        return 1 / (1 + np.exp(-z))

    def get_gradient(self, x, y):
        """Return (dW, db): mean binary cross-entropy gradients over the batch.

        x : (m, n_features) design matrix; y : (m,) array of 0/1 targets.
        """
        m = x.shape[0]
        # d(loss)/d(logit) for logistic regression is simply (prediction - target).
        errors = self.sigmoid(x @ self.weights + self.bias) - y
        return (x.T @ errors) / m, np.sum(errors) / m

    def binary_fit(self, x_train, y_train):
        """Train a single one-vs-rest binary classifier by gradient descent.

        Returns the trained (weights, bias) pair. Initializes parameters to
        zero unless they were already set (allows warm starts).
        """
        n_features = x_train.shape[1]
        if self.weights is None:
            self.initialize_weights(n_features)

        for _ in range(self.max_iter):
            grad_w, grad_b = self.get_gradient(x_train, y_train)
            self.weights = self.weights - self.learning_rate * grad_w
            self.bias = self.bias - self.learning_rate * grad_b
        return self.weights, self.bias

    def fit(self, x_train, y_train):
        """Fit one binary classifier per distinct label (one-vs-rest).

        Fixes vs. the previous version:
        - resets all_weights/all_biases so refitting does not accumulate
          stale classifiers from an earlier fit;
        - iterates the actual label values from np.unique, so labels need
          not be the contiguous integers 0..k-1.
        """
        self.classes_ = np.unique(y_train)
        self.unique_classes = len(self.classes_)
        self.all_weights = []
        self.all_biases = []

        for cls in self.classes_:
            new_y = (y_train == cls).astype(int)
            self.weights = None  # fresh parameters for each one-vs-rest problem
            weights, bias = self.binary_fit(x_train, new_y)
            self.all_weights.append(weights)
            self.all_biases.append(bias)

    def predict_single_class(self, x_test, weights, bias):
        """Probability that each row of x_test belongs to one given class."""
        return self.sigmoid(np.dot(x_test, weights) + bias)

    def predict(self, x_test):
        """Return the most probable class label for each row of x_test.

        Must be called after fit. For labels 0..k-1 this matches the old
        argmax-index behavior exactly; otherwise it returns the real labels.
        """
        predict_prob = np.zeros((x_test.shape[0], self.unique_classes))
        for i in range(self.unique_classes):
            predict_prob[:, i] = self.predict_single_class(
                x_test, self.all_weights[i], self.all_biases[i]
            )
        # Map winning column indices back to the actual label values.
        return self.classes_[np.argmax(predict_prob, axis=1)]
				Y2xhc3MgTXlMb2dpc3RpY1JlZ3Jlc3Npb246CgogICAgZGVmIF9faW5pdF9fKHNlbGYsIGxlYXJuaW5nX3JhdGU9MC4wMSwgbWF4X2l0ZXI9MTAwMCk6CiAgICAgICAgc2VsZi5sZWFybmluZ19yYXRlID0gbGVhcm5pbmdfcmF0ZQogICAgICAgIHNlbGYubWF4X2l0ZXIgPSBtYXhfaXRlcgogICAgICAgIHNlbGYud2VpZ2h0cyA9IE5vbmUgIAogICAgICAgIHNlbGYuYmlhcyA9IDAgCiAgICAgICAgc2VsZi5hbGxfd2VpZ2h0cyA9IFtdCiAgICAgICAgc2VsZi5hbGxfYmlhc2VzID0gW10KICAgICAgICBzZWxmLnVuaXF1ZV9jbGFzc2VzID0gMAogICAgCiAgICBkZWYgaW5pdGlhbGl6ZV93ZWlnaHRzKHNlbGYsIG5fZmVhdHVyZXMpOgogICAgICAgIHNlbGYud2VpZ2h0cyA9IG5wLnplcm9zKG5fZmVhdHVyZXMpCiAgICAgICAgc2VsZi5iaWFzID0gMCAgCgogICAgZGVmIHNpZ21vaWQoc2VsZiwgeik6CiAgICAgICAgeiA9IG5wLmNsaXAoeiwgLTUwMCwgNTAwKQogICAgICAgIHJldHVybiAxIC8gKDEgKyBucC5leHAoLXopKQogICAgICAgIAogICAgZGVmIGdldF9ncmFkaWVudChzZWxmLCB4LCB5KToKICAgICAgICBtID0geC5zaGFwZVswXQogICAgICAgIHByZWRpY3Rpb25zID0geCBAIHNlbGYud2VpZ2h0cyArIHNlbGYuYmlhcyAKICAgICAgICBlcnJvcnMgPSBzZWxmLnNpZ21vaWQocHJlZGljdGlvbnMpIC0geQogICAgICAgIHdlaWdodF9ncmFkaWVudCA9IHguVCBAIGVycm9ycyAKICAgICAgICBiaWFzX2dyYWRpZW50ID0gbnAuc3VtKGVycm9ycykKICAgICAgICByZXR1cm4gd2VpZ2h0X2dyYWRpZW50IC8gbSwgYmlhc19ncmFkaWVudCAvIG0KICAgICAgIAogICAgCiAgICBkZWYgYmluYXJ5X2ZpdChzZWxmLCB4X3RyYWluLCB5X3RyYWluKToKICAgICAgICByb3dzID0geF90cmFpbi5zaGFwZVswXQogICAgICAgIGNvbHMgPSB4X3RyYWluLnNoYXBlWzFdCiAgICAgICAgCiAgICAgICAgaWYgc2VsZi53ZWlnaHRzIGlzIE5vbmU6IAogICAgICAgICAgICBzZWxmLmluaXRpYWxpemVfd2VpZ2h0cyhjb2xzKQoKICAgICAgICBmb3IgaSBpbiByYW5nZShzZWxmLm1heF9pdGVyKToKICAgICAgICAgICAgZ3JhZGllbnRfd2VpZ2h0cywgZ3JhZGllbnRfYmlhcyA9IHNlbGYuZ2V0X2dyYWRpZW50KHhfdHJhaW4sIHlfdHJhaW4pCiAgICAgICAgICAgIHNlbGYud2VpZ2h0cyA9IHNlbGYud2VpZ2h0cyAtIHNlbGYubGVhcm5pbmdfcmF0ZSAqIGdyYWRpZW50X3dlaWdodHMKICAgICAgICAgICAgc2VsZi5iaWFzID0gc2VsZi5iaWFzIC0gc2VsZi5sZWFybmluZ19yYXRlICogZ3JhZGllbnRfYmlhcwogICAgICAgIHJldHVybiBzZWxmLndlaWdodHMsIHNlbGYuYmlhcwoKICAgIGRlZiBmaXQoc2VsZiwgeF90cmFpbiwgeV90cmFpbik6CgogICAgICAgIHNlbGYudW5pcXVlX2NsYXNzZXMgPSBsZW4obnAudW5pcXVlKHlfdHJhaW4pKQoKICAgICAgICBmb3IgaSBpbiByYW5nZShzZWxmLnVuaXF1ZV9jbGFzc2VzKToKICAgICAgICAgICAgbmV3X3kg
PSAoeV90cmFpbiA9PSBpKS5hc3R5cGUoaW50KQogICAgICAgICAgICBzZWxmLndlaWdodHMgPSBOb25lCiAgICAgICAgICAgIHdlaWdodHMsIGJpYXMgPSBzZWxmLmJpbmFyeV9maXQoeF90cmFpbiwgbmV3X3kpCiAgICAgICAgICAgIHNlbGYuYWxsX3dlaWdodHMuYXBwZW5kKHdlaWdodHMpCiAgICAgICAgICAgIHNlbGYuYWxsX2JpYXNlcy5hcHBlbmQoYmlhcykKCiAgICBkZWYgcHJlZGljdF9zaW5nbGVfY2xhc3Moc2VsZiwgeF90ZXN0LCB3ZWlnaHRzLCBiaWFzKTogIAogICAgICAgIHByZWRpY3Rpb24gPSAgbnAuZG90KHhfdGVzdCwgd2VpZ2h0cykgKyBiaWFzCiAgICAgICAgYmluYXJ5X3ByZWRpY3Rpb25zID0gc2VsZi5zaWdtb2lkKHByZWRpY3Rpb24pCiAgICAgICAgcmV0dXJuIGJpbmFyeV9wcmVkaWN0aW9ucyAgCiAgICAKICAgIGRlZiBwcmVkaWN0KHNlbGYsIHhfdGVzdCk6CiAgICAgICAgcHJlZGljdF9wcm9iID0gbnAuemVyb3MoKHhfdGVzdC5zaGFwZVswXSwgc2VsZi51bmlxdWVfY2xhc3NlcykpCiAgICAgICAgZm9yIGkgaW4gcmFuZ2Uoc2VsZi51bmlxdWVfY2xhc3Nlcyk6CiAgICAgICAgICAgIHByZWRpY3RfcHJvYls6LCBpXSA9IHNlbGYucHJlZGljdF9zaW5nbGVfY2xhc3MoeF90ZXN0LCBzZWxmLmFsbF93ZWlnaHRzW2ldLCBzZWxmLmFsbF9iaWFzZXNbaV0pCiAgICAgICAgcmV0dXJuIG5wLmFyZ21heChwcmVkaWN0X3Byb2IsIGF4aXM9MSk=