|
| 1 | +import numpy as np |
| 2 | +from sklearn.datasets import make_regression |
| 3 | + |
| 4 | +from rehline import plq_Ridge_Regressor |
| 5 | + |
| 6 | + |
def _fit_and_check_monotonic(decreasing):
    """Fit a Huber ``plq_Ridge_Regressor`` under a monotonic constraint
    and assert the fitted coefficients respect the requested ordering.

    Parameters
    ----------
    decreasing : bool
        If ``True``, coefficients must be non-increasing; otherwise
        non-decreasing.
    """
    # Synthetic regression data; fixed seed keeps the test deterministic.
    X, y = make_regression(n_samples=100, n_features=10, noise=0.1, random_state=42)

    # Monotonicity constraint on the coefficient vector, direction per flag.
    constraint = [{"name": "monotonic", "decreasing": decreasing}]

    reg = plq_Ridge_Regressor(loss={"name": "huber"}, constraint=constraint, C=1.0)
    reg.fit(X, y)

    # Successive differences of the fitted coefficients reveal ordering.
    coef = reg.coef_
    diffs = np.diff(coef)

    # Tolerance of 1e-3 allows for small numerical errors from the solver.
    if decreasing:
        assert np.all(diffs <= 1e-3), f"Coefficients are not monotonic decreasing: {coef}"
    else:
        assert np.all(diffs >= -1e-3), f"Coefficients are not monotonic increasing: {coef}"


def test_monotonic_increasing():
    """Test monotonic increasing constraint."""
    _fit_and_check_monotonic(decreasing=False)


def test_monotonic_decreasing():
    """Test monotonic decreasing constraint."""
    _fit_and_check_monotonic(decreasing=True)
| 45 | + |
| 46 | + |
if __name__ == "__main__":
    # Allow running the checks directly, outside of pytest.
    for check in (test_monotonic_increasing, test_monotonic_decreasing):
        check()
    print("All monotonic constraint tests passed!")