import dynex
import dimod
import numpy as np
from sklearn.datasets import make_classification
from dynex import DynexConfig, ComputeBackend
# Generate a synthetic binary classification dataset (fixed seed for
# reproducibility): 100 samples, 10 candidate features.
X, y = make_classification(n_samples=100, n_features=10, random_state=42)
# Map the {0, 1} labels onto {-1, +1}, the conventional SVM target encoding.
y = y * 2 - 1
# Build QUBO for feature selection.
# Objective: reward features that are relevant to the target (negative
# linear terms) while penalizing feature count and redundant, mutually
# correlated feature pairs (positive terms). This follows the standard
# QUBO feature-selection formulation: relevance on the diagonal,
# pairwise redundancy off the diagonal.
n_features = X.shape[1]
lambda_reg = 0.1  # Regularization weight: feature-count / redundancy penalty

Q = {}
# Linear terms: -relevance + count penalty. The negative relevance term
# (|correlation| between feature i and the target y) is what makes
# selecting a feature worthwhile; with penalty-only coefficients the
# all-zeros assignment would be the QUBO optimum and no feature would
# ever be selected, crashing the downstream SVM on an empty matrix.
for i in range(n_features):
    relevance = np.abs(np.corrcoef(X[:, i], y)[0, 1])
    Q[(i, i)] = Q.get((i, i), 0) - relevance + lambda_reg
# Quadratic terms: penalize selecting pairs of correlated (redundant)
# features; weakly correlated pairs (|corr| <= 0.1) carry no penalty.
for i in range(n_features):
    for j in range(i + 1, n_features):
        redundancy = np.abs(np.corrcoef(X[:, i], X[:, j])[0, 1])
        if redundancy > 0.1:
            Q[(i, j)] = Q.get((i, j), 0) + redundancy * lambda_reg
# Wrap the QUBO in a dimod BQM and anneal it on the Dynex platform.
# NOTE(review): DynexConfig / ComputeBackend availability depends on the
# installed dynex SDK version — confirm against the SDK in use.
bqm = dimod.BinaryQuadraticModel.from_qubo(Q)
dynex_model = dynex.BQM(bqm)
gpu_config = DynexConfig(compute_backend=ComputeBackend.GPU)
dynex_sampler = dynex.DynexSampler(dynex_model, config=gpu_config)
sampleset = dynex_sampler.sample(num_reads=1000, annealing_time=200)
# Keep the variables set to 1 in the lowest-energy sample: these are the
# indices of the selected features.
best_sample = sampleset.first.sample
selected_features = [idx for idx, bit in best_sample.items() if bit == 1]
print(f"Selected features: {selected_features}")
# Evaluate the selected feature subset with a classical RBF-kernel SVM
# under 5-fold cross-validation.
from sklearn.svm import SVC
from sklearn.model_selection import cross_val_score

X_selected = X[:, selected_features]
classifier = SVC(kernel='rbf')
scores = cross_val_score(classifier, X_selected, y, cv=5)
print(f"Cross-validation accuracy: {scores.mean():.4f} ± {scores.std():.4f}")