
Commit e961027

adding codes
1 parent 9c7dab4 commit e961027

File tree

doc/src/week16/programs/crbm.py
doc/src/week16/programs/qrbm6.py

2 files changed: +130 -0 lines changed


doc/src/week16/programs/crbm.py

Lines changed: 70 additions & 0 deletions

import numpy as np


class RBM:
    def __init__(self, n_visible, n_hidden, learning_rate=0.1):
        self.n_visible = n_visible
        self.n_hidden = n_hidden
        self.learning_rate = learning_rate

        # Initialize weights and biases
        self.weights = np.random.normal(0, 0.01, size=(n_visible, n_hidden))
        self.visible_bias = np.zeros(n_visible)
        self.hidden_bias = np.zeros(n_hidden)

    def sigmoid(self, x):
        return 1.0 / (1 + np.exp(-x))

    def sample_prob(self, probs):
        return (np.random.rand(*probs.shape) < probs).astype(np.float32)

    def train(self, data, epochs=1000, batch_size=10):
        n_samples = data.shape[0]

        for epoch in range(epochs):
            np.random.shuffle(data)
            for i in range(0, n_samples, batch_size):
                v0 = data[i:i + batch_size]
                # Positive phase
                h0_prob = self.sigmoid(np.dot(v0, self.weights) + self.hidden_bias)
                h0_sample = self.sample_prob(h0_prob)

                # Negative phase
                v1_prob = self.sigmoid(np.dot(h0_sample, self.weights.T) + self.visible_bias)
                h1_prob = self.sigmoid(np.dot(v1_prob, self.weights) + self.hidden_bias)

                # Update weights and biases
                self.weights += self.learning_rate * (
                    np.dot(v0.T, h0_prob) - np.dot(v1_prob.T, h1_prob)
                ) / batch_size
                self.visible_bias += self.learning_rate * np.mean(v0 - v1_prob, axis=0)
                self.hidden_bias += self.learning_rate * np.mean(h0_prob - h1_prob, axis=0)

            if epoch % 100 == 0:
                error = np.mean((v0 - v1_prob) ** 2)
                print(f"Epoch {epoch}: Reconstruction error = {error:.4f}")

    def transform(self, v):
        """Compute hidden unit probabilities."""
        return self.sigmoid(np.dot(v, self.weights) + self.hidden_bias)

    def reconstruct(self, v):
        """Reconstruct visible units from hidden layer."""
        h = self.sigmoid(np.dot(v, self.weights) + self.hidden_bias)
        v_recon = self.sigmoid(np.dot(h, self.weights.T) + self.visible_bias)
        return v_recon


# Generate synthetic binary data
data = np.random.randint(0, 2, size=(100, 6))

# Initialize and train RBM
rbm = RBM(n_visible=6, n_hidden=2, learning_rate=0.1)
rbm.train(data, epochs=500)

# Transform and reconstruct
sample = np.array([[1, 0, 1, 0, 1, 0]])
hidden = rbm.transform(sample)
reconstructed = rbm.reconstruct(sample)

print("Original:", sample)
print("Hidden:", hidden)
print("Reconstructed:", reconstructed)
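One natural follow-up, sketched below as an illustrative addition rather than part of the committed file: once contrastive divergence has fit the weights, new configurations can be drawn by running an alternating Gibbs chain between the visible and hidden layers. The helper gibbs_sample and the chain length n_steps are hypothetical names, and the snippet assumes it runs after the script above (reusing np and the trained rbm).

# Illustrative addition, not part of crbm.py: sample from the trained RBM
# by alternating Gibbs updates between hidden and visible layers.
def gibbs_sample(rbm, v, n_steps=100):
    for _ in range(n_steps):
        h = rbm.sample_prob(rbm.sigmoid(np.dot(v, rbm.weights) + rbm.hidden_bias))
        v = rbm.sample_prob(rbm.sigmoid(np.dot(h, rbm.weights.T) + rbm.visible_bias))
    return v

# Start the chain from random noise and let it mix
v_init = np.random.randint(0, 2, size=(1, 6)).astype(np.float32)
print("Gibbs sample:", gibbs_sample(rbm, v_init))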

doc/src/week16/programs/qrbm6.py

Lines changed: 60 additions & 0 deletions

import pennylane as qml
from pennylane import numpy as np

# Define the device: two qubits for one visible and one hidden unit
n_visible = 1
n_hidden = 1
dev = qml.device("default.qubit", wires=n_visible + n_hidden)

# Weight and bias initialization
weights = np.random.normal(0, 0.1, size=(n_visible, n_hidden), requires_grad=True)
visible_bias = np.zeros(n_visible, requires_grad=True)
hidden_bias = np.zeros(n_hidden, requires_grad=True)

# Quantum circuit for the RQBM
@qml.qnode(dev)
def circuit(v, weights, visible_bias, hidden_bias):
    # Encode the visible state in the computational basis
    for i in range(n_visible):
        if v[i] == 1:
            qml.PauliX(wires=i)

    # Apply visible bias rotations
    for i in range(n_visible):
        qml.RZ(visible_bias[i], wires=i)

    # Apply hidden bias and interaction terms
    for j in range(n_hidden):
        qml.RZ(hidden_bias[j], wires=n_visible + j)
        for i in range(n_visible):
            qml.CNOT(wires=[i, n_visible + j])
            qml.RZ(weights[i, j], wires=n_visible + j)
            qml.CNOT(wires=[i, n_visible + j])

    return qml.probs(wires=list(range(n_visible + n_hidden)))

# Data: all possible visible unit configurations
visible_data = np.array([[0], [1]])

# Loss function: negative log-likelihood of the (v, h=0) states
def rqbm_loss(weights, visible_bias, hidden_bias):
    loss = 0
    for v in visible_data:
        probs = circuit(v, weights, visible_bias, hidden_bias)
        # Probability of the basis state (v, h=0); the first wire is the
        # most significant bit in PennyLane's probability ordering
        idx = int(v[0]) * 2 ** n_hidden
        loss -= np.log(probs[idx] + 1e-6)
    return loss / len(visible_data)

# Optimization
opt = qml.GradientDescentOptimizer(stepsize=0.1)
steps = 100

for step in range(steps):
    weights, visible_bias, hidden_bias = opt.step(
        rqbm_loss, weights, visible_bias, hidden_bias
    )
    if step % 10 == 0:
        current_loss = rqbm_loss(weights, visible_bias, hidden_bias)
        print(f"Step {step}: Loss = {current_loss:.4f}")
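After the optimization loop, a short illustrative check (not part of the committed file) can make the result concrete: it reuses only the names defined in qrbm6.py plus PennyLane's qml.draw to print the circuit and the learned joint distribution for each visible configuration. A minimal sketch under those assumptions:

# Illustrative addition, not part of qrbm6.py: print the circuit and the
# joint (visible, hidden) distribution for each visible configuration.
print(qml.draw(circuit)(visible_data[1], weights, visible_bias, hidden_bias))
for v in visible_data:
    probs = circuit(v, weights, visible_bias, hidden_bias)
    print(f"v = {v}: joint probabilities over (visible, hidden) = {probs}")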
