Reputation: 11
I am trying to implement a quantum algorithm described in a research paper. The layout is a linear layer, followed by a quantum activation function layer, and then a softmax layer. Below is my current implementation of the model and the custom layer.
`
import numpy as np
from qiskit import QuantumCircuit, transpile, assemble
from qiskit_aer import Aer
from tensorflow.keras.layers import Dense, Layer
from tensorflow.keras.models import Sequential
class QuantumActivationFunction(Layer):
    """Keras layer implementing a quantum 'activation function'.

    Each input sample is angle-encoded into a QuantumCircuit (one qubit per
    feature), the qubits are entangled with CX gates, and RZ rotations are
    applied.

    NOTE(review): `call` returns a Python *list of QuantumCircuit objects*,
    not a tensor. Keras cannot trace a graph through arbitrary Python
    objects, which is the likely root cause of the TypeError raised when the
    layer is added to a Sequential model. To integrate with Keras, the
    circuits must be executed (e.g. on an Aer backend) and their expectation
    values returned as a float tensor of shape (batch, output_size).
    """

    def __init__(self, num_features, output_size, **kwargs):
        super(QuantumActivationFunction, self).__init__(**kwargs)
        # BUG FIX: both attributes were hard-coded to 8, silently ignoring
        # the constructor arguments; honor what the caller passes in.
        self.num_features = num_features
        self.output_size = output_size

    def encode_data_to_qubits(self, data):
        """Angle-encode each row of `data` into its own quantum circuit.

        Args:
            data: array of shape (num_samples, num_features).

        Returns:
            List of QuantumCircuit, one per sample, with one qubit per
            feature.
        """
        num_samples, num_features = data.shape
        encoded_states = []
        for i in range(num_samples):
            x = data[i]
            qc = QuantumCircuit(num_features)
            for j in range(num_features):
                # arctan maps an unbounded feature value (and its square)
                # into bounded rotation angles.
                angle_1 = np.arctan(x[j])
                angle_2 = np.arctan(x[j] ** 2)
                qc.h(j)
                qc.ry(angle_1, j)
                qc.rz(angle_2, j)
            encoded_states.append(qc)
        return encoded_states

    def entangle_qubits(self, qc):
        """Entangle the circuit's qubits in place with CX gates at every
        stride 1..num_qubits-1 (ring topology per stride)."""
        num_qubits = qc.num_qubits
        for stride in range(1, num_qubits):
            for i in range(num_qubits):
                control_qubit = qc.qubits[i]
                # BUG FIX: the wrap-around was hard-coded as `% 8`, which
                # mis-wires (or raises IndexError for) any circuit whose
                # width is not exactly 8 qubits.
                target_qubit = qc.qubits[(i + stride) % num_qubits]
                qc.cx(control_qubit, target_qubit)

    def compute_r(self, qc):
        """Apply a pi RZ rotation to the first four qubits of `qc`.

        BUG FIX: `self` was missing from the signature; calling
        `self.compute_r(qc)` therefore raised
        `TypeError: compute_r() takes 1 positional argument but 2 were given`.

        NOTE(review): `qc.rz(...)` returns a qiskit InstructionSet, not a
        measured value, so `r` collects instruction handles — confirm against
        the paper what quantity `r` is supposed to hold.
        """
        r = [0] * 4
        for i in range(4):
            r[i] = qc.rz(np.pi, qc.qubits[i])
        return r

    def call(self, data):
        """Encode `data`, entangle, and rotate each per-sample circuit.

        Returns the list of prepared circuits (see class NOTE about Keras
        compatibility).
        """
        encoded_states = self.encode_data_to_qubits(data)
        for qc in encoded_states:
            self.entangle_qubits(qc)
            self.compute_r(qc)
        return encoded_states
from keras.layers import Activation
from keras.activations import softmax
def baseline_model():
    """Build and compile the Linear -> QuantumActivation -> Softmax model.

    NOTE(review): `Sequential` and `Dense` must be imported (e.g. from
    tensorflow.keras) before this function runs — they were not imported
    anywhere in the snippet as posted, which alone makes it fail with a
    NameError.

    Returns:
        A compiled Keras Sequential model expecting 374 input features and
        producing a 3-class softmax output.
    """
    model = Sequential()
    model.add(Dense(8, input_dim=374, activation='linear'))  # Linear layer
    model.add(QuantumActivationFunction(8, 8))  # Quantum activation layer
    model.add(Dense(3, activation='softmax'))  # 3-class output layer
    model.compile(loss='categorical_crossentropy', optimizer='adam',
                  metrics=['accuracy'])
    return model
The code raises the following error:
`TypeError Traceback (most recent call last)
Cell In[105], line 1
----> 1 model = baseline_model()
2 first_linear_layer_output = model.layers[0](X_train).numpy()
4 print("Output of the first linear layer:", first_linear_layer_output)
Cell In[104], line 7
5 model = Sequential()
6 model.add(Dense(8, input_dim=374, activation='linear')) # Linear layer
----> 7 model.add(QuantumActivationFunction(8,8)) # Quantum Activation Function layer
8 model.add(Dense(3, activation='softmax')) # Output layer
9 model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
File ~\AppData\Roaming\Python\Python311\site-packages\tensorflow\python\trackable\base.py:205, in no_automatic_dependency_tracking.<locals>._method_wrapper(self, *args, **kwargs)
203 self._self_setattr_tracking = False # pylint: disable=protected-access
204 try:
--> 205 result = method(self, *args, **kwargs)
206 finally:`your text`
207 self._self_setattr_tracking = previous_value # pylint: disable=protected-access
File c:\Python311\Lib\site-packages\keras\utils\traceback_utils.py:70, in filter_traceback.<locals>.error_handler(*args, **kwargs)
67 filtered_tb = _process_traceback_frames(e.__traceback`
I have rechecked my code several times and even verified the dimensions of my data, but the error persists. When I ran the quantum-layer functions separately, they worked fine, but after integrating them into the class the same error appeared again. I would sincerely appreciate any help.
Upvotes: 1
Views: 84