Qiskit: Implementing Quantum Circuit Learning (QCL)

Posted at 2020-04-18

Quantum Circuit Learning

Quantum Circuit Learning (QCL) is an algorithm for applying quantum computers to machine learning.
It is designed to run on NISQ devices, that is, medium-scale quantum computers without error correction.

Reference: Quantum Circuit Learning
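
In the setup used below, the model output is the expectation value of Z on the first qubit after an input-encoding circuit and a parameterized circuit, and training classically minimizes the mean squared error against the teacher data:

$$
y(x, \theta) = \langle 0 | U_{\mathrm{in}}(x)^\dagger U(\theta)^\dagger Z_0 \, U(\theta) \, U_{\mathrm{in}}(x) | 0 \rangle,
\qquad
L(\theta) = \frac{1}{N} \sum_{i=1}^{N} \left( y(x_i, \theta) - y_i \right)^2
$$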

Implementation

Implementing this from scratch would be quite a lot of work, so I implemented it in qiskit while referring to the qulacs code in Quantum Native Dojo.

Full code

# coding: utf-8

import numpy as np
import matplotlib.pyplot as plt
from functools import reduce
from scipy.optimize import minimize
import time

from qiskit import QuantumRegister, QuantumCircuit, ClassicalRegister, execute
from qiskit.quantum_info.analysis import average_data
from qiskit import BasicAer
from qiskit.quantum_info.operators import Operator
from qiskit.aqua.utils import tensorproduct


I_mat = np.array([[1, 0], [0, 1]])
X_mat = np.array([[0, 1], [1, 0]])
Z_mat = np.array([[1, 0], [0, -1]])


def classical_minimize(cost_func, theta_init, method='Nelder-Mead'):
    print('Do classical_minimize !')
    start_time = time.time()
    result = minimize(cost_func, theta_init, method=method)
    print('running time: {}'.format(time.time() - start_time))
    print('opt_cost: {}'.format(result.fun))
    theta_opt = result.x
    return theta_opt


class SimpleQuantumCircuitLearning:


    def __init__(self, nqubit, func=lambda x: np.sin(x*np.pi), num_x_train=50, c_depth=3, time_step=0.77):
        self.nqubit = nqubit
        self.func_to_learn = func
        self.num_x_train = num_x_train
        self.c_depth = c_depth
        self.time_step = time_step
        self.x_train = None
        self.y_train = None
        self.InitialTheta = None
        self.time_evol_gate = None

    def initialize(self):
        random_seed = 0
        np.random.seed(random_seed)
        x_min = -1.
        x_max = 1.
        self.x_train = x_min + (x_max - x_min) * np.random.rand(self.num_x_train)
        self.y_train = self.func_to_learn(self.x_train)

    def add_noise(self, mag_noise=0.05):
        self.y_train = self.y_train + mag_noise * np.random.rand(self.num_x_train)

    # Encode the classical input x into the circuit via RY(arcsin x) and RZ(arccos x^2) on every qubit
    def input_encode(self, x) -> QuantumCircuit:
        qr = QuantumRegister(self.nqubit)
        cr = ClassicalRegister(self.nqubit)
        qc = QuantumCircuit(qr, cr)
        angle_y = np.arcsin(x)
        angle_z = np.arccos(x ** 2)
        for i in range(self.nqubit):
            qc.ry(angle_y, i)
            qc.rz(angle_z, i)
        return qc

    # Build the full 2^nqubit operator by placing the given single-qubit operators at their sites and I elsewhere
    def make_fullgate(self, list_SiteAndOperator):
        list_Site = [SiteAndOperator[0] for SiteAndOperator in list_SiteAndOperator]
        list_SingleGates = []
        cnt = 0
        for i in range(self.nqubit):
            if i in list_Site:
                list_SingleGates.append(list_SiteAndOperator[cnt][1])
                cnt += 1
            else:
                list_SingleGates.append(I_mat)
        return reduce(np.kron, list_SingleGates)

    # Random transverse-field Ising Hamiltonian; diagonalize and exponentiate it to get exp(-i * H * time_step)
    def hamiltonian(self, time_step):
        ham = np.zeros((2**self.nqubit, 2**self.nqubit), dtype=complex)
        for i in range(self.nqubit):
            Jx = -1. + 2. * np.random.rand()
            ham += Jx * self.make_fullgate([[i, X_mat]])
            for j in range(i+1, self.nqubit):
                J_ij = -1. + 2. * np.random.rand()
                ham += J_ij * self.make_fullgate([[i, Z_mat], [j, Z_mat]])
        diag, eigen_vecs = np.linalg.eigh(ham)
        time_evol_op = np.dot(np.dot(eigen_vecs, np.diag(np.exp(-1j*time_step*diag))), eigen_vecs.T.conj())
        self.time_evol_gate = Operator(time_evol_op)

    def initial_theta(self):
        np.random.seed(9999)
        theta = np.array([2*np.pi*np.random.rand() for i in range(3 * self.nqubit * self.c_depth)])
        return theta

    # Parameterized circuit U(theta): time evolution followed by rx-rz-rx rotations on each qubit, repeated c_depth times
    def U_out(self, qc, theta):
        qc.unitary(self.time_evol_gate, range(self.nqubit), label='time_evol_gate')
        theta = np.reshape(theta, (self.c_depth, self.nqubit, 3))  # one (rx, rz, rx) angle triple per qubit per layer
        for depth in range(self.c_depth):
            for q1 in range(self.nqubit):
                qc.rx(theta[depth][q1][0], q1)
                qc.rz(theta[depth][q1][1], q1)
                qc.rx(theta[depth][q1][2], q1)
        return qc

    # Observable Z on qubit 0: the matrix Z ⊗ I ⊗ ... ⊗ I
    def Z0(self):
        tensor = Z_mat
        for i in range(1, self.nqubit):
            tensor = tensorproduct(tensor, I_mat)
        return tensor

    # Measure all qubits on the qasm simulator and return the counts
    def run_circuit(self, qc):
        NUM_SHOTS = 10000
        seed = 1234
        backend = BasicAer.get_backend('qasm_simulator')
        qc.measure(range(self.nqubit), range(self.nqubit))
        results = execute(qc, backend, shots=NUM_SHOTS, seed_simulator=seed).result()
        return results.get_counts(qc)

    # Model prediction: expectation value of Z0 for input x and parameters theta
    def qcl_pred(self, x, theta):
        qc = self.input_encode(x)
        qc = self.U_out(qc, theta)
        counts = self.run_circuit(qc)
        expectation = average_data(counts, self.Z0())
        return expectation

    # Cost: mean squared error between the predictions and the teacher data
    def cost_func(self, theta):
        y_pred = [self.qcl_pred(x, theta) for x in self.x_train]
        L = ((y_pred - self.y_train)**2).mean()
        return L

    def minim(self, InitialTheta):
        theta_opt = classical_minimize(self.cost_func, InitialTheta)
        return theta_opt


if __name__ == '__main__':
    QCL = SimpleQuantumCircuitLearning(nqubit=3)
    QCL.initialize()
    QCL.add_noise(mag_noise=0.05)
    QCL.hamiltonian(time_step=0.77)
    initial_theta = QCL.initial_theta()
    initial_cost = QCL.cost_func(initial_theta)
    ##########################################################
    x_min = - 1.
    x_max = 1.
    xlist = np.arange(x_min, x_max, 0.02)
    y_init = [QCL.qcl_pred(x, initial_theta) for x in xlist]
    plt.plot(xlist, y_init)
    plt.show()
    ##########################################################
    print('initial_cost: {}'.format(initial_cost))
    theta_opt = QCL.minim(initial_theta)
    plt.figure(figsize=(10, 6))
    plt.plot(QCL.x_train, QCL.y_train, "o", label='Teacher')
    plt.plot(xlist, y_init, '--', label='Initial Model Prediction', c='gray')
    y_pred = np.array([QCL.qcl_pred(x, theta_opt) for x in xlist])
    plt.plot(xlist, y_pred, label='Final Model Prediction')
    plt.legend()
    plt.show()

Initial setup

    def __init__(self, nqubit, func=lambda x: np.sin(x*np.pi), num_x_train=50, c_depth=3, time_step=0.77):
        self.nqubit = nqubit
        self.func_to_learn = func
        self.num_x_train = num_x_train
        self.c_depth = c_depth
        self.time_step = time_step
        self.x_train = None
        self.y_train = None
        self.InitialTheta = None
        self.time_evol_gate = None
    
    # Training data
    def initialize(self):
        random_seed = 0
        np.random.seed(random_seed)
        x_min = -1.
        x_max = 1.
        self.x_train = x_min + (x_max - x_min) * np.random.rand(self.num_x_train)
        self.y_train = self.func_to_learn(self.x_train)

    # Add noise
    def add_noise(self, mag_noise=0.05):
        self.y_train = self.y_train + mag_noise * np.random.rand(self.num_x_train)
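
As a quick sanity check (a minimal sketch, assuming the class above is defined in the same file), the teacher data can be generated and inspected like this:

qcl = SimpleQuantumCircuitLearning(nqubit=3)  # 3-qubit model learning sin(pi * x)
qcl.initialize()                              # 50 random training points in [-1, 1]
qcl.add_noise(mag_noise=0.05)                 # add small uniform noise to the labels
print(qcl.x_train[:5])                        # x values in [-1, 1]
print(qcl.y_train[:5])                        # sin(pi * x) plus noise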

Results

Running the code produces the following output.

initial_cost: 0.20496500050465266
opt_cost: 0.13626398715932975

(Figure: 2020041802.png, result with <Z>)

Looking at the results, the maximum and minimum values of the Final Model Prediction seem too small.
This is because the expectation value <Z> is used as the model output.

    def Z0(self):
        tensor = Z_mat
        for i in range(1, self.nqubit):
            tensor = tensorproduct(tensor, I_mat)
        return tensor

So, using <2Z> instead:

    def Z0(self):
        tensor = Z_mat * 2
        for i in range(1, self.nqubit):
            tensor = tensorproduct(tensor, I_mat)
        return tensor

(Figure: 2020041801.png, result with <2Z>)

we obtain the plot above, and the opt_cost also becomes

opt_cost: 0.03618545796607254

which is a clear improvement.

As a further test, using <2.5Z> gives:

(Figure: 2020041803.png, result with <2.5Z>)

opt_cost: 0.021796774423019367

Summary

This was only a rough pass, but I implemented QCL with Qiskit.
The way circuits are written is completely different from Qulacs and blueqat, so in terms of the amount of available reference material and so on, it can feel hard to get started.
Incidentally, I also felt that the coefficient of Z needs to be optimized; a rough sketch of that idea follows below.
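
One way to do that (my own variation, not part of the code above) is to append a scale parameter to theta and let the optimizer choose it, instead of fixing <Z>, <2Z>, or <2.5Z> by hand. A sketch of the extra methods, reusing the ones defined earlier:

    # Hypothetical extension: the last entry of params is the coefficient a of Z0,
    # so the model output becomes a * <Z0> and a is optimized together with theta.
    def qcl_pred_scaled(self, x, params):
        theta, a = params[:-1], params[-1]
        qc = self.input_encode(x)
        qc = self.U_out(qc, theta)
        counts = self.run_circuit(qc)
        return a * average_data(counts, self.Z0())

    def cost_func_scaled(self, params):
        y_pred = [self.qcl_pred_scaled(x, params) for x in self.x_train]
        return ((y_pred - self.y_train) ** 2).mean()

The starting point for classical_minimize would then be, for example, np.append(initial_theta, 1.0).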

This is a fairly rough article, but please bear with me, since the focus is on the implementation.
