29 | - "print(__doc__)\n\n# Author: Vincent Dubourg <vincent.dubourg@gmail.com>\n# Adapted to GaussianProcessClassifier:\n# Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>\n# License: BSD 3 clause\n\nimport numpy as np\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib import cm\n\nfrom sklearn.gaussian_process import GaussianProcessClassifier\nfrom sklearn.gaussian_process.kernels import DotProduct, ConstantKernel as C\n\n# A few constants\nlim = 8\n\n\ndef g(x):\n    \"\"\"The function to predict (classification will then consist in predicting\n    whether g(x) <= 0 or not)\"\"\"\n    return 5. - x[:, 1] - .5 * x[:, 0] ** 2.\n\n# Design of experiments\nX = np.array([[-4.61611719, -6.00099547],\n              [4.10469096, 5.32782448],\n              [0.00000000, -0.50000000],\n              [-6.17289014, -4.6984743],\n              [1.3109306, -6.93271427],\n              [-5.03823144, 3.10584743],\n              [-2.87600388, 6.74310541],\n              [5.21301203, 4.26386883]])\n\n# Observations\ny = np.array(g(X) > 0, dtype=int)\n\n# Instanciate and fit Gaussian Process Model\nkernel = C(0.1, (1e-5, np.inf)) * DotProduct(sigma_0=0.1) ** 2\ngp = GaussianProcessClassifier(kernel=kernel)\ngp.fit(X, y)\nprint(\"Learned kernel: %s \" % gp.kernel_)\n\n# Evaluate real function and the predicted probability\nres = 50\nx1, x2 = np.meshgrid(np.linspace(- lim, lim, res),\n                     np.linspace(- lim, lim, res))\nxx = np.vstack([x1.reshape(x1.size), x2.reshape(x2.size)]).T\n\ny_true = g(xx)\ny_prob = gp.predict_proba(xx)[:, 1]\ny_true = y_true.reshape((res, res))\ny_prob = y_prob.reshape((res, res))\n\n# Plot the probabilistic classification iso-values\nfig = plt.figure(1)\nax = fig.gca()\nax.axes.set_aspect('equal')\nplt.xticks([])\nplt.yticks([])\nax.set_xticklabels([])\nax.set_yticklabels([])\nplt.xlabel('$x_1$')\nplt.ylabel('$x_2$')\n\ncax = plt.imshow(y_prob, cmap=cm.gray_r, alpha=0.8,\n                 extent=(-lim, lim, -lim, lim))\nnorm = plt.matplotlib.colors.Normalize(vmin=0., vmax=0.9)\ncb = plt.colorbar(cax, ticks=[0., 0.2, 0.4, 0.6, 0.8, 1.], norm=norm)\ncb.set_label('${\\\\rm \\mathbb{P}}\\left[\\widehat{G}(\\mathbf{x}) \\leq 0\\\\right]$')\nplt.clim(0, 1)\n\nplt.plot(X[y <= 0, 0], X[y <= 0, 1], 'r.', markersize=12)\n\nplt.plot(X[y > 0, 0], X[y > 0, 1], 'b.', markersize=12)\n\nplt.contour(x1, x2, y_true, [0.], colors='k', linestyles='dashdot')\n\ncs = plt.contour(x1, x2, y_prob, [0.666], colors='b',\n                 linestyles='solid')\nplt.clabel(cs, fontsize=11)\n\ncs = plt.contour(x1, x2, y_prob, [0.5], colors='k',\n                 linestyles='dashed')\nplt.clabel(cs, fontsize=11)\n\ncs = plt.contour(x1, x2, y_prob, [0.334], colors='r',\n                 linestyles='solid')\nplt.clabel(cs, fontsize=11)\n\nplt.show()" |
| 29 | + "print(__doc__)\n\n# Author: Vincent Dubourg <vincent.dubourg@gmail.com>\n# Adapted to GaussianProcessClassifier:\n# Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>\n# License: BSD 3 clause\n\nimport numpy as np\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib import cm\n\nfrom sklearn.gaussian_process import GaussianProcessClassifier\nfrom sklearn.gaussian_process.kernels import DotProduct, ConstantKernel as C\n\n# A few constants\nlim = 8\n\n\ndef g(x):\n \"\"\"The function to predict (classification will then consist in predicting\n whether g(x) <= 0 or not)\"\"\"\n return 5. - x[:, 1] - .5 * x[:, 0] ** 2.\n\n# Design of experiments\nX = np.array([[-4.61611719, -6.00099547],\n [4.10469096, 5.32782448],\n [0.00000000, -0.50000000],\n [-6.17289014, -4.6984743],\n [1.3109306, -6.93271427],\n [-5.03823144, 3.10584743],\n [-2.87600388, 6.74310541],\n [5.21301203, 4.26386883]])\n\n# Observations\ny = np.array(g(X) > 0, dtype=int)\n\n# Instantiate and fit Gaussian Process Model\nkernel = C(0.1, (1e-5, np.inf)) * DotProduct(sigma_0=0.1) ** 2\ngp = GaussianProcessClassifier(kernel=kernel)\ngp.fit(X, y)\nprint(\"Learned kernel: %s \" % gp.kernel_)\n\n# Evaluate real function and the predicted probability\nres = 50\nx1, x2 = np.meshgrid(np.linspace(- lim, lim, res),\n np.linspace(- lim, lim, res))\nxx = np.vstack([x1.reshape(x1.size), x2.reshape(x2.size)]).T\n\ny_true = g(xx)\ny_prob = gp.predict_proba(xx)[:, 1]\ny_true = y_true.reshape((res, res))\ny_prob = y_prob.reshape((res, res))\n\n# Plot the probabilistic classification iso-values\nfig = plt.figure(1)\nax = fig.gca()\nax.axes.set_aspect('equal')\nplt.xticks([])\nplt.yticks([])\nax.set_xticklabels([])\nax.set_yticklabels([])\nplt.xlabel('$x_1$')\nplt.ylabel('$x_2$')\n\ncax = plt.imshow(y_prob, cmap=cm.gray_r, alpha=0.8,\n extent=(-lim, lim, -lim, lim))\nnorm = plt.matplotlib.colors.Normalize(vmin=0., vmax=0.9)\ncb = plt.colorbar(cax, ticks=[0., 0.2, 0.4, 0.6, 0.8, 1.], norm=norm)\ncb.set_label('${\\\\rm \\mathbb{P}}\\left[\\widehat{G}(\\mathbf{x}) \\leq 0\\\\right]$')\nplt.clim(0, 1)\n\nplt.plot(X[y <= 0, 0], X[y <= 0, 1], 'r.', markersize=12)\n\nplt.plot(X[y > 0, 0], X[y > 0, 1], 'b.', markersize=12)\n\nplt.contour(x1, x2, y_true, [0.], colors='k', linestyles='dashdot')\n\ncs = plt.contour(x1, x2, y_prob, [0.666], colors='b',\n linestyles='solid')\nplt.clabel(cs, fontsize=11)\n\ncs = plt.contour(x1, x2, y_prob, [0.5], colors='k',\n linestyles='dashed')\nplt.clabel(cs, fontsize=11)\n\ncs = plt.contour(x1, x2, y_prob, [0.334], colors='r',\n linestyles='solid')\nplt.clabel(cs, fontsize=11)\n\nplt.show()" |