diff --git a/.gitignore b/.gitignore index 987df74..9a1d8ae 100644 --- a/.gitignore +++ b/.gitignore @@ -10,7 +10,8 @@ include/ *$py.class # C extensions -*.so +**/*.so +**/*.o # Distribution / packaging .Python @@ -52,8 +53,8 @@ coverage.xml .hypothesis/ # Translations -*.mo -*.pot +**/*.mo +**/*.pot # Django stuff: *.log @@ -108,5 +109,6 @@ venv.bak/ .vscode/ +**/Pipfile.lock # End of https://www.gitignore.io/api/python diff --git a/CEC/.idea/CEC.iml b/CEC/.idea/CEC.iml new file mode 100644 index 0000000..6711606 --- /dev/null +++ b/CEC/.idea/CEC.iml @@ -0,0 +1,11 @@ + + + + + + + + + + \ No newline at end of file diff --git a/CEC/.idea/inspectionProfiles/profiles_settings.xml b/CEC/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000..105ce2d --- /dev/null +++ b/CEC/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/CEC/.idea/misc.xml b/CEC/.idea/misc.xml new file mode 100644 index 0000000..23715ca --- /dev/null +++ b/CEC/.idea/misc.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/CEC/.idea/modules.xml b/CEC/.idea/modules.xml new file mode 100644 index 0000000..c924456 --- /dev/null +++ b/CEC/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/CEC/.idea/vcs.xml b/CEC/.idea/vcs.xml new file mode 100644 index 0000000..6c0b863 --- /dev/null +++ b/CEC/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/CEC/.idea/workspace.xml b/CEC/.idea/workspace.xml new file mode 100644 index 0000000..632035e --- /dev/null +++ b/CEC/.idea/workspace.xml @@ -0,0 +1,58 @@ + + + + + + + + + + + + + + + + + + + + + + + 1568040396942 + + + + + + + \ No newline at end of file diff --git a/CEC/AlgoStats.ipynb b/CEC/AlgoStats.ipynb index e2f2622..17b655f 100644 --- a/CEC/AlgoStats.ipynb +++ b/CEC/AlgoStats.ipynb @@ -3,12 +3,22 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "jupyter": { + "source_hidden": true + } + }, "outputs": [], "source": [ + "import ctypes\n", "import sys\n", "import time\n", + "from multiprocessing import Process, Queue\n", + "\n", + "from IPython.display import display\n", + "\n", "import numpy as np\n", + "import pandas as pd\n", "import matplotlib.pyplot as plt\n", "\n", "# Test the type of distribution of data for any distribution\n", @@ -29,31 +39,34 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "dims, algos, alpha = [10, 30, 50], ['dynNpMsjDE', 'DE', 'jDE', 'BBFWA', 'SCA', 'ES(1+1)', 'dynFWAG', 'ASO', 'BA', 'MTS'], 0.05" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# CEC 2017 algorithm speed test\n", - "Example of time execution speed of algorithm" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, + "metadata": { + "jupyter": { + "source_hidden": true + } + }, "outputs": [], "source": [ - "sys.path.append('cec2017')\n", - "from cec2017 import run_fun\n", - "from run_cec import MinMB\n", - "from NiaPy import Runner\n", - "from NiaPy.util import Task\n", + "from run_cec import MinMB, fillprototype\n", + "\n", + "CECv = 2017\n", + "cec_dll = ctypes.cdll.LoadLibrary('cec%d/cec%d.so' % (CECv, CECv))\n", + "prototype = ctypes.CFUNCTYPE( \n", + " ctypes.c_double, \n", + " ctypes.POINTER(ctypes.c_double), \n", + " ctypes.c_int,\n", + " ctypes.c_int\n", + ")\n", + "run_fun = prototype(('runtest', cec_dll))\n", + "\n", + "from NiaPy.algorithms import 
AlgorithmUtility\n", + "from NiaPy.task import StoppingTask, Utility\n", + "from NiaPy.util import reflectRepair\n", + "\n", + "def testRunTest(a, d, bench, **args):\n", + " task = StoppingTask(D=d, nFES=d * 1e4, benchmark=bench, **args)\n", + " start_time = time.time()\n", + " best = a(task)\n", + " return time.time() - start_time, best\n", "\n", "def testOne(x=0.55):\n", " for i in range(10 ** 6): \n", @@ -66,16 +79,91 @@ " x = x / (x + 2)\n", "\n", "def testTwo(d):\n", - " for i in range(2 * 10 ** 5): \n", + " for i in range(2 * 10 ** 5):\n", " x = np.random.uniform(-100, 100, d)\n", - " run_fun(x, 18)\n", + " run_fun(x.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), ctypes.c_int(d), ctypes.c_int(18))\n", "\n", - "def runTree(a, d):\n", - " task = Task(D=d, nFES=2 * 10 ** 5, benchmark=MinMB(run_fun, fnum=18))\n", - " algo = Runner.getAlgorithm(a)(task=task)\n", - " start_time = time.time()\n", - " algo.run()\n", - " return time.time() - start_time" + "def testThreeCec(a, d, fnum=1): return testRunTest(a, d, MinMB(cec_dll.runtest, fnum=fnum))\n", + "\n", + "def testThreeBasic(a, d, fnum=1):\n", + " mapper = {\n", + " 1:'bentcigar',\n", + " 2:'',\n", + " 3:'zakharov',\n", + " 4:'rosenbrock',\n", + " 5:'rastrigin',\n", + " 6:'',\n", + " 10:'swefel'\n", + " }\n", + " return testRunTest(a, d, mapper[fnum])\n", + "\n", + "def t_fun_cec(a, d, fnum, q, runs_no):\n", + " for _ in range(runs_no): q.put(testThreeCec(a, d, fnum)[1][1])\n", + "\t \n", + "def t_fun_basic(a, d, fnum, q, runs_no):\n", + " for _ in range(runs_no): q.put(testThreeBasic(a, d, fnum)[1][1])\n", + " \n", + "def runThread(a, d, fnum=1, thread_fun=t_fun_cec, thread_no=24, runs_no=35, seed=1, **a_args):\n", + " autil = AlgorithmUtility()\n", + " ts, qs, r, runs, runs_sum = [], [], [], [0] * thread_no, 0\n", + " i = 0\n", + " while runs_sum < runs_no:\n", + " runs[i] += 1\n", + " runs_sum += 1\n", + " i = i + 1 if i + 1 < len(runs) else 0\n", + " for i in range(thread_no):\n", + " if runs[i] == 0: continue\n", + " q = Queue(runs[i])\n", + " t = Process(target=thread_fun, args=(autil.get_algorithm(a)(seed=seed + i, **a_args), d, fnum, q, runs[i]))\n", + " t.start()\n", + " ts.append(t), qs.append(q)\n", + " for t in ts: t.join()\n", + " for q in qs: \n", + " while not q.empty(): r.append(q.get())\n", + " return r" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "jupyter": { + "source_hidden": true + } + }, + "outputs": [], + "source": [ + "dims, algosNames, alpha = [10, 30, 50], ['BA', 'ABA', 'SABA', 'HBA', 'HSABA'], 0.05\n", + "seed = 1\n", + "algs = {\n", + " 'BA': {},\n", + " 'HBA': {},\n", + " 'SABA': {},\n", + " 'HSABA': {\n", + " 'NP': 100,\n", + " 'A': 0.5,\n", + " 'F': 0.5,\n", + " 'CR': 0.9,\n", + "\t 'r': 0.01,\n", + " 'Qmax': 1,\n", + " 'Qmin': -1.2\n", + " }\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# CEC 2017 algorithm speed test\n", + "Example of time execution speed of algorithm" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Test one" ] }, { @@ -87,29 +175,138 @@ "start_time = time.time()\n", "testOne()\n", "t0 = (time.time() - start_time)\n", - "\n", - "print ('t0: ', t0)\n", - "\n", + "print(t0)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Test two" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "t1 = []\n", "for d in dims:\n", " start_time = time.time()\n", " testTwo(d)\n", " t1.append(time.time() - 
start_time)\n", - " \n", - "print ('t1: ', t1)\n", - "\n", - "t2 = []\n", - "for d in dims:\n", - " tmp = []\n", - " for a in algos: \n", - " r = np.full(5, 0.0)\n", - " for i in range(len(r)): r[i] = runTree(a, d)\n", - " tmp.append(np.mean(r))\n", - " t2.append(tmp)\n", - "\n", - "for i, d in enumerate(dims):\n", - " for j, a in enumerate(algos):\n", - " print ('%10s %d -- %.4E' % (a, d, (t2[i][j] - t1[i]) / t0))" + "print(t1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Test three" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d = 10" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Run on basic function" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "autil = AlgorithmUtility()\n", + "res, f = {}, 1\n", + "for k, v in algs.items():\n", + " a = autil.get_algorithm(k)(seed=seed + 1, **v)\n", + " r = testThreeBasic(a, d, f)\n", + " res[k] = [r[0], r[1][1]]\n", + "display('Func %d' % f)\n", + "display(pd.DataFrame.from_dict(res))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Run on CEC function" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "autil = AlgorithmUtility()\n", + "for f in range(1):\n", + " res = {}\n", + " for k, v in algs.items():\n", + " a = autil.get_algorithm(k)(seed=seed + 1, **v)\n", + " r = testThreeCec(a, d, f + 1)\n", + " res[k] = [r[0], r[1][1]]\n", + " display('Func %d' % (f + 1))\n", + " display(pd.DataFrame.from_dict(res))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Running on multiple threads" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Run on basic function" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "res, d, f = {}, 10, 1\n", + "for k, v in algs.items(): res[k] = runThread(k, d, f, thread_fun=t_fun_basic, **v)\n", + "display('Func %d' % f)\n", + "display(pd.DataFrame.from_dict(res))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Run on CEC function" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "res, d, f = {}, 10, 1\n", + "for k, v in algs.items(): res[k] = runThread(k, d, f, thread_fun=t_fun_cec, **v)\n", + "display('Func %d' % f)\n", + "display(pd.DataFrame.from_dict(res))" ] }, { @@ -141,31 +338,16 @@ }, "outputs": [], "source": [ - "dim, data = dims[0], []\n", - "data = np.asanyarray([[np.loadtxt('data/%s_%d_%d_v' % (a, fnum, dim)) for fnum in range(1, 31)] for a in algos])\n", - "# Get basic statistics\n", - "vals = []\n", - "for fnum in range(30):\n", - " tmp = []\n", - " print ('\\nfun_num: %d' % (fnum + 1))\n", - " for i, a in enumerate(algos):\n", - " d = data[i, fnum] = data[i, fnum] - (fnum + 1) * 100\n", - " print ('%10s:\\tmin: %.3E \\tmean: %.3E\\tstd: %.3E' % (a, np.min(d), np.mean(d), np.std(d)))\n", - " tmp.append((np.min(d), np.mean(d), np.std(d)))\n", - " vals.append(tmp)\n", - "vals = np.asanyarray(vals)\n", - "# Get best values for basic statistics\n", - "imin, imean, istd = [], [], []\n", - "for fnum in range(30):\n", - " imin.append(np.argmin([vals[fnum, i, 0] for i in range(len(algos))]))\n", - " imean.append(np.argmin([vals[fnum, i, 1] for i in range(len(algos))]))\n", - " 
istd.append(np.argmin([vals[fnum, i, 2] for i in range(len(algos))]))\n", - "# Generate table entrys for latex\n", - "out = ''\n", - "for i in range(len(algos)):\n", - " for fnum in range(30): out += ('%.3E' if i != imin[fnum] else '\\\\textbf{%.3E}') % vals[fnum, i, 0] + ' & ' + ('%.3E' if i != imean[fnum] else '\\\\textbf{%.3E}') % vals[fnum, i, 1] + ' & ' + ('%.3E' if i != istd[fnum] else '\\\\textbf{%.3E}') % vals[fnum, i, 2] + ' \\\\\\\\ \\n'\n", - " out += '\\n'\n", - "print ('\\n', out)" + "res, d = {}, 10\n", + "for k, v in algs.items():\n", + " print('START algor run for %s' % k)\n", + " for f in range(1, 31):\n", + " print('START function %d run' % f)\n", + " tmp = runThread(k, d, f, thread_fun=t_fun_cec, **v)\n", + " if res.get(k, None) is None: res[k] = [tmp]\n", + " else: res[k].append(tmp)\n", + " print('END function run')\n", + " print('END algor run')" ] }, { @@ -299,9 +481,9 @@ ], "metadata": { "kernelspec": { - "display_name": "NiaPy", + "display_name": "Python 3", "language": "python", - "name": "niapy" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -313,9 +495,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.0" + "version": "3.7.3" } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/CEC/cec2005/Makefile b/CEC/cec2005/Makefile index 80f1ed1..9319a87 100644 --- a/CEC/cec2005/Makefile +++ b/CEC/cec2005/Makefile @@ -2,10 +2,9 @@ PYTHON=python PVERSION=$(shell $(PYTHON) -c "import sys; print('%d.%d.%d' % (sys.version_info.major, sys.version_info.minor, sys.version_info.micro))") build: cec5_test_func.cpp cec5_test_func.h data.h cec2005.pyx - $(PYTHON) setup.py build_ext -i - cp build/lib.linux-x86_64-3.7/cec2005.cpython-37m-x86_64-linux-gnu.so . + $(PYTHON) setup.py build_ext --inplace clean: - rm cec2005.cpp - rm -r build - rm *.so + -rm cec2005.cpp + -rm -r build + -rm *.so diff --git a/CEC/cec2005/setup.py b/CEC/cec2005/setup.py index be48b94..dbdaf66 100644 --- a/CEC/cec2005/setup.py +++ b/CEC/cec2005/setup.py @@ -1,16 +1,21 @@ +import os + from distutils.core import setup from Cython.Build import cythonize from distutils.extension import Extension import numpy +extra_compile_args=['-std=c++11', '-w', '-O3', '-march=native'] +if os.getenv('DEBUG', None) is not None: extra_compile_args=['-std=c++11', '-g3', '-O0'] + extensions = [ Extension('cec2008', ['cec2008.pyx', 'cec8_test_func.c'], include_dirs=[numpy.get_include()], + extra_compile_args=extra_compile_args, language='c' ), ] setup( ext_modules=cythonize(extensions), - extra_compile_args=['-w', '-O3', '-narch=native'], ) diff --git a/CEC/cec2008/Makefile b/CEC/cec2008/Makefile index 6aa1fb1..8ccf8ef 100644 --- a/CEC/cec2008/Makefile +++ b/CEC/cec2008/Makefile @@ -3,10 +3,9 @@ PVERSION=$(shell $(PYTHON) -c "import sys; print('%d.%d.%d' % (sys.version_info. PVERSIONS=$(shell $(PYTHON) -c "import sys; print('%d%d' % (sys.version_info.major, sys.version_info.minor))") build: cec8_test_func.cpp cec8_test_func.h data.h cec2008.pyx - $(PYTHON) setup.py build_ext -i - 8cp build/lib.linux-x86_64-$(PVERSION)/cec2008.cpython-$(PVERSIONS)m-x86_64-linux-gnu.so . 
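+# build_ext --inplace places the compiled cec2008 extension (*.so) directly in this directory, independent of the Python version and platform tag, so no versioned build path has to be copied from.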
+ $(PYTHON) setup.py build_ext --inplace clean: - rm cec2008.cpp - rm -r build - rm *.so + -rm cec2008.cpp + -rm -r build + -rm *.so diff --git a/CEC/cec2008/setup.py b/CEC/cec2008/setup.py index eeb2a35..48d6ed4 100644 --- a/CEC/cec2008/setup.py +++ b/CEC/cec2008/setup.py @@ -1,17 +1,21 @@ +import os + from distutils.core import setup from Cython.Build import cythonize from distutils.extension import Extension import numpy +extra_compile_args=['-std=c++11', '-w', '-O3', '-march=native'] +if os.getenv('DEBUG', None) is not None: extra_compile_args=['-std=c++11', '-g3', '-O0'] + extensions = [ - Extension('cec2016', ['cec2016.pyx', 'cec16_test_func.cpp'], + Extension('cec2008', ['cec2008.pyx', 'cec8_test_func.cpp'], include_dirs=[numpy.get_include()], - extra_compile_args=['-std=c++17'], + extra_compile_args=extra_compile_args, language='c++' ), ] setup( ext_modules=cythonize(extensions), - extra_compile_args=['-w', '-O3', '-march=native'], ) diff --git a/CEC/cec2013/Makefile b/CEC/cec2013/Makefile index 1e18ad6..4bb6e2c 100644 --- a/CEC/cec2013/Makefile +++ b/CEC/cec2013/Makefile @@ -3,10 +3,9 @@ PVERSION=--python $(shell $(PYTHON) -c "import sys; print('%d.%d.%d' % (sys.vers PVERSIONS=$(shell $(PYTHON) -c "import sys; print('%d%d' % (sys.version_info.major, sys.version_info.minor))") build: cec13_test_func.cpp cec13_test_func.h cec2013.pyx - $(PYTHON) setup.py build_ext -i - cp build/lib.linux-x86_64-$(PVERSION)/cec2013.cpython-$(PVERSIONS)m-x86_64-linux-gnu.so . + $(PYTHON) setup.py build_ext --inplace clean: - rm cec2013.cpp - rm -r build - rm *.so + -rm cec2013.cpp + -rm -r build + -rm *.so diff --git a/CEC/cec2013/setup.py b/CEC/cec2013/setup.py index c0b780b..0572f7a 100644 --- a/CEC/cec2013/setup.py +++ b/CEC/cec2013/setup.py @@ -1,17 +1,21 @@ +import os + from distutils.core import setup from Cython.Build import cythonize from distutils.extension import Extension import numpy +extra_compile_args=['-std=c++11', '-w', '-O3', '-march=native'] +if os.getenv('DEBUG', None) is not None: extra_compile_args=['-std=c++11', '-g3', '-O0'] + extensions = [ Extension('cec2013', ['cec2013.pyx', 'cec13_test_func.cpp'], include_dirs=[numpy.get_include()], - extra_compile_args=['-std=c++17'], + extra_compile_args=extra_compile_args, language='c++' ), ] setup( ext_modules=cythonize(extensions), - extra_compile_args=['-w', '-O3', '-march=native'], ) diff --git a/CEC/cec2014/Makefile b/CEC/cec2014/Makefile index f7cd263..ac2e9fb 100644 --- a/CEC/cec2014/Makefile +++ b/CEC/cec2014/Makefile @@ -3,10 +3,9 @@ PVERSION=$(shell $(PYTHON) -c "import sys; print('%d.%d.%d' % (sys.version_info. PVERSIONS=$(shell $(PYTHON) -c "import sys; print('%d%d' % (sys.version_info.major, sys.version_info.minor))") build: cec14_test_func.cpp cec14_test_func.h cec2014.pyx - $(PYTHON) setup.py build_ext -i - cp build/lib.linux-x86_64-$(PVERSION)/cec2014.cpython-$(PVERSIONS)m-x86_64-linux-gnu.so . 
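+# setup.py builds a single cec2014 module from cec2014.pyx and cec14_test_func.cpp (see the Extension sources defined there).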
+ $(PYTHON) setup.py build_ext --inplace clean: - rm cec2014.cpp - rm -r build - rm *.so + -rm cec2014.cpp + -rm -r build + -rm *.so diff --git a/CEC/cec2014/setup.py b/CEC/cec2014/setup.py index 96fc69a..320184a 100644 --- a/CEC/cec2014/setup.py +++ b/CEC/cec2014/setup.py @@ -1,17 +1,21 @@ +import os + from distutils.core import setup from Cython.Build import cythonize from distutils.extension import Extension import numpy +extra_compile_args=['-std=c++11', '-w', '-O3', '-march=native'] +if os.getenv('DEBUG', None) is not None: extra_compile_args=['-std=c++11', '-g3', '-O0'] + extensions = [ Extension('cec2014', ['cec2014.pyx', 'cec14_test_func.cpp'], include_dirs=[numpy.get_include()], - extra_compile_args=['-std=c++17'], + extra_compile_args=extra_compile_args, language='c++' ), ] setup( ext_modules=cythonize(extensions), - extra_compile_args=['-w', '-O3', '-march=native'], ) diff --git a/CEC/cec2015/Makefile b/CEC/cec2015/Makefile index fd4a34f..abb2593 100644 --- a/CEC/cec2015/Makefile +++ b/CEC/cec2015/Makefile @@ -3,10 +3,9 @@ PVERSION=$(shell $(PYTHON) -c "import sys; print('%d.%d.%d' % (sys.version_info. PVERSIONS=$(shell $(PYTHON) -c "import sys; print('%d%d' % (sys.version_info.major, sys.version_info.minor))") build: cec15_test_func.cpp cec15_test_func.h - $(PYTHON) setup.py build_ext -i - cp build/lib.linux-x86_64-$(PVERSION)/cec2015.cpython-$(PVERSIONS)m-x86_64-linux-gnu.so . + $(PYTHON) setup.py build_ext --inplace clean: - rm cec2015.cpp - rm -r build - rm *.so + -rm cec2015.cpp + -rm -r build + -rm *.so diff --git a/CEC/cec2015/setup.py b/CEC/cec2015/setup.py index a951a7c..b67baef 100644 --- a/CEC/cec2015/setup.py +++ b/CEC/cec2015/setup.py @@ -1,17 +1,21 @@ +import os + from distutils.core import setup from Cython.Build import cythonize from distutils.extension import Extension import numpy +extra_compile_args=['-std=c++11', '-w', '-O3', '-march=native'] +if os.getenv('DEBUG', None) is not None: extra_compile_args=['-std=c++11', '-g3', '-O0'] + extensions = [ Extension('cec2015', ['cec2015.pyx', 'cec15_test_func.cpp'], include_dirs=[numpy.get_include()], - extra_compile_args=['-std=c++17'], + extra_compile_args=extra_compile_args, language='c++' ), ] setup( ext_modules=cythonize(extensions), - extra_compile_args=['-w', '-O3', '-march=native'], ) diff --git a/CEC/cec2016/Makefile b/CEC/cec2016/Makefile index 3ebe114..64ef85c 100644 --- a/CEC/cec2016/Makefile +++ b/CEC/cec2016/Makefile @@ -3,10 +3,9 @@ PVERSION=$(shell $(PYTHON) -c "import sys; print('%d.%d.%d' % (sys.version_info. PVERSIONS=$(shell $(PYTHON) -c "import sys; print('%d%d' % (sys.version_info.major, sys.version_info.minor))") build: cec16_test_func.cpp cec16_test_func.h cec2016.pyx - $(PYTHON) setup.py build_ext -i - cp build/lib.linux-x86_64-$(PVERSION)/cec2016.cpython-$(PVERSIONS)m-x86_64-linux-gnu.so . 
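+# cythonize() in setup.py first generates C++ from cec2016.pyx; the extension is then compiled with the flags defined in setup.py.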
+ $(PYTHON) setup.py build_ext --inplace clean: - rm cec2016.cpp - rm -r build - rm *.so + -rm cec2016.cpp + -rm -r build + -rm *.so diff --git a/CEC/cec2016/setup.py b/CEC/cec2016/setup.py index eeb2a35..3d31bdc 100644 --- a/CEC/cec2016/setup.py +++ b/CEC/cec2016/setup.py @@ -1,17 +1,21 @@ +import os + from distutils.core import setup from Cython.Build import cythonize from distutils.extension import Extension import numpy +extra_compile_args=['-std=c++11', '-w', '-O3', '-march=native'] +if os.getenv('DEBUG', None) is not None: extra_compile_args=['-std=c++11', '-g3', '-O0'] + extensions = [ Extension('cec2016', ['cec2016.pyx', 'cec16_test_func.cpp'], include_dirs=[numpy.get_include()], - extra_compile_args=['-std=c++17'], + extra_compile_args=extra_compile_args, language='c++' ), ] setup( ext_modules=cythonize(extensions), - extra_compile_args=['-w', '-O3', '-march=native'], ) diff --git a/CEC/cec2017/Makefile b/CEC/cec2017/Makefile index 90d96aa..9678ff3 100644 --- a/CEC/cec2017/Makefile +++ b/CEC/cec2017/Makefile @@ -1,12 +1,10 @@ -PYTHON=python -PVERSION=$(shell $(PYTHON) -c "import sys; print('%d.%d.%d' % (sys.version_info.major, sys.version_info.minor, sys.version_info.micro))") -PVERSIONS=$(shell $(PYTHON) -c "import sys; print('%d%d' % (sys.version_info.major, sys.version_info.minor))") +CC:=gcc +CC_FLAGS:=-std=c99 -O3 -march=native -build: cec17_test_func.cpp cec17_test_func.h cec2017.pyx - $(PYTHON) setup.py build_ext -i - cp build/lib.linux-x86_64-$(PVERSION)/cec2017.cpython-$(PVERSIONS)m-x86_64-linux-gnu.so . +build: cec17_test_func.c + ${CC} -Wall -fPIC ${CC_FLAGS} -c cec17_test_func.c + ${CC} -shared -W -o cec2017.so cec17_test_func.o -lm -clean: - rm cec2017.cpp - rm -r build - rm *.so +clean: cec17_test_func.o cec2017.so + rm cec17_test_func.o + rm cec2017.so diff --git a/CEC/cec2017/cec17_test_func.cpp b/CEC/cec2017/cec17_test_func.c similarity index 96% rename from CEC/cec2017/cec17_test_func.cpp rename to CEC/cec2017/cec17_test_func.c index 94e1c43..414d6f9 100644 --- a/CEC/cec2017/cec17_test_func.cpp +++ b/CEC/cec2017/cec17_test_func.c @@ -1,253 +1,140 @@ #include #include #include -#include "cec17_test_func.h" + +#define INF 1.0e99 +#define EPS 1.0e-14 +#define E 2.7182818284590452353602874713526625 +#define PI 3.1415926535897932384626433832795029 double *OShift,*M,*y,*z,*x_bound; int ini_flag=0,n_flag,func_flag,*SS; -double runtest(double *x, int s, int fnum) { - double f; - return *cec17_test_func(x, &f, s, 1, fnum); +void shiftfunc (double *x, double *xshift, int nx,double *Os) { + int i; + for (i=0; i=17&&func_num<=22)||(func_num>=29&&func_num<=30))) printf("\nError: hf01,hf02,hf03,hf04,hf05,hf06,cf07&cf08 are NOT defined for D=2.\n"); - /* Load Matrix M*/ - sprintf(FileName, "cec2017/input_data/M_%d_D%d.txt", func_num,nx); - fpt = fopen(FileName,"r"); - if (fpt==NULL) printf("\n Error: Cannot open input file for reading \n"); - if (func_num<20) { - M=(double*)malloc(nx*nx*sizeof(double)); - if (M==NULL) printf("\nError: there is insufficient memory available!\n"); - for (i=0; i=11&&func_num<=20) { - sprintf(FileName, "cec2017/input_data/shuffle_data_%d_D%d.txt", func_num, nx); - fpt = fopen(FileName,"r"); - if (fpt==NULL) printf("\n Error: Cannot open input file for reading \n"); - SS=(int *)malloc(nx*sizeof(int)); - if (SS==NULL) printf("\nError: there is insufficient memory available!\n"); - for(i=0;i0) xasy[i]=pow(x[i],1.0+beta*i/(nx-1)*pow(x[i],0.5)); } - f[0] = sum; } -void zakharov_func (double *x, double *f, int nx, double *Os,double *Mr, int s_flag, int 
r_flag) /* zakharov */ { - int i; - sr_func (x, z, nx, Os, Mr,1.0, s_flag, r_flag); // shift and rotate - f[0] = 0.0; - double sum1 = 0.0; - double sum2 = 0.0; +void oszfunc (double *x, double *xosz, int nx) { + int i,sx; + double c1,c2,xx; for (i=0; i0) { + c1=10; + c2=7.9; + } else { + c1=5.5; + c2=3.1; + } + if (x[i]>0) sx=1; + else if (x[i]==0) sx=0; + else sx=-1; + xosz[i]=sx*exp(xx+0.049*(sin(c1*xx)+sin(c2*xx))); + } else xosz[i]=x[i]; + } +} + +void cf_cal(double *x, double *f, int nx, double *Os,double * delta,double * bias,double * fit, int cf_num) { + int i,j; + double *w; + double w_max=0,w_sum=0; + w=(double *)malloc(cf_num * sizeof(double)); + for (i=0; iw_max) w_max=w[i]; + } + for (i=0; i=17&&func_num<=22)||(func_num>=29&&func_num<=30))) printf("\nError: hf01,hf02,hf03,hf04,hf05,hf06,cf07&cf08 are NOT defined for D=2.\n"); + /* Load Matrix M*/ + sprintf(FileName, "cec2017/input_data/M_%d_D%d.txt", func_num,nx); + fpt = fopen(FileName,"r"); + if (fpt==NULL) printf("\n Error: Cannot open input file for reading \n"); + if (func_num<20) { + M=(double*)malloc(nx*nx*sizeof(double)); + if (M==NULL) printf("\nError: there is insufficient memory available!\n"); + for (i=0; i=11&&func_num<=20) { + sprintf(FileName, "cec2017/input_data/shuffle_data_%d_D%d.txt", func_num, nx); + fpt = fopen(FileName,"r"); + if (fpt==NULL) printf("\n Error: Cannot open input file for reading \n"); + SS=(int *)malloc(nx*sizeof(int)); + if (SS==NULL) printf("\nError: there is insufficient memory available!\n"); + for(i=0;i0) xasy[i]=pow(x[i],1.0+beta*i/(nx-1)*pow(x[i],0.5)); - } -} - -void oszfunc (double *x, double *xosz, int nx) { - int i,sx; - double c1,c2,xx; - for (i=0; i0) { - c1=10; - c2=7.9; - } else { - c1=5.5; - c2=3.1; - } - if (x[i]>0) sx=1; - else if (x[i]==0) sx=0; - else sx=-1; - xosz[i]=sx*exp(xx+0.049*(sin(c1*xx)+sin(c2*xx))); - } else xosz[i]=x[i]; - } -} - -void cf_cal(double *x, double *f, int nx, double *Os,double * delta,double * bias,double * fit, int cf_num) { - int i,j; - double *w; - double w_max=0,w_sum=0; - w=(double *)malloc(cf_num * sizeof(double)); - for (i=0; iw_max) w_max=w[i]; - } - for (i=0; i 3) { - sprintf(FileName, "input_data/M_%d_D%d.txt", func_num,nx); + sprintf(FileName, "cec2019/input_data/M_%d_D%d.txt", func_num,nx); fpt = fopen(FileName,"r"); if (fpt==NULL) { @@ -111,7 +114,7 @@ void cec19_test_func(double *x, double *f, int nx, int mx, int func_num) { /* Load shift_data */ if (func_num > 3) { - sprintf(FileName, "input_data/shift_data_%d.txt", func_num); + sprintf(FileName, "cec2019/input_data/shift_data_%d.txt", func_num); fpt = fopen(FileName,"r"); if (fpt==NULL) { @@ -340,7 +343,7 @@ void schwefel_func (double *x, double *f, int nx, double *Os,double *Mr,int s_fl -void escaffer6_func (double *x, double *f, int nx, double *Os,double *Mr,int s_flag, int r_flag) /* Expanded Scaffers F6 */ +void escaffer6_func (double *x, double *f, int nx, double *Os,double *Mr,int s_flag, int r_flag) /* Expanded Scaffer��s F6 */ { int i; double temp1, temp2; diff --git a/CEC/cec2019/cec19_test_func.h b/CEC/cec2019/cec19_test_func.h index a976e9c..1ebd873 100644 --- a/CEC/cec2019/cec19_test_func.h +++ b/CEC/cec2019/cec19_test_func.h @@ -6,10 +6,6 @@ #define E 2.7182818284590452353602874713526625 #define PI 3.1415926535897932384626433832795029 -extern double *OShift,*M,*y,*z,*x_bound; -//extern int ini_flag=0,n_flag,func_flag,*SS; -extern int ini_flag,n_flag,func_flag,*SS; - double runtest(double*, int, int); void cec19_test_func(double*, double*, int, int, int); void 
schaffer_F7_func (double*, double*, int, double*, double*, int, int); diff --git a/CEC/cec2019/setup.py b/CEC/cec2019/setup.py index 346ad47..fc416db 100644 --- a/CEC/cec2019/setup.py +++ b/CEC/cec2019/setup.py @@ -1,17 +1,21 @@ +import os + from distutils.core import setup from Cython.Build import cythonize from distutils.extension import Extension import numpy +extra_compile_args=['-std=c++11', '-w', '-O3', '-march=native'] +if os.getenv('DEBUG', None) is not None: extra_compile_args=['-std=c++11', '-g3', '-O0'] + extensions = [ Extension('cec2019', ['cec2019.pyx', 'cec19_test_func.cpp'], include_dirs=[numpy.get_include()], - extra_compile_args=['-std=c++17'], + extra_compile_args=extra_compile_args, language='c++' ), ] setup( ext_modules=cythonize(extensions), - extra_compile_args=['-w', '-O3', '-march=native'], ) diff --git a/CEC/cecargparser.py b/CEC/cecargparser.py index f3fdac7..5c12f9a 100644 --- a/CEC/cecargparser.py +++ b/CEC/cecargparser.py @@ -1,7 +1,7 @@ import sys from NiaPy.util import MakeArgParser -ccecs = [8, 13, 14, 15, 16, 17, 18] +ccecs = [8, 13, 14, 15, 16, 17, 18, 19] creduces = [0.01, 0.02, 0.03, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0] def positiveInt(x): return abs(int(x)) @@ -16,7 +16,7 @@ def MakeArgParserCEC(): parser.add_argument('-d', '--dim', dest='D', default=10, type=positiveInt) parser.add_argument('-nr', '--nFESreduc', dest='reduc', default=creduces[-1], choices=creduces, type=float) parser.add_argument('-rn', '--rnum', dest='runs', default=51, type=positiveInt) - parser.add_argument('-o', '--wout', dest='wout', default=False, type=str2bool) + parser.add_argument('-o', '--wout', dest='wout', default=True, type=str2bool) return parser def getArgs(argv): diff --git a/CEC/run_cec.py b/CEC/run_cec.py index 79bba1d..282b812 100644 --- a/CEC/run_cec.py +++ b/CEC/run_cec.py @@ -1,11 +1,15 @@ # encoding=utf8 import sys - import random import logging -from numpy import asarray, savetxt, set_printoptions -from NiaPy import Runner -from NiaPy.util import Task, TaskConvPrint, TaskConvPlot, OptimizationType +import ctypes + +import pandas as pd +from numpy import asarray, savetxt, set_printoptions, inf + +from NiaPy.algorithms import AlgorithmUtility +from NiaPy.benchmarks import Benchmark +from NiaPy.task import StoppingTask, OptimizationType from cecargparser import getDictArgs logging.basicConfig() @@ -17,111 +21,135 @@ # For output results printing set_printoptions(linewidth=10000000, formatter={'all': lambda x: str(x)}) -class MinMB(object): - def __init__(self, run_fun, Lower=-100, Upper=100, fnum=1): - self.Lower, self.Upper = Lower, Upper - self.fnum = fnum - self.run_fun = run_fun +def fillprototype(f, restype, argtypes): f.restype, f.argtypes = restype, argtypes - def function(self): - def evaluate(D, sol): return self.run_fun(asarray(sol), self.fnum) - return evaluate +class MinMB(Benchmark): + def __init__(self, run_fun, Lower=-100, Upper=100, fnum=1): + Benchmark.__init__(self, Lower=Lower, Upper=Upper) + self.fnum = fnum + self.run_fun = run_fun + + def function(self): return lambda d, x: self.run_fun(asarray(x).ctypes.data_as(ctypes.POINTER(ctypes.c_double)), ctypes.c_int(d), ctypes.c_int(self.fnum)) class MaxMB(MinMB): - def function(self): - f = MinMB.function(self) - def e(D, sol): return -f(D, sol) - return e + def function(self): + f = MinMB.function(self) + return lambda d, x: -f(d, x) cdimsOne = [2, 10, 30, 50] cdimsTwo = [2, 5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100] cdimsThree = [10, 30, 50, 100] cdimsFour = [10, 30] +cdimsFive 
= [9, 10, 16, 18] def getCecBench(cec, d): - if cec == 5: - sys.path.append('cec2005') - from cec2005 import run_fun - if d not in cdimsOne: raise Exception('Dimension sould be in %s' % (cdimsOne)) - elif cec == 8: - sys.path.append('cec2008') - from cec2008 import run_fun - elif cec == 13: - sys.path.append('cec2013') - from cec2013 import run_fun - if d not in cdimsTwo: raise Exception('Dimension sould be in %s' % (cdimsTwo)) - elif cec == 14: - sys.path.append('cec2014') - from cec2014 import run_fun - if d not in cdimsThree: raise Exception('Dimension sould be in %s' % (cdimsThree)) - elif cec == 15: - sys.path.append('cec2015') - from cec2015 import run_fun - if d not in cdimsFour: raise Exception('Dimension sould be in %s' % (cdimsFour)) - elif cec == 16: - sys.path.append('cec2016') - from cec2016 import run_fun - if d not in cdimsOne: raise Exception('Dimension sould be in %s' % (cdimsOne)) - elif cec == 17: - sys.path.append('cec2017') - from cec2017 import run_fun - if d not in cdimsThree: raise Exception('Dimension sould be in %s' % (cdimsThree)) - elif cec == 18: - sys.path.append('cec2018') - from cec2018 import run_fun - if d not in cdimsThree: raise Exception('Dimension sould be in %s' % (cdimsThree)) - elif cec == 19: - sys.path.append('cec2019') - from cec2019 import run_fun - if d not in cdimsThree: raise Exception('Dimension sould be in %s' % (cdimsThree)) - return run_fun + if cec == 5: + sys.path.append('cec2005') + from cec2005 import run_fun + if d not in cdimsOne: raise Exception('Dimension sould be in %s' % (cdimsOne)) + elif cec == 8: + sys.path.append('cec2008') + from cec2008 import run_fun + elif cec == 13: + sys.path.append('cec2013') + from cec2013 import run_fun + if d not in cdimsTwo: raise Exception('Dimension sould be in %s' % (cdimsTwo)) + elif cec == 14: + sys.path.append('cec2014') + from cec2014 import run_fun + if d not in cdimsThree: raise Exception('Dimension sould be in %s' % (cdimsThree)) + elif cec == 15: + sys.path.append('cec2015') + from cec2015 import run_fun + if d not in cdimsFour: raise Exception('Dimension sould be in %s' % (cdimsFour)) + elif cec == 16: + sys.path.append('cec2016') + from cec2016 import run_fun + if d not in cdimsOne: raise Exception('Dimension sould be in %s' % (cdimsOne)) + elif cec == 17: + sys.path.append('cec2017') + from cec2017 import run_fun + if d not in cdimsThree: raise Exception('Dimension sould be in %s' % (cdimsThree)) + elif cec == 18: + sys.path.append('cec2018') + from cec2018 import run_fun + if d not in cdimsThree: raise Exception('Dimension sould be in %s' % (cdimsThree)) + elif cec == 19: + sys.path.append('cec2019') + from cec2019 import run_fun + if d not in cdimsFive: raise Exception('Dimension sould be in %s' % (cdimsThree)) + return run_fun def getMaxFES(cec): - if cec == 8: return 5000 - if cec in [5, 13, 14, 15, 17, 18]: return 10000 - else: return 10000 + if cec == 8: return 5000 + elif cec in [5, 13, 14, 15, 17, 18]: return 10000 + elif cec == 19: return inf + else: return 10000 def simple_example(alg, cec, fnum=1, runs=10, D=10, nFES=50000, nGEN=5000, seed=[None], optType=OptimizationType.MINIMIZATION, optFunc=MinMB, wout=False, sr=[-100, 100], **kwu): - bests, func = list(), getCecBench(cec, D) - for i in range(runs): - task = Task(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc(func, sr[0], sr[1], fnum)) - algo = alg(seed=seed[i % len(seed)], task=task) - best = algo.run() - logger.info('%s %s' % (best[0], best[1])) - bests.append(best) - if wout: - bpos, bval = 
asarray([x[0] for x in bests]), asarray([x[1] for x in bests]) - savetxt('%s_%d_%d_p' % (algo.Name[-1], fnum, D), bpos) - savetxt('%s_%d_%d_v' % (algo.Name[-1], fnum, D), bval) - -def logging_example(alg, cec, fnum=1, D=10, nFES=50000, nGEN=5000, seed=[None], optType=OptimizationType.MINIMIZATION, optFunc=MinMB, wout=False, sr=[-100, 100], **kwu): - func = getCecBench(cec, D) - task = TaskConvPrint(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc(func, sr[0], sr[1], fnum)) - algo = alg(seed=seed[0], task=task) - best = algo.run() - logger.info('%s %s' % (best[0], best[1])) + bests, func = list(), getCecBench(cec, D) + for i in range(runs): + task = StoppingTask(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc(func, sr[0], sr[1], fnum)) + algo = alg(seed=seed[i % len(seed)]) + best = algo.run(task) + logger.info('%s %s' % (best[0], best[1])) + bests.append(best) + if wout: + bpos, bval = asarray([x[0] for x in bests]), asarray([x[1] for x in bests]) + savetxt('%s_%d_%d_p' % (algo.Name[-1], fnum, D), bpos) + savetxt('%s_%d_%d_v' % (algo.Name[-1], fnum, D), bval) + +def logging_example(alg, cec, fnum=1, D=10, nFES=50000, nGEN=5000, seed=[None], optType=OptimizationType.MINIMIZATION, optFunc=MinMB, wout=False, sr=[-8192, 8192], **kwu): + func = getCecBench(cec, D) + task = StoppingTask(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc(func, sr[0], sr[1], fnum), logger=True) + algo = alg(seed=seed[0], NP=100, vMin=-16000, vMax=16000, w=0.5) + best = algo.run(task) + logger.info('%s %s' % (best[0], best[1])) + +def save_example(alg, cec, fnum=1, runs=10, D=10, nFES=50000, nGEN=5000, seed=[None], optType=OptimizationType.MINIMIZATION, optFunc=MinMB, wout=True, sr=[-100, 100], **kwu): + bests, conv_it, conv_f, func = list(), list(), list(), getCecBench(cec, D) + for i in range(runs): + task = StoppingTask(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc(func, sr[0], sr[1], fnum)) + algo = alg(seed=seed[i % len(seed)]) + best = algo.run(task) + logger.info('%s %s' % (best[0], best[1])) + bests.append(best) + conv_it.append(task.evals) + conv_f.append(task.x_f_vals) + if wout: + bpos, bval = asarray([x[0] for x in bests]), asarray([x[1] for x in bests]) + savetxt('%s_%d_%d_p' % (algo.Name[-1], fnum, D), bpos) + savetxt('%s_%d_%d_v' % (algo.Name[-1], fnum, D), bval) + inds = [] + for i in range(runs): inds.append('evals'), inds.append('funvl') + data = [] + for i in range(runs): data.append(conv_it[i]), data.append(conv_f[i]) + pd.DataFrame(data, index=inds).T.to_csv('%s_%d_%d.csv' % (algo.Name[-1], fnum, D), sep=',', index=False) def plot_example(alg, cec, fnum=1, D=10, nFES=50000, nGEN=5000, seed=[None], optType=OptimizationType.MINIMIZATION, optFunc=MinMB, wout=False, sr=[-100, 100], **kwu): - func = getCecBench(cec, D) - task = TaskConvPlot(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc(func, sr[0], sr[1], fnum)) - algo = alg(seed=seed[0], task=task) - best = algo.run() - logger.info('%s %s' % (best[0], best[1])) - input('Press [enter] to continue') + func = getCecBench(cec, D) + task = TaskConvPlot(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc(func, sr[0], sr[1], fnum)) + algo = alg(seed=seed[0]) + best = algo.run(task) + logger.info('%s %s' % (best[0], best[1])) + input('Press [enter] to continue') def getOptType(otype): - if otype == OptimizationType.MINIMIZATION: return MinMB - elif otype == OptimizationType.MAXIMIZATION: return MaxMB - else: return None + if otype == OptimizationType.MINIMIZATION: return MinMB 
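+	# MaxMB (defined above) negates the MinMB objective, so maximization reuses the same CEC evaluator.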
+ elif otype == OptimizationType.MAXIMIZATION: return MaxMB + else: return None if __name__ == '__main__': - pargs = getDictArgs(sys.argv[1:]) - pargs['nFES'] = round(pargs['D'] * getMaxFES(pargs['cec']) * pargs['reduc']) - algo = Runner.getAlgorithm(pargs['algo']) - optFunc = getOptType(pargs['optType']) - if not pargs['runType']: simple_example(algo, optFunc=optFunc, **pargs) - elif pargs['runType'] == 'log': logging_example(algo, optFunc=optFunc, **pargs) - elif pargs['runType'] == 'plot': plot_example(algo, optFunc=optFunc, **pargs) - else: simple_example(algo, optFunc=optFunc, **pargs) + pargs = getDictArgs(sys.argv[1:]) + fes = getMaxFES(pargs['cec']) if not inf else sys.maxsize + pargs['nFES'] = round(pargs['D'] * fes * pargs['reduc']) + algUtl = AlgorithmUtility() + algo = algUtl.get_algorithm(pargs['algo']) + optFunc = getOptType(pargs['optType']) + if not pargs['runType']: simple_example(algo, optFunc=optFunc, **pargs) + elif pargs['runType'] == 'log': logging_example(algo, optFunc=optFunc, **pargs) + elif pargs['runType'] == 'plot': plot_example(algo, optFunc=optFunc, **pargs) + elif pargs['runType'] == 'save': save_example(algo, optFunc=optFunc, **pargs) + else: simple_example(algo, optFunc=optFunc, **pargs) # vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3 diff --git a/CECC/Makefile b/CECC/Makefile new file mode 100644 index 0000000..f39cb5c --- /dev/null +++ b/CECC/Makefile @@ -0,0 +1,5 @@ +install: Pipfile + pipenv install + +clean: Pipfile.lock + pipenv --rm diff --git a/CECC/Pipfile b/CECC/Pipfile new file mode 100644 index 0000000..c72915b --- /dev/null +++ b/CECC/Pipfile @@ -0,0 +1,14 @@ +[[source]] +url = "https://pypi.python.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +pipfile = "*" +numpy = ">=1.16.3" +scipy = ">=1.2.1" +matplotlib = ">=3.0.3" +pandas = ">=0.24.2" +pydot = ">=1.4.1" +graphviz = ">=0.10.1" +Cython = ">=0.29.7" diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..8df99ab --- /dev/null +++ b/Makefile @@ -0,0 +1,14 @@ +MAKE:=make + +install: Pipfile + pipenv install + +lab: + pipenv run jupyter lab + +shell: + pipenv shell + +uninstall: + pipenv --rm + -rm Pipfile.lock diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..1b26139 --- /dev/null +++ b/Pipfile @@ -0,0 +1,20 @@ +[[source]] +url = "https://pypi.python.org/simple" +verify_ssl = true +name = "pypi" + +[requires] +python_version = '3.7' + +[packages] +pipfile = "*" +numpy = ">=1.16.3" +scipy = ">=1.2.1" +matplotlib = ">=3.0.3" +pandas = ">=0.21.0" +pydot = ">=1.4.1" +graphviz = ">=0.10.1" +XlsxWriter = ">=1.2.6" +scikit-learn = ">=0.21.3" +jupyterlab = "*" +niapy = {path = "../NiaPy/dist/NiaPy-2.0.0rc10-py3-none-any.whl"} diff --git a/README.md b/README.md index f5cd5be..04fcf25 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,14 @@ # NiaPy examples +
[NiaPy logo image]
+ This repository contains various examples of usage of the [NiaPy micro-framework](https://github.com/NiaOrg/NiaPy). Each example is placed in a separate folder inside this repository with its own instructions on how to run it. Current list of examples: -- [Optimization of KNN parameters](https://github.com/NiaOrg/NiaPy-examples/tree/master/optimize_KNN_parameters) -- [CEC Competitions on Real-Parameter Single Objective Optimization](https://github.com/NiaOrg/NiaPy-examples/tree/master/CEC) -- [CEC Competitions on Constrained Real Parameter single objective optimization](https://github.com/NiaOrg/NiaPy-examples/tree/master/CECC) +- [Optimization of KNN parameters](optimize_KNN_parameters) +- [CEC Competitions on Real-Parameter Single Objective Optimization](CEC) +- [CEC Competitions on Constrained Real Parameter single objective optimization](CECC) +- [Clustering optimization](clustering_datasets) ## Contributing diff --git a/alogorithm_statistical_comparison/Makefile b/alogorithm_statistical_comparison/Makefile new file mode 100644 index 0000000..688f3ff --- /dev/null +++ b/alogorithm_statistical_comparison/Makefile @@ -0,0 +1,33 @@ +kernelName:=rosispy + +install: Pipfile + pipenv install + pipenv run jupyter labextension install @jupyter-widgets/jupyterlab-manager + pipenv run jupyter labextension install jupyter-matplotlib + pipenv run jupyter labextension install @jupyterlab/toc + pipenv run jupyter labextension install @oriolmirosa/jupyterlab_materialdarker + pipenv run jt -t onedork -lineh 110 -nfs 10 -fs 10 -tfs 10 -cellw 80% + +kernel: install + pipenv run python -m ipykernel install --user --name=$(kernelName) + +rKernel: + jupyter kernelspec remove -f $(kernelName) + +run: + pipenv run jupyter lab + +lab: + pipenv run jupyter lab + +notebook: + pipenv run jupyter notebook + +console: + pipenv run ipython + +clean: Pipfile.lock + pipenv --rm + rm Pipfile.lock + +all: install kernel run diff --git a/alogorithm_statistical_comparison/Pipfile b/alogorithm_statistical_comparison/Pipfile new file mode 100644 index 0000000..6f25b60 --- /dev/null +++ b/alogorithm_statistical_comparison/Pipfile @@ -0,0 +1,18 @@ +[[source]] +url = "https://pypi.python.org/simple" +verify_ssl = true +name = "pypi" + +[requires] +python_version = '3.7' + +[packages] +pipfile = "*" +jupyterlab = ">=0.35.5" +ipykernel = ">=5.1.0" +numpy = ">=1.16.3" +scipy = ">=1.2.1" +matplotlib = ">=3.0.3" +pandas = ">=0.24.2" +ipympl = ">=0.2.1" +jupyterthemes = ">=0.20.0" diff --git a/alogorithm_statistical_comparison/README.md b/alogorithm_statistical_comparison/README.md new file mode 100644 index 0000000..d90c9f4 --- /dev/null +++ b/alogorithm_statistical_comparison/README.md @@ -0,0 +1,6 @@ +# Install and run +1. Install [Nodejs](https://nodejs.org/) +2. Install [NPM](https://www.npmjs.com/) +3. Run `make install` for creating environmen +4. Install [NiaPy](https://github.com/NiaOrg/NiaPy) onto created python environment +5. 
Run `make run` for runing the jupyter lab diff --git a/alogorithm_statistical_comparison/data_analyze.ipynb b/alogorithm_statistical_comparison/data_analyze.ipynb new file mode 100644 index 0000000..fc5cbf0 --- /dev/null +++ b/alogorithm_statistical_comparison/data_analyze.ipynb @@ -0,0 +1,290 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "from typing import List, Tuple\n", + "import pandas as pd\n", + "pd.set_option('display.max_columns', 100)\n", + "pd.set_option('display.max_rows', 100)\n", + "import numpy as np\n", + "np.set_printoptions(threshold=sys.maxsize)\n", + "%matplotlib widget\n", + "import matplotlib.pyplot as plt\n", + "from matplotlib.ticker import (AutoMinorLocator, MultipleLocator)\n", + "import scipy.stats as stats" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Nalaganje podatkov" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pso_df = pd.DataFrame(np.asarray([np.loadtxt('PSO/PSO_%d_10_v' % fnum) - fnum * 100 for fnum in range(1, 31)]).T)\n", + "mkev3_df = pd.DataFrame(np.asarray([np.loadtxt('MKEv3/MKEv3_%d_10_v' % fnum) - fnum * 100 for fnum in range(1, 31)]).T)\n", + "nmm_df = pd.DataFrame(np.asarray([np.loadtxt('NMM/NMM_%d_10_v' % fnum) - fnum * 100 for fnum in range(1, 31)]).T)\n", + "es11_df = pd.DataFrame(np.asarray([np.loadtxt('ES(1+1)/ES(1+1)_%d_10_v' % fnum) - fnum * 100 for fnum in range(1, 31)]).T)\n", + "de_df = pd.DataFrame(np.asarray([np.loadtxt('DE/DE_%d_10_v' % fnum) - fnum * 100 for fnum in range(1, 31)]).T)\n", + "jde_df = pd.DataFrame(np.asarray([np.loadtxt('jDE/jDE_%d_10_v' % fnum) - fnum * 100 for fnum in range(1, 31)]).T)\n", + "aco_df = pd.DataFrame(np.asarray([np.loadtxt('ACO/ACO_%d_10_v' % fnum) - fnum * 100 for fnum in range(1, 31)]).T)\n", + "cso_df = pd.DataFrame(np.asarray([np.loadtxt('CSO/CSO_%d_10_v' % fnum) - fnum * 100 for fnum in range(1, 31)]).T)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Predalava podtkov" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Uporaba povprečja" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "de_data = de_df.describe().iloc[1, :].values.flatten()\n", + "jde_data = jde_df.describe().iloc[1, :].values.flatten()\n", + "pso_data = pso_df.describe().iloc[1, :].values.flatten()\n", + "mkev3_data = mkev3_df.describe().iloc[1, :].values.flatten()\n", + "nmm_data = nmm_df.describe().iloc[1, :].values.flatten()\n", + "es11_data = es11_df.describe().iloc[1, :].values.flatten()\n", + "aco_data = aco_df.describe().iloc[1, :].values.flatten()\n", + "cso_data = cso_df.describe().iloc[1, :].values.flatten()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Uporaba mediane" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "de_data = de_df.describe().iloc[5, :].values.flatten()\n", + "jde_data = jde_df.describe().iloc[5, :].values.flatten()\n", + "pso_data = pso_df.describe().iloc[5, :].values.flatten()\n", + "mkev3_data = mkev3_df.describe().iloc[5, :].values.flatten()\n", + "nmm_data = nmm_df.describe().iloc[5, :].values.flatten()\n", + "es11_data = es11_df.describe().iloc[5, :].values.flatten()\n", + "aco_data = aco_df.describe().iloc[5, :].values.flatten()\n", + "cso_data = cso_df.describe().iloc[5, 
:].values.flatten()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Uporaba vseh podatkov" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "de_data = de_df.values.T.flatten()\n", + "jde_data = jde_df.values.T.flatten()\n", + "pso_data = pso_df.values.T.flatten()\n", + "mkev3_data = mkev3_df.values.T.flatten()\n", + "nmm_data = nmm_df.values.T.flatten()\n", + "es11_data = es11_df.values.T.flatten()\n", + "aco_data = aco_df.values.T.flatten()\n", + "cso_data = cso_df.values.T.flatten()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Uporaba mediane, povprečja, standardnega odklona, maksimalne in minimalne vrednosti" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "de_data = de_df.describe().iloc[[1, 2, 3, 5, 7], :].T.values.flatten()\n", + "jde_data = jde_df.describe().iloc[[1, 2, 3, 5, 7], :].T.values.flatten()\n", + "pso_data = pso_df.describe().iloc[[1, 2, 3, 5, 7], :].T.values.flatten()\n", + "mkev3_data = mkev3_df.describe().iloc[[1, 2, 3, 5, 7], :].T.values.flatten()\n", + "nmm_data = nmm_df.describe().iloc[[1, 2, 3, 5, 7], :].T.values.flatten()\n", + "es11_data = es11_df.describe().iloc[[1, 2, 3, 5, 7], :].T.values.flatten()\n", + "aco_data = aco_df.describe().iloc[[1, 2, 3, 5, 7], :].T.values.flatten()\n", + "cso_data = cso_df.describe().iloc[[1, 2, 3, 5, 7], :].T.values.flatten()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Končan priprava podatkov" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# algs_data = [de_data, jde_data, pso_data, mkev3_data, nmm_data, es11_data, aco_data, cso_data]\n", + "# algs_labels = ['DE', 'jDE', 'PSO', 'MKEv3', 'NMM', 'ES(1+1)', 'ACO', 'CSO']\n", + "algs_data = [pso_data, mkev3_data, nmm_data, es11_data, aco_data, cso_data]\n", + "algs_labels = ['PSO', 'MKEv3', 'NMM', 'ES(1+1)', 'ACO', 'CSO']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Statistični testi" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Friedman test" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Kriticne razdalje za friedman test\n", + "$$\n", + "CD = q_{\\alpha} \\sqrt{\\frac{k(k + 1)}{6N}}\n", + "$$\n", + ", kjer:\n", + "* $k \\mapsto$ Stevilo algoritmov\n", + "* $N \\mapsto$ Stevilo problemov\n", + "* $q_{\\alpha} \\mapsto$ Kriticna vrednost" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def wilcoxonSignedRanks(a:np.ndarray, b:np.ndarray) -> Tuple[float, float, float]:\n", + " r\"\"\"Get rank values from signed wilcoxon test.\n", + " \n", + " Args:\n", + "\t a: First data.\n", + "\t b: Second data.\n", + "\t \n", + " Returns:\n", + "\t 1. Positive ranks.\n", + "\t 2. Negative ranks.\n", + "\t 3. 
T value\n", + " \"\"\"\n", + " y = a - b\n", + " y_diff = y[y != 0]\n", + " r = stats.rankdata(np.abs(y_diff))\n", + " r_all = np.sum(r) / 2\n", + " r_p, r_n = r_all + np.sum(r[np.where(y_diff > 0)]) , r_all + np.sum(r[np.where(y_diff < 0)])\n", + " return r_p, r_n, np.min([r_p, r_n])\n", + "\n", + "def friedmanRanks(*arrs:List[np.ndarray]) -> np.array:\n", + " r = np.asarray([stats.rankdata([arrs[j][i] for j in range(len(arrs))]) for i in range(len(arrs[0]))])\n", + " return np.asarray([np.sum(r[:, i]) / len(arrs[0]) for i in range(len(arrs))])\n", + "\n", + "def cd(alpha:float, k:float, n:float) -> float:\n", + " r\"\"\"Get critial distance for friedman test.\n", + " \n", + " Args:\n", + " alpha: Fold value.\n", + " k: Number of algorithms.\n", + " n: Number of algorithm results.\n", + " \"\"\"\n", + " nemenyi_df = pd.read_csv('nemenyi.csv')\n", + " q_a = nemenyi_df['%.2f' % alpha][nemenyi_df['k'] == k].values\n", + " return q_a[0] * np.sqrt((k * (k + 1)) / (6 * n))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cd_h = cd(0.01, len(algs_data), len(algs_data[0])) / 2.0\n", + "r = friedmanRanks(*algs_data)\n", + "f, a = plt.subplots(figsize=(6, 4))\n", + "a.boxplot([(e - cd_h, e, e + cd_h) for e in r], labels=algs_labels, widths=.15)\n", + "a.xaxis.set_minor_locator(AutoMinorLocator(7)); a.yaxis.set_minor_locator(AutoMinorLocator(7))\n", + "a.grid(which='both'); a.grid(which='minor', alpha=0.2, linestyle=':'); a.grid(which='major', alpha=0.5, linestyle='--')\n", + "a.set_ylabel('Average rank'); a.set_xlabel('Algorithm')\n", + "f.tight_layout()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pw_data = pd.DataFrame(np.asarray([[stats.wilcoxon(algs_data[j], algs_data[i])[1] if j != i else 1 for i in range(len(algs_data))] for j in range(len(algs_data))]), index=algs_labels, columns=algs_labels)\n", + "pw_data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pw_data.to_csv('res_data.csv')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/alogorithm_statistical_comparison/nemenyi.csv b/alogorithm_statistical_comparison/nemenyi.csv new file mode 100644 index 0000000..61a9750 --- /dev/null +++ b/alogorithm_statistical_comparison/nemenyi.csv @@ -0,0 +1,100 @@ +k,0.01,0.05,0.10 +2,2.575829491,1.959964233,1.64485341 +3,2.913494192,2.343700476,2.05229258 +4,3.113250443,2.569032073,2.291341341 +5,3.254685942,2.727774717,2.459516082 +6,3.363740192,2.849705382,2.588520643 +7,3.452212685,2.948319908,2.692731919 +8,3.526470918,3.030878867,2.779883537 +9,3.590338924,3.10173026,2.854606339 +10,3.646291577,3.16368342,2.919888558 +11,3.696020982,3.218653901,2.977768077 +12,3.740733465,3.268003591,3.029694463 +13,3.781318566,3.312738701,3.076733328 +14,3.818450865,3.353617959,3.1196936 +15,3.852654327,3.391230382,3.159198949 +16,3.884343317,3.426041249,3.195743642 +17,3.913850176,3.458424619,3.229723658 +18,3.941446432,3.488684546,3.261461439 +19,3.967356946,3.517072762,3.29122427 +20,3.991769808,3.543799277,3.31923277 
+21,4.014841995,3.569040161,3.345675735 +22,4.036709272,3.592946027,3.370711558 +23,4.057487605,3.615646276,3.39447671 +24,4.077275281,3.637252631,3.417089277 +25,4.096160689,3.657860551,3.438651085 +26,4.114219489,3.677556303,3.459252641 +27,4.131518856,3.696413427,3.478971727 +28,4.148118188,3.71449839,3.497877641 +29,4.164069103,3.731869175,3.516032608 +30,4.179419684,3.748578108,3.533492489 +31,4.194212358,3.764671858,3.550305367 +32,4.208483894,3.780192852,3.566516497 +33,4.222268941,3.795178566,3.58216477 +34,4.235598611,3.809663649,3.597287662 +35,4.248501188,3.823679212,3.611916995 +36,4.261002129,3.837254248,3.626083879 +37,4.273124768,3.850413505,3.639814478 +38,4.284891024,3.863181025,3.653134249 +39,4.296319991,3.875578729,3.666065818 +40,4.307430053,3.887627121,3.678630398 +41,4.31823818,3.899344587,3.690847789 +42,4.328759929,3.910747391,3.702736375 +43,4.339009442,3.921852503,3.714311713 +44,4.348999447,3.932673359,3.725589359 +45,4.358743378,3.943224099,3.736584163 +46,4.368251843,3.953518159,3.747309558 +47,4.377536155,3.963566147,3.757777567 +48,4.386605506,3.973379375,3.767999503 +49,4.395470504,3.98296845,3.777987386 +50,4.404138926,3.992343271,3.787749702 +51,4.412619258,4.001512325,3.797297058 +52,4.42091857,4.010484803,3.806637939 +53,4.429046055,4.019267776,3.815781537 +54,4.437006664,4.02786973,3.824734923 +55,4.444807466,4.036297029,3.833505168 +56,4.452454825,4.044556036,3.842100758 +57,4.4599544,4.05265453,3.850526642 +58,4.467311139,4.060596753,3.858790599 +59,4.474529992,4.068389777,3.866897579 +60,4.481617323,4.076037844,3.874853237 +61,4.488575961,4.083547318,3.882663231 +62,4.495411562,4.090921028,3.890333219 +63,4.50212837,4.098166044,3.897866734 +64,4.508729212,4.105284488,3.905268728 +65,4.51521833,4.112282016,3.912543443 +66,4.521599969,4.119161458,3.919695828 +67,4.527876956,4.125927056,3.926729419 +68,4.53405212,4.132582345,3.933647045 +69,4.540129702,4.139131568,3.940452947 +70,4.546111826,4.145576139,3.94715137 +71,4.552002025,4.151921008,3.953744433 +72,4.557802422,4.158168297,3.960235674 +73,4.563515138,4.164320833,3.966628626 +74,4.569143708,4.170380738,3.972924705 +75,4.574690253,4.176352255,3.979128153 +76,4.580156896,4.182236797,3.985240384 +77,4.585545757,4.188036487,3.991264934 +78,4.590859664,4.19375486,3.997203923 +79,4.596099325,4.199392622,4.003058768 +80,4.601267569,4.204952603,4.008833001 +81,4.606365809,4.21043763,4.01452804 +82,4.611396874,4.215848411,4.02014671 +83,4.616481678,4.221187067,4.02568972 +84,4.621261013,4.22645572,4.031159898 +85,4.626098331,4.23165649,4.036558658 +86,4.63087413,4.236790793,4.041887415 +87,4.635590532,4.241859334,4.047148997 +88,4.64024683,4.246864943,4.052344817 +89,4.644847267,4.251809034,4.057475584 +90,4.649391842,4.256692313,4.062543418 +91,4.65388197,4.261516196,4.067549734 +92,4.658319065,4.266282802,4.072495239 +93,4.662703834,4.270992841,4.077382761 +94,4.667037692,4.275648432,4.082213008 +95,4.671322759,4.280249575,4.086986686 +96,4.675558329,4.284798393,4.091705209 +97,4.679746522,4.289294885,4.0963707 +98,4.683888754,4.29374188,4.100983157 +99,4.687985023,4.298139377,4.105544703 +100,4.692036745,4.302488791,4.110055337 diff --git a/clustering_datasets/Clustering.ipynb b/clustering_datasets/Clustering.ipynb new file mode 100644 index 0000000..f1c59c7 --- /dev/null +++ b/clustering_datasets/Clustering.ipynb @@ -0,0 +1,504 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "from typing import Dict, Tuple, 
Any\n", + "# Librarys\n", + "import numpy as np\n", + "import pandas as pd\n", + "from sklearn.datasets import load_iris, load_wine, load_breast_cancer, make_blobs\n", + "from sklearn.cluster import KMeans\n", + "from sklearn.preprocessing import LabelEncoder\n", + "from sklearn.model_selection import train_test_split\n", + "%matplotlib widget\n", + "import matplotlib.pyplot as plt\n", + "# Theme for matplotlib and \n", + "from jupyterthemes import jtplot\n", + "jtplot.style()\n", + "# NiaPy\n", + "from NiaPy.algorithms.basic import ParticleSwarmOptimization, ComprehensiveLearningParticleSwarmOptimizer, OppositionVelocityClampingParticleSwarmOptimization, DifferentialEvolution, BatAlgorithm, BlackHole\n", + "from NiaPy.algorithms.modified import SelfAdaptiveDifferentialEvolution\n", + "from NiaPy.benchmarks import Clustering, ClusteringMin, ClusteringMinPenalty, ClusteringClassification\n", + "from NiaPy.util import StoppingTask, groupdatabylabel, classifie, clusters2labels\n", + "# Output options\n", + "np.set_printoptions(threshold=sys.maxsize)\n", + "np.set_printoptions(formatter={'float': '{: 0.3f}'.format})\n", + "pd.set_option('display.max_columns', 100)\n", + "pd.set_option('display.max_rows', 100)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def grupdata(data: np.ndarray, C: np.ndarray) -> np.ndarray:\n", + " G = [[] for _ in range(C.shape[0])]\n", + " for e in data: G[np.argmin([np.sqrt(np.sum((e - C[i]) ** 2)) for i in range(C.shape[0])])].append(e)\n", + " return np.asarray([np.asarray(e) for e in G])\n", + "\n", + "def labeltransform(l): return LabelEncoder().fit(l)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "toc-hr-collapsed": true + }, + "source": [ + "# Algorithms" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## KMeans" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def kmeans(data, C, benchmark, nFES):\n", + " print ('KMeans')\n", + " kmeans = KMeans(n_clusters=noc, init='random', n_init=100, max_iter=nFES, random_state=None, algorithm='full').fit(data)\n", + " C['km'] = kmeans.cluster_centers_\n", + " print('Min val: %f' % benchmark.function()(C['km'].flatten()))\n", + " print (C['km'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Differetial evolution clustering" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def de(data, C, benchmark, nFES):\n", + " algo = DifferentialEvolution()\n", + " print (algo.Name[0])\n", + " task = StoppingTask(D=noc * len(data[0]), nFES=nFES, benchmark=benchmark)\n", + " res = algo.runTask(task)\n", + " print('Min val: %f' % res[1])\n", + " C['de'] = res[0].reshape((noc, len(data[0])))\n", + " print (C['de'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Self Adaptive Differential Evolution clustering" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def jde(data, C, benchmark, nFES):\n", + " algo = SelfAdaptiveDifferentialEvolution()\n", + " print (algo.Name[0])\n", + " task = StoppingTask(D=noc * len(data[0]), nFES=nFES, benchmark=benchmark)\n", + " res = algo.runTask(task)\n", + " print('Min val: %f' % res[1])\n", + " C['jde'] = res[0].reshape((noc, len(data[0])))\n", + " print (C['jde'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + 
"## Particle Swarm Optimization clustering" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def pso(data, C, benchmark, nFES):\n", + " algo = ParticleSwarmOptimization()\n", + " print (algo.Name[0])\n", + " task = StoppingTask(D=noc * len(data[0]), nFES=nFES, benchmark=benchmark)\n", + " res = algo.runTask(task)\n", + " print('Min val: %f' % res[1])\n", + " C['pso'] = res[0].reshape((noc, len(data[0])))\n", + " print (C['pso'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Comprehensive Learning Particle Swarm Optimizer clustering" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def clpso(data, C, benchmark, nFES):\n", + " algo = ComprehensiveLearningParticleSwarmOptimizer()\n", + " print (algo.Name[0])\n", + " task = StoppingTask(D=noc * len(data[0]), nFES=nFES, benchmark=benchmark)\n", + " res = algo.runTask(task)\n", + " print('Min val: %f' % res[1])\n", + " C['clpso'] = res[0].reshape((noc, len(data[0])))\n", + " print (C['clpso'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Opposition Velocity Clamping Particle Swarm Optimization clustering" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def ovcpso(data, C, benchmark, nFES):\n", + " algo = OppositionVelocityClampingParticleSwarmOptimization()\n", + " print (algo.Name[0])\n", + " task = StoppingTask(D=noc * len(data[0]), nFES=nFES, benchmark=benchmark)\n", + " res = algo.runTask(task)\n", + " print('Min val: %f' % res[1])\n", + " C['ovcpso'] = res[0].reshape((noc, len(data[0])))\n", + " print (C['ovcpso'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Bat Algorithm clustering" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def ba(data, C, benchmark, nFES):\n", + " algo = BatAlgorithm()\n", + " print (algo.Name[0])\n", + " task = StoppingTask(D=noc * len(data[0]), nFES=nFES, benchmark=benchmark)\n", + " res = algo.runTask(task)\n", + " print('Min val: %f' % res[1])\n", + " C['ba'] = res[0].reshape((noc, len(data[0])))\n", + " print (C['ba'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Black Hole clustering" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def bh(data, C, benchmark, nFES):\n", + " algo = BlackHole()\n", + " print (algo.Name[0])\n", + " task = StoppingTask(D=noc * len(data[0]), nFES=nFES, benchmark=benchmark)\n", + " res = algo.runTask(task)\n", + " print('Min val: %f' % res[1])\n", + " C['bh'] = res[0].reshape((noc, len(data[0])))\n", + " print (C['bh'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Data preparation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Generated data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "nof, noc = 9, 4\n", + "data, labels = make_blobs(n_samples=500, n_features=nof, centers=noc, random_state=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Iris" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "data, labels = load_iris(True)\n", + "noc = len(np.unique(labels))\n", + "C_best = {\n", + " 
'report_bh': np.asarray([[6.73305, 3.06805, 5.62938, 2.10908], [5.01186, 3.40303, 1.47143, 0.23532], [5.93229, 2.79775, 4.41857, 1.41608]]),\n", + " 'bh': np.asarray([[5.650, 2.878, 5.605, 1.298], [5.048, 3.426, 1.347, 0.397], [6.082, 2.698, 4.174, 0.834]]),\n", + " 'clpso': np.asarray([[5.793, 3.636, 1.792, 0.813], [5.146, 2.828, 4.051, 0.539], [4.603, 2.588, 5.491, 1.561]])\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## The Cancer Wisconsin Dataset" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "data, labels = load_breast_cancer(True)\n", + "noc = len(np.unique(labels))\n", + "C_best = {\n", + " 'jde': np.asarray([[17.654, 9.710, 172.041, 1157.277, 0.053, 0.303, 0.000, 0.184, 0.177, 0.075, 1.617, 1.369, 0.847, 44.476, 0.010, 0.042, 0.318, 0.000, 0.021, 0.001, 14.003, 15.286, 92.127, 1787.341, 0.201, 0.265, 0.159, 0.191, 0.289, 0.206], [6.981, 11.369, 43.790, 1430.514, 0.130, 0.168, 0.315, 0.201, 0.254, 0.091, 1.317, 3.862, 9.089, 138.556, 0.010, 0.129, 0.053, 0.021, 0.062, 0.025, 7.930, 49.540, 190.337, 185.200, 0.160, 0.653, 0.625, 0.000, 0.247, 0.146]]),\n", + " 'bh': np.asarray([[14.366, 23.217, 71.421, 1229.990, 0.123, 0.237, 0.140, 0.109, 0.242, 0.076, 0.890, 2.616, 6.428, 107.904, 0.021, 0.063, 0.266, 0.017, 0.049, 0.009, 19.898, 30.594, 176.826, 1727.838, 0.117, 0.596, 0.755, 0.101, 0.538, 0.144], [13.153, 25.890, 153.234, 1569.438, 0.109, 0.115, 0.140, 0.084, 0.166, 0.081, 2.177, 3.526, 7.352, 338.804, 0.014, 0.054, 0.280, 0.041, 0.058, 0.015, 10.863, 41.038, 138.735, 1286.425, 0.113, 0.304, 0.565, 0.175, 0.297, 0.099]])\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Wine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "data, labels = load_wine(True)\n", + "noc = len(np.unique(labels))\n", + "C_best = {\n", + " 'report_bh': np.asarray([[12.87096, 2.11606, 2.39431, 19.46178, 98.84497, 2.03580, 1.44765, 0.43320, 1.49193, 5.36444, 0.88652, 2.12046, 686.93205], [12.63469, 2.44139, 2.37083, 21.26462, 92.39332, 2.12789, 1.58430, 0.40206, 1.13521, 4.83774, 0.81497, 2.71348, 463.69590], [13.31401, 2.26752, 2.56857, 17.34232, 105.03031, 2.82361, 3.24277, 0.28947, 2.67352, 5.20622, 1.03286, 3.38781, 1137.44167]]),\n", + " 'jde': np.asarray([[14.472, 1.100, 2.389, 21.122, 121.500, 2.752, 3.342, 0.130, 2.343, 5.531, 1.104, 1.970, 1066.214], [14.546, 4.664, 2.626, 14.452, 156.808, 2.860, 4.975, 0.288, 3.475, 3.524, 1.185, 2.614, 313.572], [14.450, 5.324, 2.324, 10.798, 115.543, 1.063, 1.835, 0.236, 2.812, 8.268, 1.657, 3.978, 731.918]])\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Glass" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df = pd.read_csv('glass.csv')\n", + "data, labels = df.iloc[:, :-1].values, df.iloc[:, -1].values\n", + "noc = len(np.unique(labels))\n", + "C_best = {\n", + " 'report_bh': np.asarray([[1.51474, 14.59500, 0.06789, 2.25305, 73.29150, 0.00937, 8.71261, 1.01385, -0.01161], [1.52117, 13.79589, 3.55131, 0.95428, 71.84335, 0.19175, 9.54099, 0.08156, 0.00710], [1.51745, 13.31326, 3.59522, 1.42358, 72.67659, 0.57686, 8.20015, -0.00741, 0.03106], [1.51326, 13.01074, -0.00358, 3.02527, 70.66960, 6.22227, 6.94351, -0.00710, -0.00041], [1.51743, 12.85016, 3.45851, 1.30894, 73.02754, 0.60704, 8.58511, 0.02745, 0.05789], [1.52095, 13.02689, 0.26652, 
1.51925, 72.75985, 0.35290, 11.95589, -0.04668, 0.03072]]),\n", + " 'bh': np.asarray([[1.523, 13.305, 3.020, 2.195, 72.693, 2.201, 9.734, 0.939, 0.276], [1.518, 14.215, 3.066, 2.046, 71.789, 1.707, 10.576, 0.970, 0.231], [1.521, 12.349, 2.250, 1.126, 73.202, 1.175, 10.913, 1.403, 0.297], [1.519, 14.941, 1.919, 2.003, 72.113, 2.191, 8.563, 1.805, 0.300], [1.526, 13.190, 3.412, 1.522, 72.512, 1.194, 8.402, 0.587, 0.135], [1.529, 13.265, 2.718, 1.620, 70.936, 2.665, 11.614, 1.322, 0.219]]),\n", + " 'jde': np.asarray([[1.529, 16.186, 1.588, 2.817, 72.258, 0.000, 8.043, 0.824, 0.374], [1.532, 10.730, 0.495, 0.768, 71.836, 3.082, 8.435, 1.831, 0.504], [1.533, 16.842, 4.488, 3.237, 71.848, 5.223, 10.575, 0.154, 0.416], [1.516, 14.662, 4.253, 0.507, 72.073, 0.000, 7.095, 0.072, 0.114], [1.524, 11.501, 2.029, 1.710, 75.310, 3.203, 15.281, 1.354, 0.353], [1.512, 11.318, 3.625, 0.290, 75.101, 4.514, 5.430, 0.544, 0.085]])\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## CMC" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df = pd.read_csv('cmc.csv')\n", + "data, labels = df.iloc[:, :-1].values, df.iloc[:, -1].values\n", + "noc = len(np.unique(labels))\n", + "C_best = {\n", + " 'report_bh': np.asarray([[24.42273, 3.03421, 3.51476, 1.79348, 0.92053, 0.82924, 2.29826, 2.95830, 0.02510], [43.63258, 2.99608, 3.45429, 4.57393, 0.82686, 0.83295, 1.82888, 3.47833, 0.11822], [33.49565, 3.13181, 3.56438, 3.64850, 0.79404, 0.66550, 2.09068, 3.29362, 0.06771]]),\n", + " 'pso': np.asarray([[25.671, 2.096, 4.000, 1.963, 0.913, 0.700, 1.000, 3.505, 0.234], [46.014, 4.000, 4.000, 16.000, 0.000, 1.000, 2.017, 3.675, 1.000], [46.394, 2.021, 2.468, 15.206, 1.000, 0.000, 4.000, 3.004, 0.145]])\n", + "}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Benchmark preparation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Basic" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "benchmark = Clustering(data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Min" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "benchmark = ClusteringMin(data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Min with penalty" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "benchmark = ClusteringMinPenalty(data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Clustering for classification" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "benchmark = ClusteringClassification(data, labels)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Klasifikacija" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "funcs, C, nFES = [kmeans, de, jde, pso, clpso, ovcpso, ba, bh], {}, 1000\n", + "for algo in funcs: algo(data, C, benchmark, nFES); print ()\n", + "lt = labeltransform(labels)\n", + "gl = groupdatabylabel(data, labels, lt)\n", + "for k, V in C.items():\n", + " l, ok = clusters2labels(V, gl), 0\n", + " for i, d in enumerate(data): ok += 1 if lt.inverse_transform([l[classifie(d, V)]])[0] == labels[i] else 0\n", + " print ('Sucess of %s: %f' % (k, ok 
/ len(data)))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lt = labeltransform(labels)\n", + "gl = groupdatabylabel(data, labels, lt)\n", + "for k, V in C_best.items():\n", + " l, ok = clusters2labels(V, gl), 0\n", + " for i, d in enumerate(data): ok += 1 if lt.inverse_transform([l[classifie(d, V)]])[0] == labels[i] else 0\n", + " print ('Sucess of %s: %f' % (k, ok / len(data)))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.3" + }, + "toc-showtags": false + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/clustering_datasets/Data.ipynb b/clustering_datasets/Data.ipynb new file mode 100644 index 0000000..b2018a0 --- /dev/null +++ b/clustering_datasets/Data.ipynb @@ -0,0 +1,653 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "from typing import Union, Optional\n", + "import numpy as np\n", + "import pandas as pd\n", + "from sklearn import datasets\n", + "%matplotlib widget\n", + "import matplotlib as mpl\n", + "import matplotlib.pyplot as plt\n", + "from jupyterthemes import jtplot\n", + "import seaborn as sns\n", + "from sklearn.preprocessing import LabelEncoder\n", + "from NiaPy.util import groupdatabylabel\n", + "# Dodatne opcije\n", + "jtplot.style()\n", + "# sns.set_style(\"whitegrid\"), sns.set_context(\"talk\")\n", + "pd.set_option('display.max_columns', None)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def getdims(d: Union[np.ndarray, list], n: int = 0) -> None:\n", + " for i, e in enumerate(d):\n", + " if isinstance(e, (list, np.ndarray)): print ('%d:%d -> %d' % (n, i, len(e)))\n", + " else: continue\n", + " if isinstance(e[0], (list, np.ndarray)) and isinstance(e[0][0], (list, np.ndarray)): getdims(e, n + 1)\n", + "\t\t \n", + "def mplot(data: pd.DataFrame, clabel: str, ax: Optional[mpl.axes.Axes] = None) -> None:\n", + " d = pd.melt(data, clabel, var_name=\"measurement\")\n", + " if ax is None: f, ax = plt.subplots()\n", + " sns.despine(bottom=True, left=True)\n", + " sns.stripplot(x=\"value\", y=\"measurement\", hue=clabel, data=d, dodge=True, jitter=True, alpha=.25, zorder=1)\n", + " sns.pointplot(x=\"value\", y=\"measurement\", hue=clabel, data=d, dodge=.532, join=False, palette=\"dark\", markers=\"d\", scale=.75, ci=None)\n", + " handles, labels = ax.get_legend_handles_labels()\n", + " noc = len(data[clabel].unique())\n", + " ax.legend(handles[noc:], labels[noc:], title=clabel, handletextpad=0, columnspacing=1, loc=\"lower right\", ncol=noc, frameon=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Generated Data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Example" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "X, y = datasets.make_blobs()\n", + "fig, ax = plt.subplots(1, 1, figsize=(6, 5))\n", + "for e in np.unique(y): ax.scatter(X[np.where(y == e), 0], X[np.where(y == e), 1], label='Class %d' % e)\n", + 
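The two classification cells that close Clustering.ipynb above lean on NiaPy helpers (`groupdatabylabel`, `clusters2labels`, `classifie`) whose implementations are not part of this diff. The sketch below is a minimal NumPy-only illustration of the same evaluation idea, assuming the usual nearest-centre assignment and a majority-vote mapping from centres to classes; the names `nearest_center`, `centers_to_labels`, and `clustering_accuracy` are hypothetical and are not NiaPy API.

```python
import numpy as np

def nearest_center(x, centers):
    # index of the centre closest to sample x (Euclidean distance)
    return int(np.argmin(np.linalg.norm(centers - x, axis=1)))

def centers_to_labels(centers, data, labels):
    # map each centre to the majority true label of the samples it attracts
    # (labels is assumed to be a 1-D array of non-negative integers)
    assigned = np.array([nearest_center(x, centers) for x in data])
    return {c: (int(np.bincount(labels[assigned == c]).argmax()) if np.any(assigned == c) else -1)
            for c in range(len(centers))}

def clustering_accuracy(centers, data, labels):
    # fraction of samples whose nearest centre maps to their true label
    mapping = centers_to_labels(centers, data, labels)
    predicted = np.array([mapping[nearest_center(x, centers)] for x in data])
    return float(np.mean(predicted == labels))
```

Run against, say, `C['bh']` and the Iris arrays loaded in the cells above, this should give a ratio comparable to the success percentage the notebook prints (ties in the majority vote aside).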
"ax.set_xlabel('$a_1$'); ax.set_ylabel('$a_2$')\n", + "ax.legend()\n", + "fig.tight_layout()\n", + "fig.savefig(\"clusterExample.pdf\", bbox_inches='tight')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "gdata = pd.DataFrame(np.hstack((X, y.reshape(len(y), 1))), columns=['a1', 'a2', 'Class'])\n", + "gdata" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lt = LabelEncoder().fit(y)\n", + "lt.classes_\n", + "d = groupdatabylabel(X, y, lt)\n", + "getdims(d)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mplot(gdata, 'Class')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# The Iris Dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Attribute description\n", + "1. sepal length in cm\n", + "2. sepal width in cm\n", + "3. petal length in cm\n", + "4. petal width in cm\n", + "5. class:" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Class attribute\n", + "* 0: Iris Setosa\n", + "* 1: Iris Versicolour\n", + "* 2: Iris Virginica" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "iris = datasets.load_iris()\n", + "X, y = iris.data, iris.target\n", + "iris = pd.DataFrame(np.hstack((X, y.reshape(len(y), 1))), columns=['Sepal length', 'Sepal width', 'Petal length', 'Petal width', 'Species'])\n", + "iris" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open('irisStat.tex', 'w') as file: file.write(iris.describe().to_latex())\n", + "iris.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lt = LabelEncoder().fit(y)\n", + "lt.classes_" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d = groupdatabylabel(X, y, lt)\n", + "getdims(d)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mplot(iris, 'Species')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "toc-hr-collapsed": false + }, + "source": [ + "# The Cancer Wisconsin Dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Attribute information\n", + "1. ID number\n", + "2. Diagnosis (M = malignant, B = benign)\n", + "3. Ten real-valued features are computed for each cell nucleus:\n", + " 1. radius (mean of distances from center to points on the perimeter)\n", + " 2. texture (standard deviation of gray-scale values)\n", + " 3. perimeter\n", + " 4. area\n", + " 5. smoothness (local variation in radius lengths)\n", + " 6. compactness (perimeter^2 / area - 1.0)\n", + " 7. concavity (severity of concave portions of the contour)\n", + " 8. concave points (number of concave portions of the contour)\n", + " 9. symmetry \n", + " 10. 
fractal dimension (\"coastline approximation\" - 1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Class attirbute\n", + "* 0: No cancer\n", + "* 1: Cancer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cwd = datasets.load_breast_cancer()\n", + "X, y = cwd.data, cwd.target\n", + "columns = ['radius 1', 'texture 1', 'perimeter 1', 'area 1', 'smoothness 1', 'compactness 1', 'concavity 1', 'concave points 1', 'symmetry 1', 'fractal dimension 1', 'radius 2', 'texture 2', 'perimeter 2', 'area 2', 'smoothness 2', 'compactness 2', 'concavity 2', 'concave points 2', 'symmetry 2', 'fractal dimension 2', 'radius 3', 'texture 3', 'perimeter 3', 'area 3', 'smoothness 3', 'compactness 3', 'concavity 3', 'concave points 3', 'symmetry 3', 'fractal dimension 3', 'Diagnosis']\n", + "cwd = pd.DataFrame(np.hstack((X, y.reshape(len(y), 1))), columns=columns)\n", + "cwd" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open('cancerStat.tex', 'w') as file: file.write(cwd.describe().to_latex())\n", + "cwd.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lt = LabelEncoder().fit(y)\n", + "lt.classes_" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d = groupdatabylabel(X, y, lt)\n", + "getdims(d)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mplot(cwd, 'Diagnosis')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "toc-hr-collapsed": false + }, + "source": [ + "# The Wine Dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Attribute description\n", + "1. Alcohol\n", + "2. Malic acid\n", + "3. Ash\n", + "4. Alcalinity of ash\n", + "5. Magnesium\n", + "6. Total phenols\n", + "7. Flavanoids\n", + "8. Nonflavanoid phenols\n", + "9. Proanthocyanins\n", + "10. Color intensity\n", + "11. Hue\n", + "12. OD280/OD315 of diluted wines\n", + "13. 
Proline " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Class attiribute\n", + "* 0: TODO\n", + "* 1: TODO\n", + "* 2: TODO" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "wine = datasets.load_wine()\n", + "X, y = wine.data, wine.target\n", + "wine = pd.DataFrame(np.hstack((X, y.reshape(len(y), 1))), columns=['Alcohol', 'Malic acid', 'Ash', 'Alcalinity of ash', 'Magnesium', 'Total phenols', 'Flavanoids', 'Nonflavanoid phenols', 'Proanthocyanins', 'Color intensity', 'Hue', 'OD280/OD315 of diluted wines', 'Proline', 'Class'])\n", + "wine" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open('wineStat.tex', 'w') as file: file.write(wine.describe().to_latex())\n", + "wine.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lt = LabelEncoder().fit(y)\n", + "lt.classes_" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d = groupdatabylabel(X, y, lt)\n", + "getdims(d)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mplot(wine, 'Class')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# The Glass Dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Attribute information\n", + "1. RI: refractive index\n", + "2. Na: Sodium (unit measurement: weight percent in corresponding oxide, as are attributes 4-10)\n", + "3. Mg: Magnesium\n", + "4. Al: Aluminum\n", + "5. Si: Silicon\n", + "6. K: Potassium\n", + "7. Ca: Calcium\n", + "8. Ba: Barium\n", + "9. Fe: Iron\n", + "10. Type of glass: (class attribute)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Class attribute\n", + "Type of glass:\n", + "* 1: building_windows_float_processed\n", + "* 2: building_windows_non_float_processed\n", + "* 3: vehicle_windows_float_processed\n", + "* 4: vehicle_windows_non_float_processed (none in this database)\n", + "* 5: containers\n", + "* 6: tableware\n", + "* 7: headlamps" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "glass = pd.read_csv('glass.csv')\n", + "glass" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open('glassStat.tex', 'w') as file: file.write(glass.describe().to_latex())\n", + "glass.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lt = LabelEncoder().fit(glass.iloc[:, -1].values)\n", + "lt.classes_" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d = groupdatabylabel(glass.iloc[:, :-1].values, glass.iloc[:, -1].values, lt)\n", + "getdims(d)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mplot(glass, 'Type')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# The Contraceptive Method Choice Dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Attribute information\n", + "1. Wife's age (numerical)\n", + "2. Wife's education (categorical) 1=low, 2, 3, 4=high\n", + "3. Husband's education (categorical) 1=low, 2, 3, 4=high\n", + "4. 
Number of children ever born (numerical)\n", + "5. Wife's religion (binary) 0=Non-Islam, 1=Islam\n", + "6. Wife's now working? (binary) 0=Yes, 1=No\n", + "7. Husband's occupation (categorical) 1, 2, 3, 4\n", + "8. Standard-of-living index (categorical) 1=low, 2, 3, 4=high\n", + "9. Media exposure (binary) 0=Good, 1=Not good\n", + "10. Contraceptive method used (class attribute)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Class attribute\n", + "Contraceptive method used:\n", + "* 1: No-use\n", + "* 2: Long-term\n", + "* 3: Short-term" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cmc = pd.read_csv('cmc.csv')\n", + "cmc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open('cmcStat.tex', 'w') as file: file.write(cmc.describe().to_latex())\n", + "cmc.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lt = LabelEncoder().fit(cmc.iloc[:, -1])\n", + "lt.classes_" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d = groupdatabylabel(cmc.iloc[:, :-1].values, cmc.iloc[:, -1].values, lt)\n", + "getdims(d)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mplot(cmc, ' Contraceptive method used')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Covertype dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Attribute information\n", + "1. Elevation / quantitative /meters / Elevation in meters\n", + "2. Aspect / quantitative / azimuth / Aspect in degrees azimuth\n", + "3. Slope / quantitative / degrees / Slope in degrees\n", + "4. Horizontal_Distance_To_Hydrology / quantitative / meters / Horz Dist to nearest surface water features\n", + "5. Vertical_Distance_To_Hydrology / quantitative / meters / Vert Dist to nearest surface water features\n", + "6. Horizontal_Distance_To_Roadways / quantitative / meters / Horz Dist to nearest roadway\n", + "7. Hillshade_9am / quantitative / 0 to 255 index / Hillshade index at 9am, summer solstice\n", + "8. Hillshade_Noon / quantitative / 0 to 255 index / Hillshade index at noon, summer soltice\n", + "9. Hillshade_3pm / quantitative / 0 to 255 index / Hillshade index at 3pm, summer solstice\n", + "10. Horizontal_Distance_To_Fire_Points / quantitative / meters / Horz Dist to nearest wildfire ignition points\n", + "11. Wilderness_Area (4 binary columns) / qualitative / 0 (absence) or 1 (presence) / Wilderness area designation\n", + "12. 
Soil_Type (40 binary columns) / qualitative / 0 (absence) or 1 (presence) / Soil Type designation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Class attribute\n", + "* Cover_Type (7 types) / integer / 1 to 7 / Forest Cover Type designation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "covtype = datasets.fetch_covtype()\n", + "X, y = covtype.data, covtype.target\n", + "covtype = pd.DataFrame(np.hstack((X, y.reshape(len(y), 1))), columns=['Elevation', 'Aspect', 'Slope', 'Horizontal_Distance_To_Hydrology', 'Vertical_Distance_To_Hydrology', 'Horizontal_Distance_To_Roadways', 'Hillshade_9am', 'Hillshade_Noon', 'Hillshade_3pm', 'Horizontal_Distance_To_Fire_Points', 'Wilderness_Area_1', 'Wilderness_Area_2', 'Wilderness_Area_3', 'Wilderness_Area_4', 'Soil_Type_1', 'Soil_Type_2', 'Soil_Type_3', 'Soil_Type_4', 'Soil_Type_5', 'Soil_Type_6', 'Soil_Type_7', 'Soil_Type_8', 'Soil_Type_9', 'Soil_Type_10', 'Soil_Type_11', 'Soil_Type_12', 'Soil_Type_13', 'Soil_Type_14', 'Soil_Type_15', 'Soil_Type_16', 'Soil_Type_17', 'Soil_Type_18', 'Soil_Type_19', 'Soil_Type_20', 'Soil_Type_21', 'Soil_Type_22', 'Soil_Type_23', 'Soil_Type_24', 'Soil_Type_25', 'Soil_Type_26', 'Soil_Type_27', 'Soil_Type_28', 'Soil_Type_29', 'Soil_Type_30', 'Soil_Type_31', 'Soil_Type_32', 'Soil_Type_33', 'Soil_Type_34', 'Soil_Type_35', 'Soil_Type_36', 'Soil_Type_37', 'Soil_Type_38', 'Soil_Type_39', 'Soil_Type_40', 'Cover_Type'])\n", + "covtype" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open('covtypeStat.tex', 'w') as file: file.write(covtype.describe().to_latex())\n", + "covtype.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lt = LabelEncoder().fit(y)\n", + "lt.classes_" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d = groupdatabylabel(X, y, lt)\n", + "getdims(d)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mplot(covtype, 'Cover_Type')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.3" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/clustering_datasets/Makefile b/clustering_datasets/Makefile new file mode 100644 index 0000000..13db6d1 --- /dev/null +++ b/clustering_datasets/Makefile @@ -0,0 +1,25 @@ +install: Pipfile + pipenv install + pipenv run jupyter labextension install @jupyter-widgets/jupyterlab-manager + pipenv run jupyter labextension install jupyter-matplotlib + pipenv run jupyter labextension install @jupyterlab/toc + pipenv run jupyter labextension install @oriolmirosa/jupyterlab_materialdarker + pipenv run jt -t onedork -lineh 110 -nfs 10 -fs 10 -tfs 10 -cellw 80% + +run: + pipenv run jupyter lab + +lab: + pipenv run jupyter lab + +notebook: + pipenv run jupyter notebook + +console: + pipenv run ipython + +clean: Pipfile.lock + pipenv --rm + rm Pipfile.lock + +all: install run diff --git a/clustering_datasets/Pipfile b/clustering_datasets/Pipfile new file mode 100644 index 0000000..8867a5b --- 
/dev/null +++ b/clustering_datasets/Pipfile @@ -0,0 +1,23 @@ +[[source]] +url = "https://pypi.python.org/simple" +verify_ssl = true +name = "pypi" + +[requires] +python_version = '3.7' + +[packages] +pipfile = "*" +wheel = ">=0.33.4" +jupyterlab = ">=0.35.5" +ipykernel = ">=5.1.0" +numpy = ">=1.16.3" +scipy = ">=1.2.1" +matplotlib = ">=3.0.3" +pandas = ">=0.24.2" +scikit-learn = ">=0.21.2" +ipywidgets = ">=7.4.2" +ipympl = ">=0.2.1" +jupyterthemes = ">=0.20.0" +seaborn = ">=0.9.0" +niapy = {path = "./../../NiaPy/dist/NiaPy-2.0.0rc4-cp37-cp37m-linux_x86_64.whl"} diff --git a/clustering_datasets/README.md b/clustering_datasets/README.md new file mode 100644 index 0000000..660ed94 --- /dev/null +++ b/clustering_datasets/README.md @@ -0,0 +1,9 @@ +# Install and run +1. Install [Nodejs](https://nodejs.org/) +2. Install [NPM](https://www.npmjs.com/) +3. Run `make install` for creating environmen +4. Run `make run` for runing the jupyter lab + +# Run experiment + +```pipenv run python run_cluster.py -a 'jDE' -rn 5 -ds 'iris' -of 'gc' -nFES 1000``` diff --git a/clustering_datasets/clusterargparser.py b/clustering_datasets/clusterargparser.py new file mode 100644 index 0000000..bd7adfb --- /dev/null +++ b/clustering_datasets/clusterargparser.py @@ -0,0 +1,38 @@ +# encoding=utf8 +import sys + +import numpy as np + +from NiaPy.util import MakeArgParser + +datasets = ['iris', 'cancer', 'wine', 'glass', 'cmc', 'gen'] +optfuncs = ['c', 'cm', 'cmp', 'cc'] + +def positiveInt(x: str) -> int: return abs(int(x)) + +def prob(x: str) -> float: return float(x) if 0 < float(x) < 1 else np.random.rand() + +def str2bool(x: str) -> bool: return True if x in ['true', 'True', 'TRUE', 'T', 'yes', 'Yes', 'YES', 'Y'] else False + +def MakeArgParserClustering(): + parser = MakeArgParser() + parser.add_argument('-ds', '--dataset', dest='dataset', default=datasets[0], choices=datasets, type=str) + parser.add_argument('-of', '--optfunc', dest='ofun', default=optfuncs[-1], choices=optfuncs, type=str) + parser.add_argument('-rn', '--rnum', dest='runs', default=51, type=positiveInt) + parser.add_argument('-ss', '--sseed', dest='sseed', default=1, type=positiveInt) + parser.add_argument('-sp', '--split', dest='split', default=.3, type=prob) + parser.add_argument('-o', '--wout', dest='wout', default=True, type=str2bool) + return parser + +def getArgs(argv): + parser = MakeArgParserClustering() + args = parser.parse_args(argv) + return args + +def getDictArgs(argv): return vars(getArgs(argv)) + +if __name__ == '__main__': + args = getArgs(sys.argv[1:]) + print (args) + +# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3 diff --git a/clustering_datasets/cmc.csv b/clustering_datasets/cmc.csv new file mode 100644 index 0000000..db6cf6e --- /dev/null +++ b/clustering_datasets/cmc.csv @@ -0,0 +1,1474 @@ +Wife's age, Wife's education, Husband's education, Number of children ever born, Wife's religion, Wife's now working, Husband's occupation, Standard-of-living, Media exposure, Contraceptive method used +24,2,3,3,1,1,2,3,0,1 +45,1,3,10,1,1,3,4,0,1 +43,2,3,7,1,1,3,4,0,1 +42,3,2,9,1,1,3,3,0,1 +36,3,3,8,1,1,3,2,0,1 +19,4,4,0,1,1,3,3,0,1 +38,2,3,6,1,1,3,2,0,1 +21,3,3,1,1,0,3,2,0,1 +27,2,3,3,1,1,3,4,0,1 +45,1,1,8,1,1,2,2,1,1 +38,1,3,2,1,0,3,3,1,1 +42,1,4,4,1,1,1,3,0,1 +44,4,4,1,1,0,1,4,0,1 +42,2,4,1,1,0,3,3,0,1 +38,3,4,2,1,1,2,3,0,1 +26,2,4,0,1,1,4,1,0,1 +48,1,1,7,1,1,2,4,0,1 +39,2,2,6,1,1,2,4,0,1 +37,2,2,8,1,1,2,3,0,1 +39,2,1,5,1,1,2,1,1,1 +26,3,4,1,1,0,4,1,0,1 +24,3,4,0,1,0,3,1,0,1 +46,4,4,1,0,1,1,4,0,1 +39,4,4,1,1,1,1,4,0,1 
+48,4,4,5,1,1,1,4,0,1 +40,2,4,8,1,1,3,3,0,1 +38,4,4,1,1,0,1,4,0,1 +29,4,4,0,1,0,1,4,0,1 +24,4,4,0,1,0,2,2,0,1 +43,1,2,8,1,1,2,4,0,1 +31,3,4,2,1,1,1,4,0,1 +31,4,4,2,1,1,3,2,0,1 +26,2,4,0,1,0,1,2,0,1 +33,2,2,2,1,1,3,1,0,1 +24,2,3,1,1,1,3,3,0,1 +42,1,3,9,1,1,3,2,1,1 +43,1,2,8,1,0,3,1,0,1 +25,3,3,4,1,1,3,2,0,1 +42,2,2,3,1,1,3,4,0,1 +28,2,2,1,1,1,2,4,0,1 +28,1,1,3,1,1,3,1,0,1 +47,2,3,0,1,0,1,3,0,1 +48,2,4,2,1,1,1,3,0,1 +40,4,4,2,0,1,1,4,0,1 +28,2,2,1,1,1,3,2,0,1 +25,1,4,1,1,1,2,1,0,1 +32,4,4,2,1,1,2,3,0,1 +45,4,4,0,1,1,2,4,0,1 +26,4,4,1,1,1,1,4,0,1 +26,4,4,0,1,0,1,4,0,1 +31,4,4,0,1,0,1,4,0,1 +49,4,4,7,0,1,2,4,0,1 +40,3,4,8,1,1,3,3,0,1 +34,4,4,1,0,0,2,3,0,1 +36,2,4,5,1,1,2,3,0,1 +20,2,2,2,1,1,2,2,1,1 +43,4,4,5,1,0,1,4,0,1 +37,3,4,7,1,1,2,1,0,1 +21,2,3,0,1,1,3,4,0,1 +49,4,4,6,1,1,1,2,0,1 +42,3,3,10,1,1,3,3,0,1 +20,2,3,1,1,1,2,3,0,1 +47,1,2,2,1,0,1,3,0,1 +22,2,3,1,1,1,2,1,0,1 +27,2,4,1,1,0,1,3,0,1 +24,2,4,1,1,1,3,1,0,1 +44,4,2,8,1,1,3,3,0,1 +30,2,3,3,1,1,3,1,0,1 +23,3,4,2,1,1,3,4,0,1 +25,4,4,1,1,0,3,3,0,1 +47,2,3,4,1,1,2,3,0,1 +48,2,3,8,1,1,1,4,0,1 +29,2,3,1,1,1,1,1,0,1 +29,2,3,1,1,1,1,2,0,1 +21,3,4,0,1,1,2,3,0,1 +21,3,4,1,1,1,2,3,0,1 +29,4,4,0,1,0,2,4,0,1 +45,3,4,3,1,1,1,4,0,1 +33,2,3,2,1,1,3,2,0,1 +38,4,4,1,1,0,1,4,0,1 +24,4,4,0,0,1,1,4,0,1 +49,4,4,6,0,0,1,4,0,1 +24,2,4,1,1,0,4,4,0,1 +28,4,4,0,0,0,1,4,0,1 +30,4,4,2,1,1,1,4,0,1 +33,4,4,2,1,1,1,4,0,1 +25,4,4,1,1,1,1,3,0,1 +35,4,4,3,0,1,1,4,0,1 +27,2,4,4,1,1,1,4,0,1 +24,2,2,3,1,0,2,1,0,1 +25,2,3,0,1,1,2,1,0,1 +46,2,4,6,1,1,1,4,0,1 +26,2,2,3,1,1,2,1,0,1 +38,2,2,1,1,0,3,2,0,1 +47,1,2,3,1,1,1,3,0,1 +41,1,3,10,1,1,2,3,1,1 +41,2,3,1,1,0,2,3,0,1 +28,4,4,5,1,1,2,1,0,1 +17,2,2,3,1,1,3,1,0,1 +27,2,3,5,1,1,2,1,0,1 +27,1,2,4,1,1,3,1,1,1 +19,2,3,0,1,0,2,2,0,1 +27,2,4,3,1,0,3,2,0,1 +26,4,3,4,1,1,2,3,0,1 +23,2,4,2,1,1,3,3,0,1 +40,3,3,3,1,1,3,2,0,1 +22,4,4,0,1,1,1,1,0,1 +44,3,4,4,1,0,1,3,0,1 +29,2,3,2,1,1,3,1,0,1 +26,2,3,3,1,1,3,1,0,1 +45,4,4,1,1,0,3,3,0,1 +32,3,3,7,1,0,2,3,0,1 +21,4,4,1,1,1,1,4,0,1 +31,2,2,4,1,1,3,3,0,1 +21,3,4,0,1,1,1,3,0,1 +37,4,4,1,1,1,1,4,0,1 +34,4,4,3,0,0,3,3,0,1 +44,3,4,1,1,1,1,3,0,1 +49,4,3,5,1,1,2,3,0,1 +26,2,3,2,1,1,3,3,0,1 +38,1,2,1,1,1,2,4,1,1 +36,2,3,5,1,1,2,3,0,1 +26,3,4,2,1,1,3,3,0,1 +20,2,3,1,0,1,3,2,0,1 +44,2,1,3,1,0,3,2,0,1 +31,3,4,2,1,1,1,2,1,1 +45,4,4,1,1,1,1,3,0,1 +36,2,3,2,1,1,3,2,1,1 +43,1,3,2,1,0,2,2,1,1 +25,3,4,0,1,1,1,3,0,1 +48,3,4,3,1,1,3,1,0,1 +30,4,4,0,0,0,1,3,0,1 +48,1,2,8,1,1,3,1,0,1 +28,3,4,1,1,1,3,1,1,1 +26,4,4,1,1,1,1,3,0,1 +26,3,3,3,1,0,2,4,0,1 +24,4,4,2,1,1,3,3,0,1 +21,3,3,2,1,1,2,2,0,1 +30,4,4,2,0,1,3,1,0,1 +20,4,4,1,1,1,1,4,0,1 +33,4,4,2,1,1,1,3,0,1 +30,4,4,0,0,0,3,3,0,1 +47,4,4,1,1,0,1,3,0,1 +35,4,4,2,0,1,3,3,0,1 +30,2,4,2,1,1,3,4,0,1 +33,4,4,3,1,1,1,4,0,1 +45,4,4,4,1,1,2,3,0,1 +41,4,4,2,1,1,1,4,0,1 +32,4,4,1,1,0,1,3,0,1 +43,4,4,5,1,1,1,4,0,1 +44,4,4,3,1,1,2,4,0,1 +21,1,3,4,1,1,3,2,0,1 +44,3,4,2,1,1,1,4,1,1 +23,4,4,2,1,1,2,4,0,1 +47,1,2,4,1,1,2,2,0,1 +47,1,1,7,1,1,2,3,1,1 +35,4,4,2,1,0,2,4,0,1 +32,2,4,4,1,1,3,2,0,1 +45,1,4,5,1,1,1,2,1,1 +22,2,2,2,1,1,3,1,0,1 +23,1,1,3,1,1,2,1,1,1 +30,4,4,0,1,0,1,4,0,1 +44,1,3,0,1,1,2,3,0,1 +40,3,3,1,1,0,3,3,0,1 +34,4,4,4,1,1,2,3,0,1 +37,3,4,1,1,1,2,4,0,1 +49,3,4,5,1,1,1,4,0,1 +26,4,4,1,1,1,1,4,0,1 +22,4,4,0,1,1,2,4,0,1 +20,3,4,0,1,0,2,4,0,1 +47,4,4,4,1,1,1,4,0,1 +49,2,2,8,1,0,3,1,0,1 +22,2,4,1,1,1,2,4,0,1 +22,3,3,2,1,1,3,4,0,1 +25,3,2,2,1,1,3,2,0,1 +32,2,4,0,1,0,3,4,0,1 +28,4,4,0,0,1,1,3,0,1 +30,2,4,4,1,0,1,4,0,1 +45,2,4,2,1,0,2,4,0,1 +49,2,1,10,1,0,2,4,0,1 +34,4,4,2,1,0,1,4,0,1 +25,2,4,1,1,1,2,2,0,1 +42,1,3,12,1,1,3,3,0,1 +48,2,1,3,1,0,4,3,0,1 +35,3,4,5,1,0,2,2,0,1 
+33,2,3,1,1,1,3,4,0,1 +20,2,4,0,1,1,3,2,0,1 +36,2,3,4,1,1,3,1,1,1 +19,3,3,2,1,1,3,2,0,1 +27,3,2,3,1,1,3,2,0,1 +30,1,2,4,1,1,4,1,0,1 +31,3,4,3,1,0,3,2,0,1 +44,1,1,11,1,1,4,1,1,1 +24,3,3,2,1,1,3,3,0,1 +18,3,4,0,1,0,3,1,0,1 +45,1,2,6,1,1,3,3,1,1 +25,2,2,3,1,0,3,1,0,1 +36,3,4,6,1,1,2,4,0,1 +30,1,1,5,1,1,2,2,0,1 +32,2,4,2,1,1,3,3,0,1 +42,1,4,11,0,1,2,2,1,1 +22,2,4,0,1,1,3,3,0,1 +36,2,4,2,1,0,2,4,0,1 +27,2,3,2,1,1,3,3,0,1 +40,3,4,1,1,1,1,2,0,1 +22,2,3,2,1,1,3,1,0,1 +26,2,4,0,1,1,3,1,0,1 +31,3,4,0,1,0,2,3,0,1 +32,4,4,1,1,1,1,4,0,1 +34,2,3,3,1,0,3,2,0,1 +40,3,3,1,0,1,3,4,0,1 +45,1,2,1,1,0,3,3,0,1 +26,3,4,3,1,1,3,2,0,1 +48,1,3,5,1,1,3,2,0,1 +43,1,3,8,1,1,3,1,0,1 +27,3,4,2,1,0,2,2,0,1 +42,3,2,1,1,1,3,3,0,1 +34,3,2,8,1,0,2,3,1,1 +32,3,2,4,1,1,2,2,1,1 +31,3,3,2,1,0,2,3,0,1 +22,2,3,1,1,1,3,3,0,1 +27,3,4,0,1,1,3,3,0,1 +37,1,2,2,1,1,2,2,1,1 +22,1,2,0,1,1,2,4,0,1 +47,4,4,4,1,1,1,4,0,1 +32,4,4,4,1,0,1,3,0,1 +37,3,3,2,1,1,1,4,0,1 +45,1,3,1,1,1,2,3,1,1 +47,1,4,8,1,1,3,3,0,1 +28,3,4,0,1,1,3,2,0,1 +48,1,3,4,1,1,3,4,1,1 +26,2,4,1,1,1,1,3,0,1 +21,3,4,0,1,0,3,1,0,1 +20,2,3,1,1,1,2,4,0,1 +39,1,1,5,1,0,3,1,1,1 +27,2,3,1,1,0,3,4,0,1 +37,2,3,5,1,0,3,2,0,1 +24,4,4,1,1,1,3,3,0,1 +39,3,3,6,1,1,3,3,0,1 +20,3,3,1,1,1,2,3,0,1 +19,3,4,1,1,1,2,3,0,1 +40,3,3,4,1,1,2,2,0,1 +41,1,4,4,1,1,3,3,0,1 +22,4,4,2,1,1,1,3,0,1 +32,3,4,4,1,1,1,4,0,1 +34,4,4,2,0,1,1,4,0,1 +34,4,4,2,1,0,1,3,0,1 +23,4,4,2,1,1,1,4,0,1 +27,4,4,1,1,1,1,2,0,1 +46,3,2,11,1,0,2,2,1,1 +34,4,4,3,0,1,1,2,0,1 +33,4,4,5,1,1,3,3,0,1 +25,3,3,1,1,1,3,2,0,1 +26,4,4,1,1,0,1,4,0,1 +48,1,3,4,1,1,3,3,1,1 +36,4,4,2,1,1,3,3,0,1 +38,3,4,2,0,1,2,4,0,1 +44,1,2,7,1,1,3,4,0,1 +44,2,2,1,1,0,3,2,0,1 +39,3,3,1,0,1,3,4,0,1 +47,1,2,7,1,0,3,2,0,1 +43,1,3,5,1,0,3,2,0,1 +29,3,3,3,1,0,2,4,0,1 +22,3,3,0,1,1,2,2,0,1 +31,2,4,5,1,1,3,2,0,1 +22,4,4,0,1,1,1,3,0,1 +45,3,4,6,1,1,2,4,0,1 +42,3,4,10,1,0,3,2,0,1 +38,3,3,7,1,1,3,1,0,1 +47,1,1,6,1,0,3,1,0,1 +30,4,4,2,1,1,1,4,0,1 +28,3,4,1,1,1,3,4,0,1 +25,3,4,1,1,1,2,1,0,1 +49,2,4,6,0,1,2,3,0,1 +23,3,4,2,1,0,2,3,0,1 +35,3,4,1,0,1,2,3,0,1 +47,2,1,5,0,1,2,4,0,1 +35,2,3,0,1,1,2,4,0,1 +44,3,4,6,1,1,3,4,0,1 +48,4,4,3,1,1,2,4,0,1 +43,2,3,6,1,0,3,3,0,1 +21,2,4,2,1,1,3,1,0,1 +22,4,4,0,1,1,1,4,0,1 +41,3,3,5,1,1,3,4,0,1 +29,2,4,2,1,0,4,4,1,1 +44,4,4,5,0,1,2,4,0,1 +22,4,4,0,1,0,1,4,0,1 +23,2,4,4,1,1,3,3,0,1 +22,2,1,0,1,1,3,2,0,1 +35,1,3,0,1,1,2,2,1,1 +24,4,4,1,1,1,2,2,0,1 +36,4,4,4,1,1,1,4,0,1 +47,1,2,2,1,1,2,4,0,1 +40,4,4,3,1,1,3,4,0,1 +29,4,4,1,1,1,1,3,0,1 +49,3,4,6,1,1,1,4,0,1 +49,3,4,9,1,1,1,4,0,1 +47,4,4,2,1,1,1,4,0,1 +28,4,4,3,0,1,1,4,0,1 +26,4,4,1,1,1,1,4,0,1 +29,1,2,7,1,1,2,4,0,1 +29,4,2,0,1,0,2,2,0,1 +25,3,3,1,1,1,3,4,0,1 +24,2,4,2,1,1,3,4,0,1 +23,4,4,1,1,0,2,4,0,1 +23,4,4,2,1,0,3,3,0,1 +44,1,2,2,1,1,2,4,0,1 +39,3,4,6,1,1,1,4,0,1 +21,3,3,0,1,1,2,2,0,1 +22,3,4,2,1,1,2,3,0,1 +47,3,4,3,1,1,2,3,0,1 +36,3,3,2,1,0,2,4,0,1 +25,1,4,2,1,0,3,3,1,1 +40,4,4,2,1,1,3,4,0,1 +36,2,2,0,0,0,3,3,0,1 +37,2,3,5,0,0,2,4,0,1 +28,1,4,2,1,0,2,3,0,1 +30,3,3,5,1,0,3,4,0,1 +16,3,4,1,1,1,3,3,0,1 +32,2,3,4,0,1,2,4,0,1 +40,4,4,3,0,1,3,4,0,1 +40,1,1,2,1,0,3,4,0,1 +45,3,4,2,0,1,1,4,0,1 +21,1,3,0,1,0,3,4,1,1 +28,4,4,1,0,1,1,3,0,1 +25,4,4,1,0,1,2,3,0,1 +44,1,3,4,1,1,3,3,1,1 +35,4,4,2,0,1,3,4,0,1 +49,4,4,4,1,1,3,4,0,1 +39,3,4,1,0,1,3,4,0,1 +32,3,4,1,0,1,2,4,0,1 +35,1,2,0,1,0,4,4,0,1 +21,1,2,1,1,0,3,4,0,1 +47,4,4,1,0,1,1,4,0,1 +27,4,4,1,0,1,3,4,0,1 +31,2,2,8,1,1,2,2,1,1 +34,4,3,0,0,0,3,3,0,1 +30,4,4,3,1,0,2,4,0,1 +25,4,4,0,1,0,2,4,0,1 +28,3,4,3,1,1,2,1,0,1 +30,2,3,4,1,0,3,2,0,1 +42,3,3,8,1,1,4,3,0,1 +23,4,4,0,0,1,2,4,0,1 +45,3,4,3,0,1,3,4,0,1 +21,4,4,1,1,1,3,3,0,1 +28,4,4,1,0,1,2,4,0,1 
+33,4,4,1,0,0,3,4,0,1 +32,1,2,4,1,1,3,2,1,1 +25,2,2,3,1,1,3,3,0,1 +25,3,3,1,1,1,3,1,0,1 +19,2,2,2,1,1,3,1,0,1 +40,1,2,6,1,1,3,1,1,1 +24,2,2,2,1,1,3,1,1,1 +32,2,2,1,1,1,2,1,0,1 +35,2,3,8,1,1,3,2,0,1 +27,2,2,1,1,0,3,2,1,1 +23,2,4,2,1,1,1,4,0,1 +22,2,4,4,1,1,3,4,0,1 +49,2,3,0,1,1,1,2,0,1 +25,2,3,0,1,1,2,4,0,1 +18,3,4,1,1,1,3,1,0,1 +43,3,3,4,1,1,2,4,0,1 +37,1,3,0,1,1,3,4,0,1 +30,2,3,2,1,0,2,4,0,1 +29,3,2,0,1,1,2,2,0,1 +20,2,3,3,1,1,3,4,0,1 +42,3,3,3,1,0,2,4,0,1 +23,4,4,1,1,1,2,3,0,1 +34,2,3,7,1,0,3,3,0,1 +23,3,3,6,1,1,3,3,0,1 +38,3,3,5,1,1,1,4,0,1 +37,2,3,4,1,1,2,3,0,1 +17,2,3,1,1,1,3,2,0,1 +39,3,4,3,1,1,1,4,0,1 +49,1,3,0,1,1,3,3,1,1 +23,3,3,2,1,1,2,2,0,1 +40,3,3,3,1,1,2,2,1,1 +32,2,4,5,1,1,2,3,0,1 +18,2,3,1,1,1,2,3,0,1 +46,1,2,8,1,1,3,3,1,1 +37,3,3,6,1,1,2,2,0,1 +38,2,4,5,0,1,2,2,0,1 +37,2,3,2,0,0,3,3,0,1 +25,2,3,1,1,0,2,1,0,1 +38,2,4,5,1,1,3,3,0,1 +39,4,4,2,1,0,1,4,0,1 +44,4,4,3,1,0,1,4,1,1 +19,4,4,0,0,1,3,4,0,1 +28,2,4,1,1,0,3,4,0,1 +20,2,3,1,1,1,3,4,0,1 +33,4,4,0,1,0,3,3,0,1 +33,4,4,5,1,1,3,4,0,1 +47,1,4,6,1,1,1,4,0,1 +25,3,3,0,1,1,3,4,0,1 +29,4,4,0,1,0,2,4,0,1 +37,4,4,2,0,1,1,4,0,1 +49,1,3,6,1,1,2,4,0,1 +48,1,1,8,1,0,2,2,1,1 +19,2,2,0,1,1,2,2,0,1 +46,1,2,3,1,1,2,1,1,1 +39,1,2,2,1,1,3,1,1,1 +30,3,4,5,1,1,1,4,0,1 +40,2,2,8,1,1,2,2,0,1 +43,1,2,1,1,1,3,4,1,1 +29,3,4,4,1,0,1,3,0,1 +23,3,3,2,1,1,3,3,0,1 +24,2,2,3,1,1,2,4,0,1 +47,3,3,2,1,1,2,4,0,1 +35,3,3,2,1,0,2,2,0,1 +19,3,3,1,1,1,2,4,0,1 +45,2,3,4,1,0,2,4,0,1 +42,2,4,2,1,1,3,4,0,1 +47,1,1,1,1,1,3,4,0,1 +25,2,2,2,1,1,3,1,0,1 +20,2,3,3,1,1,3,4,0,1 +33,3,3,6,1,1,3,4,0,2 +46,4,3,5,1,1,1,4,0,2 +35,4,4,4,1,1,1,4,0,2 +26,4,4,2,1,0,1,4,0,2 +36,3,3,5,1,0,1,3,0,2 +37,3,4,3,1,1,1,3,0,2 +44,2,4,4,1,1,3,3,0,2 +32,4,4,2,1,1,1,4,0,2 +25,2,4,6,1,1,3,4,0,2 +42,2,3,7,1,1,3,4,0,2 +30,4,4,2,0,0,1,4,0,2 +39,4,4,4,1,1,1,4,0,2 +26,1,1,2,1,1,3,4,0,2 +30,2,4,3,1,1,3,4,0,2 +38,4,4,4,0,0,1,2,0,2 +23,4,4,1,1,1,1,2,0,2 +18,3,4,1,1,1,3,4,0,2 +25,3,4,3,1,1,2,3,0,2 +37,4,4,2,1,0,3,3,0,2 +20,3,4,1,1,1,3,2,0,2 +45,4,4,7,1,1,1,4,0,2 +28,2,1,2,1,0,3,3,0,2 +33,2,2,3,1,1,2,3,0,2 +28,2,3,2,1,1,3,3,0,2 +30,2,4,1,1,0,3,4,0,2 +32,4,4,4,1,1,2,4,0,2 +48,4,3,7,1,0,2,4,0,2 +46,4,4,2,1,0,1,4,0,2 +32,4,4,2,0,0,1,4,0,2 +49,4,4,10,1,1,1,4,0,2 +33,2,2,6,1,0,3,4,0,2 +39,4,4,3,1,1,1,4,0,2 +35,4,4,5,0,0,1,4,0,2 +36,4,4,3,0,1,1,3,0,2 +20,3,4,1,1,1,2,4,0,2 +23,4,4,1,1,1,3,4,0,2 +26,2,4,2,1,1,1,3,0,2 +25,3,3,1,1,1,3,1,0,2 +31,4,4,1,1,1,1,4,0,2 +36,4,4,2,1,1,2,4,0,2 +32,3,3,2,1,0,3,4,0,2 +36,4,4,3,1,1,1,4,0,2 +40,4,4,3,1,1,1,4,0,2 +46,4,4,4,1,1,1,4,0,2 +42,4,4,8,0,1,1,4,0,2 +39,4,4,4,1,0,1,4,0,2 +34,4,4,2,1,1,1,4,0,2 +36,4,4,3,1,1,1,4,0,2 +37,4,4,3,1,1,1,4,0,2 +36,4,4,4,0,1,1,4,0,2 +38,4,4,2,1,1,1,4,0,2 +27,3,3,1,0,0,2,4,0,2 +37,4,3,8,1,1,1,4,0,2 +24,1,4,1,1,0,4,2,1,2 +42,2,4,8,1,1,1,4,0,2 +31,4,4,3,0,0,4,4,0,2 +34,2,3,2,1,1,3,3,0,2 +34,4,4,4,1,1,3,3,0,2 +36,4,4,4,0,0,1,4,0,2 +47,4,4,8,1,1,1,4,0,2 +34,4,4,3,1,1,1,4,0,2 +44,4,4,6,0,1,1,3,0,2 +21,4,4,1,1,1,1,4,0,2 +42,4,4,6,1,1,1,3,0,2 +44,4,4,4,1,0,2,3,0,2 +19,4,4,1,1,1,2,3,0,2 +35,1,3,7,1,0,2,3,1,2 +24,4,4,1,1,1,3,2,0,2 +32,3,4,7,0,1,1,3,0,2 +31,3,3,2,1,0,3,3,0,2 +47,3,4,11,1,0,2,3,0,2 +38,4,4,6,1,0,3,3,0,2 +40,4,4,4,1,1,1,3,0,2 +35,3,3,4,1,1,3,2,0,2 +26,3,4,3,1,1,3,4,0,2 +23,4,3,3,1,1,3,3,0,2 +40,4,4,6,0,0,1,4,0,2 +37,4,4,3,1,1,1,4,0,2 +39,4,4,5,1,1,2,4,0,2 +25,4,4,2,1,1,2,4,0,2 +48,4,4,4,0,1,1,4,0,2 +31,4,4,3,1,1,1,4,0,2 +26,4,4,1,0,1,2,4,0,2 +44,4,4,5,1,0,1,4,0,2 +29,4,4,1,1,0,2,4,0,2 +36,2,1,6,1,1,2,1,0,2 +29,4,4,2,1,0,1,3,0,2 +37,2,3,7,1,1,3,4,0,2 +28,3,3,3,1,1,1,2,0,2 +40,2,3,6,1,1,1,2,0,2 +33,4,4,4,1,1,1,4,0,2 +22,4,4,1,1,1,1,2,0,2 
+35,3,4,5,1,0,1,3,1,2 +32,4,4,3,0,1,3,3,0,2 +35,4,4,5,1,0,1,3,0,2 +29,4,4,2,1,1,1,4,0,2 +23,3,3,2,1,0,3,4,0,2 +35,4,4,2,1,0,1,4,0,2 +36,4,4,3,1,0,2,4,0,2 +32,4,4,2,1,0,1,4,0,2 +30,4,1,4,1,1,3,3,0,2 +36,4,4,4,0,1,2,4,0,2 +33,4,4,3,1,1,3,4,0,2 +33,3,4,5,0,1,3,3,0,2 +43,2,4,13,1,1,2,3,0,2 +35,3,3,6,1,1,2,3,0,2 +38,4,4,5,1,1,1,3,0,2 +25,3,1,4,1,1,3,1,0,2 +26,3,3,2,1,1,3,2,0,2 +24,4,4,1,1,1,3,4,0,2 +22,3,4,1,1,1,4,4,0,2 +33,2,3,5,1,0,2,1,0,2 +45,3,3,7,1,1,3,4,0,2 +29,4,4,2,1,0,1,3,0,2 +47,2,2,7,1,1,2,4,0,2 +36,3,2,6,1,1,3,2,0,2 +45,3,3,8,1,1,2,3,0,2 +47,4,4,6,1,0,1,4,0,2 +43,3,4,2,1,0,2,4,0,2 +44,4,4,7,1,1,3,2,0,2 +24,2,4,2,1,1,3,3,0,2 +28,4,4,2,1,0,1,3,0,2 +34,4,4,5,1,1,3,4,0,2 +31,3,4,4,1,1,1,3,0,2 +27,4,4,3,1,1,1,4,0,2 +27,3,3,1,1,0,3,4,0,2 +26,4,4,1,1,0,1,3,0,2 +36,3,4,7,1,1,2,2,0,2 +35,4,4,4,0,1,3,3,0,2 +38,4,4,6,1,1,3,3,0,2 +44,4,4,7,1,1,2,4,0,2 +25,4,4,1,1,1,1,4,0,2 +25,4,4,2,0,1,1,3,0,2 +41,4,4,4,1,1,1,4,0,2 +41,3,4,5,1,1,1,3,0,2 +24,4,4,1,1,0,1,4,0,2 +33,4,4,3,1,1,3,3,0,2 +29,4,4,3,1,0,3,3,0,2 +47,4,4,4,1,0,1,4,0,2 +28,4,4,4,1,1,1,3,0,2 +21,2,2,1,1,1,3,2,0,2 +32,3,3,6,1,0,3,3,0,2 +30,3,4,2,1,1,1,4,0,2 +37,4,4,5,1,1,3,4,0,2 +29,4,3,4,1,0,3,2,0,2 +38,4,4,3,1,0,1,4,0,2 +49,3,3,11,1,1,1,4,0,2 +41,3,3,5,1,1,1,4,0,2 +43,3,4,5,1,1,3,2,1,2 +47,1,1,10,1,1,3,3,0,2 +22,4,4,1,1,1,1,3,0,2 +27,4,4,1,0,0,1,4,0,2 +34,3,4,2,0,1,2,3,0,2 +42,3,3,4,1,1,2,4,0,2 +39,4,4,3,0,0,1,4,0,2 +36,4,4,3,0,1,2,4,0,2 +21,4,4,1,1,1,2,1,0,2 +36,3,4,4,0,0,2,4,0,2 +25,2,3,1,1,0,4,4,0,2 +41,4,4,4,0,0,2,4,0,2 +32,4,4,4,0,0,2,4,0,2 +41,4,4,4,0,0,2,4,0,2 +43,4,4,3,0,1,1,4,0,2 +35,4,4,3,1,1,2,3,0,2 +24,4,4,2,1,0,1,3,0,2 +43,4,4,6,1,1,1,4,0,2 +48,4,4,7,0,1,1,4,0,2 +28,4,4,2,1,1,1,3,0,2 +24,4,4,1,1,1,1,4,0,2 +42,4,4,3,1,0,1,4,0,2 +39,4,4,3,1,1,1,4,0,2 +24,4,4,1,1,1,1,4,0,2 +33,4,3,5,1,1,2,4,0,2 +29,4,4,2,1,1,3,4,0,2 +44,4,4,5,1,1,1,4,0,2 +17,4,4,1,1,1,2,4,0,2 +35,3,4,3,1,1,2,2,0,2 +32,3,4,5,1,1,1,2,0,2 +27,4,4,2,1,0,1,3,0,2 +44,4,4,4,1,1,1,4,0,2 +42,4,4,5,1,1,1,4,0,2 +40,4,4,6,1,1,1,3,0,2 +44,4,4,3,1,1,1,4,0,2 +33,4,4,5,1,1,2,4,0,2 +35,2,4,3,0,0,3,4,1,2 +31,4,2,2,0,1,1,3,0,2 +37,4,4,3,0,1,3,4,0,2 +45,4,4,3,0,1,2,4,0,2 +26,4,4,2,0,1,1,4,0,2 +40,3,4,2,0,1,3,4,0,2 +30,4,4,1,0,1,2,4,0,2 +36,3,4,3,0,0,3,4,0,2 +33,3,4,3,0,1,2,4,0,2 +41,4,4,5,0,1,2,4,0,2 +45,3,3,6,0,1,2,4,0,2 +48,4,4,6,0,1,2,4,0,2 +35,4,4,3,0,0,3,4,0,2 +37,4,4,4,0,0,2,4,0,2 +46,4,4,3,0,0,1,4,0,2 +38,4,4,4,0,1,3,4,0,2 +30,3,4,7,1,1,2,4,0,2 +42,4,3,7,1,0,1,4,0,2 +35,2,2,6,1,1,3,4,0,2 +34,4,4,6,1,1,3,4,0,2 +37,4,4,4,1,1,1,4,0,2 +38,4,4,6,1,1,1,4,0,2 +30,3,3,2,1,1,2,3,0,2 +46,4,4,4,1,1,1,4,0,2 +31,4,4,3,0,1,2,4,0,2 +41,4,4,6,0,1,1,4,0,2 +41,4,4,7,1,1,2,4,0,2 +32,4,4,2,0,0,2,4,0,2 +44,4,4,4,1,1,1,4,0,2 +39,4,4,4,0,1,1,4,0,2 +39,4,4,5,1,1,1,4,0,2 +45,4,4,4,1,1,1,4,0,2 +33,4,4,3,1,1,1,4,0,2 +39,4,4,3,0,1,1,4,0,2 +44,4,4,3,1,0,1,4,0,2 +34,3,3,6,1,1,3,4,0,2 +33,3,3,5,1,1,3,4,0,2 +34,2,2,8,1,1,2,2,0,2 +27,3,2,5,1,1,2,4,0,2 +47,1,1,8,1,1,3,4,1,2 +25,2,4,3,1,1,1,4,1,2 +24,3,4,5,1,1,1,2,1,2 +41,4,4,4,1,0,1,4,0,2 +29,3,3,4,1,1,3,4,0,3 +27,2,2,5,1,1,3,1,0,3 +40,3,4,5,1,1,1,2,0,3 +30,2,2,6,1,1,3,1,0,3 +24,4,4,1,1,0,1,4,0,3 +36,4,4,2,1,0,1,4,0,3 +17,2,2,1,1,1,3,2,0,3 +28,1,2,4,1,1,2,3,0,3 +27,2,4,1,1,1,1,4,0,3 +21,3,3,1,1,1,3,3,0,3 +30,2,2,4,1,1,2,4,0,3 +48,4,4,16,1,1,1,4,0,3 +29,2,4,3,1,0,3,3,0,3 +29,4,4,2,1,1,3,4,0,3 +20,3,4,2,1,1,1,3,0,3 +28,4,4,1,1,1,3,2,0,3 +32,2,3,5,1,0,2,3,0,3 +27,3,3,1,1,0,3,3,0,3 +25,2,3,1,1,1,3,1,0,3 +43,4,4,3,1,1,1,4,0,3 +35,4,4,2,1,1,2,4,0,3 +30,4,4,1,1,1,1,4,0,3 +29,1,4,3,1,1,4,4,1,3 +33,3,4,8,1,1,3,4,0,3 +37,3,4,5,1,0,2,4,0,3 +27,2,4,3,1,1,4,4,0,3 
+33,2,4,3,1,1,2,4,0,3 +35,2,3,6,1,0,3,3,0,3 +20,3,3,2,1,1,2,4,0,3 +26,2,4,3,1,1,3,4,0,3 +24,3,3,2,1,1,3,3,0,3 +45,1,3,3,1,1,3,2,0,3 +34,3,4,4,1,1,1,3,0,3 +49,4,3,13,1,1,1,4,0,3 +33,4,4,5,1,1,1,4,0,3 +33,3,3,8,1,1,3,4,0,3 +24,3,4,3,1,1,1,4,0,3 +30,2,2,4,1,1,2,1,0,3 +27,2,4,4,1,1,2,1,0,3 +23,4,3,1,1,1,2,2,0,3 +35,4,4,4,1,1,1,4,0,3 +35,4,4,1,0,1,1,4,0,3 +40,4,4,2,0,0,1,4,0,3 +29,3,3,1,1,1,3,3,0,3 +33,3,3,3,1,1,3,3,0,3 +36,4,4,6,0,1,1,2,1,3 +36,4,4,4,0,1,1,4,0,3 +41,4,4,1,1,1,3,3,0,3 +38,3,4,3,1,1,3,2,0,3 +29,2,4,3,1,1,3,3,0,3 +19,4,4,1,1,1,1,3,0,3 +34,2,3,2,1,1,2,3,0,3 +26,2,4,3,1,1,3,4,0,3 +22,4,4,1,1,1,2,3,0,3 +42,4,4,7,1,1,2,4,0,3 +27,3,4,2,1,1,3,2,0,3 +27,4,4,2,1,1,1,4,0,3 +39,4,3,1,1,0,2,2,0,3 +21,4,4,1,1,1,3,1,0,3 +36,2,3,7,1,1,3,3,0,3 +29,1,2,5,1,0,1,2,0,3 +29,2,3,4,1,1,2,1,0,3 +22,3,2,2,1,1,2,1,0,3 +20,2,3,1,1,1,2,1,0,3 +33,3,4,4,1,0,2,4,0,3 +27,3,3,3,1,1,2,3,0,3 +25,4,4,3,1,1,1,4,0,3 +25,4,3,2,1,1,3,3,0,3 +32,2,3,4,1,1,3,2,0,3 +39,3,4,2,1,1,1,3,0,3 +31,3,3,3,1,1,2,1,1,3 +26,4,4,4,1,1,3,4,0,3 +30,3,3,7,1,1,3,4,0,3 +43,3,3,5,1,0,3,3,0,3 +34,2,4,4,1,1,3,3,0,3 +43,4,4,3,1,1,1,4,0,3 +32,4,4,3,0,1,1,4,0,3 +41,4,4,5,1,1,1,4,0,3 +32,4,4,3,0,1,1,4,0,3 +46,4,4,4,1,1,1,4,0,3 +40,3,4,3,1,0,1,4,0,3 +21,4,4,1,1,1,2,3,0,3 +32,4,4,3,1,1,1,4,0,3 +25,4,4,1,0,1,3,3,0,3 +24,4,4,1,1,1,1,3,0,3 +21,2,3,1,1,0,3,2,0,3 +22,4,4,1,1,1,3,3,0,3 +32,4,4,4,1,1,1,3,0,3 +30,4,4,2,1,0,2,2,0,3 +25,1,2,2,1,1,2,1,0,3 +31,3,3,6,0,1,3,1,0,3 +19,2,4,1,1,1,2,3,0,3 +33,2,2,7,1,0,2,3,0,3 +24,2,2,2,1,1,3,4,0,3 +19,3,3,3,1,1,2,3,0,3 +27,2,2,5,1,1,3,3,0,3 +32,4,4,2,0,0,1,4,0,3 +32,4,4,2,1,0,1,3,0,3 +29,1,3,5,1,1,3,3,0,3 +23,3,4,1,1,1,2,3,0,3 +34,2,4,7,1,1,2,3,0,3 +27,2,4,1,1,1,3,3,0,3 +37,4,4,4,1,1,3,2,0,3 +26,3,4,4,1,1,2,3,0,3 +31,2,3,7,1,1,3,3,0,3 +44,4,4,11,1,1,1,4,0,3 +35,3,4,4,1,1,1,4,0,3 +23,4,4,2,1,1,3,4,0,3 +24,2,4,2,1,1,1,4,0,3 +24,4,4,2,1,1,1,4,0,3 +39,4,4,3,1,1,2,4,0,3 +33,3,4,4,1,1,3,3,0,3 +24,2,3,2,1,1,2,1,0,3 +35,4,4,4,1,1,1,4,0,3 +35,4,4,6,1,1,1,4,0,3 +34,4,4,2,1,1,3,3,0,3 +48,3,4,3,1,1,2,3,0,3 +28,4,4,1,1,1,1,3,0,3 +26,4,4,1,1,1,3,3,0,3 +43,3,3,8,1,1,1,3,0,3 +28,2,2,5,1,0,2,4,0,3 +22,3,4,3,1,1,2,3,0,3 +26,3,3,3,1,1,3,3,0,3 +30,3,4,4,1,1,3,3,0,3 +38,4,4,5,1,1,2,3,0,3 +44,4,4,8,1,1,2,4,0,3 +25,4,4,1,0,1,1,4,0,3 +34,4,4,3,1,1,1,4,0,3 +26,4,4,3,1,1,3,4,0,3 +20,4,4,2,1,1,1,3,0,3 +39,4,4,2,0,1,1,3,0,3 +27,3,4,4,1,1,3,3,0,3 +32,4,4,2,1,0,3,4,0,3 +48,3,4,8,1,0,1,4,0,3 +39,4,3,4,1,1,3,4,0,3 +37,4,4,3,1,0,1,4,0,3 +33,4,4,2,0,0,1,3,0,3 +32,4,4,3,1,0,1,4,0,3 +25,4,4,2,1,1,1,3,0,3 +37,4,4,3,1,1,2,4,0,3 +40,4,4,4,1,1,1,4,0,3 +38,4,4,6,1,1,2,4,0,3 +36,4,4,4,1,1,1,4,0,3 +27,3,3,3,1,1,2,2,0,3 +22,1,3,2,1,1,3,1,0,3 +22,2,2,2,1,1,2,3,0,3 +30,3,4,3,1,1,2,3,0,3 +20,2,2,2,1,1,3,2,1,3 +34,4,4,2,1,1,1,4,0,3 +31,3,4,5,1,1,3,1,0,3 +30,3,3,3,1,0,3,2,0,3 +44,3,4,5,1,0,1,4,0,3 +24,3,3,3,1,0,2,1,0,3 +19,4,4,2,1,1,1,4,0,3 +28,4,4,5,1,1,2,4,0,3 +27,4,4,1,0,0,1,3,0,3 +29,3,3,3,1,1,2,1,0,3 +37,4,4,3,1,1,2,4,0,3 +32,2,2,8,1,1,2,1,0,3 +21,2,2,1,1,1,4,3,0,3 +33,2,3,7,1,0,3,1,0,3 +23,2,3,2,1,1,3,1,0,3 +36,2,2,5,1,1,3,4,0,3 +33,2,4,3,1,1,3,4,0,3 +35,4,4,2,1,0,1,4,0,3 +25,2,3,4,1,1,3,3,0,3 +29,2,4,4,1,0,2,4,0,3 +23,3,3,1,1,1,2,2,0,3 +19,3,3,1,1,1,2,1,0,3 +35,1,2,6,1,1,2,2,1,3 +25,3,3,3,1,1,3,2,0,3 +40,3,4,6,1,1,1,3,0,3 +17,3,4,1,1,1,2,1,0,3 +25,2,4,3,1,1,3,3,0,3 +24,3,3,1,1,1,3,2,0,3 +23,4,4,1,1,1,3,3,0,3 +25,2,4,3,1,1,3,2,0,3 +30,1,3,4,1,1,3,4,0,3 +31,1,1,3,1,1,4,2,1,3 +26,3,4,2,1,1,3,3,0,3 +28,3,4,2,1,1,2,3,0,3 +30,3,3,3,1,1,3,1,0,3 +29,3,4,5,1,1,1,2,0,3 +24,1,3,4,1,1,3,3,0,3 +26,3,4,2,1,1,1,3,0,3 +25,3,3,3,1,1,3,3,0,3 +35,4,4,3,1,1,1,4,0,3 
+22,4,4,1,1,0,2,4,0,3 +19,4,4,1,1,1,2,4,0,3 +29,2,3,5,1,1,2,4,0,3 +21,3,4,2,1,1,3,3,0,3 +21,3,3,1,1,1,3,3,0,3 +41,1,4,9,1,1,2,1,0,3 +37,2,4,3,1,1,2,3,0,3 +27,4,4,3,1,1,3,4,1,3 +26,2,3,2,1,1,2,3,0,3 +37,4,4,6,1,0,3,4,0,3 +25,3,3,3,1,1,2,1,0,3 +32,4,3,3,1,1,2,4,0,3 +34,1,4,4,1,1,3,4,1,3 +23,4,4,2,1,0,2,4,0,3 +28,3,4,2,1,1,3,4,0,3 +24,3,4,1,1,1,3,4,0,3 +32,2,3,4,1,0,2,2,0,3 +22,1,2,3,1,1,3,1,1,3 +25,3,3,1,1,1,2,1,0,3 +25,2,3,2,1,1,2,4,0,3 +38,2,3,3,1,1,2,4,0,3 +33,2,2,2,1,1,3,4,0,3 +41,4,4,8,1,1,1,4,0,3 +29,3,3,4,1,1,3,4,0,3 +22,3,4,1,1,0,1,4,0,3 +29,3,3,6,1,1,3,3,0,3 +26,3,4,3,1,1,3,3,0,3 +28,2,3,3,1,1,3,4,0,3 +27,4,4,1,0,1,3,4,0,3 +29,2,4,2,1,1,3,2,0,3 +24,3,4,2,1,1,3,3,0,3 +29,3,4,3,1,1,3,2,0,3 +38,3,4,7,1,1,3,2,0,3 +23,3,2,2,1,0,3,2,0,3 +20,4,3,1,1,1,3,4,0,3 +32,1,4,4,1,1,3,2,0,3 +25,2,3,2,1,1,3,1,0,3 +26,1,2,3,1,0,3,2,0,3 +23,1,2,3,1,1,3,2,1,3 +23,4,4,3,1,1,3,3,0,3 +28,3,4,5,1,1,2,4,0,3 +22,4,4,3,1,1,3,2,0,3 +28,2,4,4,1,1,3,4,0,3 +26,3,3,3,1,1,3,3,0,3 +26,2,3,2,1,1,3,3,0,3 +24,4,3,3,1,1,3,3,0,3 +29,4,4,3,1,0,1,4,0,3 +29,3,4,4,1,0,1,3,0,3 +42,4,4,5,1,1,1,4,0,3 +27,4,4,2,1,1,3,4,0,3 +35,3,4,5,1,1,1,4,0,3 +21,3,4,1,1,1,3,2,0,3 +21,1,3,1,1,1,3,3,1,3 +22,3,4,1,1,1,3,3,0,3 +28,3,3,4,1,1,3,3,0,3 +22,3,4,1,0,0,1,4,0,3 +23,3,2,1,1,1,3,3,0,3 +28,3,3,2,0,1,2,3,0,3 +30,4,4,3,0,0,2,4,0,3 +25,3,4,3,1,0,3,4,0,3 +33,4,4,4,1,1,2,3,0,3 +33,3,4,6,1,1,3,2,0,3 +30,4,4,2,0,0,3,3,0,3 +22,2,4,1,1,1,3,3,0,3 +38,3,3,2,0,1,3,4,0,3 +35,4,3,2,0,1,3,4,0,3 +34,2,2,4,0,1,3,2,0,3 +39,4,4,2,0,1,3,4,0,3 +47,4,3,4,1,1,1,3,0,3 +38,4,4,2,1,0,1,4,0,3 +43,4,4,5,1,1,1,4,0,3 +21,3,3,1,1,1,2,2,0,3 +30,3,4,2,1,1,1,2,0,3 +27,3,4,2,1,1,1,3,0,3 +22,4,4,1,0,1,3,1,0,3 +25,1,2,3,1,1,2,3,0,3 +22,1,3,2,1,1,3,2,0,3 +28,4,4,4,1,1,2,3,0,3 +25,3,3,2,1,1,2,3,0,3 +26,4,4,2,1,0,3,3,0,3 +31,3,3,3,1,1,3,1,1,3 +32,1,3,4,1,1,3,3,0,3 +35,2,4,6,1,1,3,4,0,3 +24,2,3,5,1,1,3,3,0,3 +28,4,4,2,1,1,1,4,0,3 +36,4,4,3,0,0,2,4,0,3 +47,4,4,5,1,1,1,3,0,3 +26,4,4,2,1,0,1,4,0,3 +22,3,4,2,0,1,3,4,0,3 +30,4,4,2,1,1,4,3,0,3 +41,4,4,5,1,1,1,4,0,3 +19,4,4,1,1,1,3,2,0,3 +45,3,4,2,0,1,3,3,0,3 +35,1,2,5,1,1,2,2,1,3 +21,2,2,1,1,1,3,2,0,3 +32,1,2,6,0,1,2,3,0,3 +32,3,4,5,0,1,3,4,0,3 +46,2,3,6,0,0,3,4,0,3 +41,1,2,6,1,1,3,3,0,3 +36,4,4,3,0,1,3,4,0,3 +37,3,4,3,0,1,3,4,0,3 +37,3,4,4,0,1,2,4,0,3 +35,4,4,3,0,1,3,4,0,3 +29,2,3,4,1,1,3,2,0,3 +28,4,4,2,0,1,2,4,0,3 +46,4,4,4,0,1,1,4,0,3 +34,4,3,3,0,1,2,4,0,3 +43,3,3,2,0,0,3,4,0,3 +28,4,4,3,0,0,3,4,0,3 +28,3,3,3,1,0,2,4,0,3 +34,4,4,6,1,1,2,2,0,3 +27,2,4,4,1,1,3,4,0,3 +36,1,3,6,1,1,3,2,0,3 +17,2,2,0,1,0,3,2,0,3 +21,3,3,1,1,1,3,2,0,3 +25,2,2,1,1,1,3,1,0,3 +23,2,3,2,1,0,3,2,0,3 +25,2,2,1,1,1,3,3,0,3 +35,1,2,4,1,0,4,4,0,3 +32,3,4,4,1,1,3,2,0,3 +37,2,3,5,1,1,4,2,0,3 +31,3,4,4,1,0,3,3,0,3 +25,3,4,3,1,1,3,4,0,3 +26,3,4,4,1,1,2,4,0,3 +39,1,2,2,1,0,2,4,0,3 +21,3,3,2,1,1,2,3,0,3 +35,3,2,9,1,0,3,4,0,3 +20,2,2,1,1,1,3,2,0,3 +40,2,3,5,1,1,3,3,0,3 +41,3,3,7,0,1,2,4,0,3 +36,2,4,8,1,1,1,4,0,3 +40,2,4,3,0,1,3,4,0,3 +32,4,4,3,1,0,1,4,0,3 +24,3,4,4,1,1,2,3,0,3 +20,3,4,1,0,1,2,4,0,3 +32,4,4,4,1,0,1,4,0,3 +22,2,2,3,1,0,3,3,0,3 +30,3,3,4,1,0,2,2,0,3 +31,4,4,3,1,0,1,4,0,3 +35,3,4,5,1,1,1,4,0,3 +38,4,4,2,1,1,3,4,0,3 +31,4,4,1,1,1,1,4,0,3 +25,4,4,2,1,1,2,4,0,3 +30,4,4,2,1,1,1,4,0,3 +22,1,2,2,1,1,3,4,0,3 +31,4,4,3,1,1,1,4,0,3 +29,4,4,3,1,1,3,4,0,3 +29,2,3,3,1,0,3,2,0,3 +38,4,4,3,0,0,3,3,1,3 +22,3,4,2,1,1,2,4,0,3 +45,4,4,3,1,1,1,4,0,3 +26,3,4,3,1,1,3,2,0,3 +37,4,4,3,0,1,3,4,0,3 +36,4,4,3,0,1,2,4,0,3 +33,4,4,2,1,1,2,4,0,3 +32,4,4,3,1,0,1,4,0,3 +35,4,4,4,0,0,1,4,0,3 +25,4,4,1,1,1,1,4,0,3 +31,4,4,3,1,1,4,4,0,3 +25,2,3,5,1,1,2,3,0,3 +26,3,2,5,1,1,2,2,0,3 
+23,2,3,3,1,1,2,3,0,3 +28,3,4,3,1,1,1,4,0,3 +34,3,4,3,0,0,3,4,0,3 +46,3,4,9,1,1,3,4,0,3 +25,4,4,1,1,1,1,4,0,3 +21,2,1,3,1,0,3,1,1,3 +25,2,3,4,1,0,3,2,0,3 +28,2,3,6,1,1,2,4,0,3 +36,2,4,11,1,1,1,4,1,3 +46,1,2,9,1,0,3,4,1,3 +35,2,2,6,1,0,3,1,1,1 +41,3,2,11,1,1,2,3,0,1 +27,2,4,1,1,0,1,1,0,1 +25,4,4,0,1,0,3,4,0,1 +23,2,2,2,1,1,3,3,0,1 +48,1,1,1,1,0,3,3,1,1 +23,4,4,0,1,0,1,4,0,1 +31,3,4,2,1,1,3,3,0,1 +49,4,4,1,1,1,3,4,0,1 +42,2,3,2,1,1,1,4,0,1 +26,2,4,3,1,1,3,4,0,1 +21,2,2,1,1,1,3,2,0,1 +29,3,4,4,1,1,1,4,0,1 +38,1,2,12,1,1,2,3,0,1 +25,4,4,2,1,1,3,2,0,1 +22,4,4,0,1,1,3,2,0,1 +20,3,4,0,1,1,3,3,0,1 +35,4,4,1,1,1,1,3,0,1 +32,2,3,1,1,1,3,3,0,1 +38,2,4,3,1,1,1,3,0,1 +27,1,2,4,1,1,1,4,1,1 +41,2,4,1,1,1,2,3,0,1 +28,3,4,0,1,0,1,3,0,1 +23,4,4,1,1,0,3,3,0,1 +27,4,4,1,0,0,2,4,0,1 +33,4,4,2,0,1,1,4,0,1 +39,2,2,5,1,0,3,2,1,1 +41,2,2,5,0,1,2,2,0,1 +40,2,2,8,1,1,1,3,0,1 +43,3,4,10,1,1,3,3,0,1 +35,3,4,6,1,0,2,2,0,1 +32,3,4,0,1,1,3,4,0,1 +49,2,3,4,1,1,2,3,0,1 +47,2,2,4,1,1,3,1,0,1 +42,4,4,1,1,1,2,4,0,1 +48,4,4,4,0,1,1,4,0,1 +42,4,4,3,1,1,1,4,0,1 +37,4,4,2,1,1,1,4,0,1 +23,3,3,1,1,0,2,4,0,1 +24,3,4,0,1,0,2,3,0,1 +43,2,3,3,0,1,2,3,0,1 +24,3,3,2,1,0,3,2,0,1 +29,2,4,0,1,0,3,4,0,1 +45,1,1,0,0,0,2,1,0,1 +31,4,4,3,1,0,2,3,0,1 +44,1,4,9,1,1,1,2,0,1 +28,2,3,1,1,1,3,1,0,1 +28,3,4,1,0,1,2,2,0,1 +33,1,2,0,1,0,3,3,0,1 +43,4,4,6,1,1,1,3,0,1 +34,3,4,4,1,1,1,3,0,1 +36,1,4,2,1,1,2,2,0,1 +22,4,4,0,1,0,2,4,0,1 +35,2,3,8,1,1,3,3,1,1 +25,4,4,1,1,1,1,4,0,1 +43,1,2,8,1,1,2,2,1,1 +46,1,4,8,1,1,1,4,1,1 +27,3,4,2,1,1,1,3,0,1 +30,1,1,7,1,1,2,3,0,1 +25,4,4,1,1,1,3,4,0,1 +42,4,4,1,1,0,1,4,0,1 +30,1,3,1,1,1,3,4,0,1 +27,3,3,4,1,1,2,4,0,1 +45,2,3,11,1,1,2,3,0,1 +29,3,4,2,1,1,1,3,0,1 +36,1,4,1,1,1,3,3,0,1 +49,1,2,8,1,1,3,1,1,1 +47,1,4,7,1,1,2,3,0,1 +30,4,4,4,1,1,2,4,0,1 +24,2,3,2,1,1,3,1,0,1 +48,1,1,6,1,1,2,3,1,1 +47,1,2,2,1,1,2,3,1,1 +24,3,4,1,1,0,3,3,0,1 +48,3,4,5,1,1,2,3,0,1 +38,1,2,8,1,1,3,3,1,1 +45,3,4,0,1,1,1,3,0,1 +43,4,4,3,0,1,2,4,0,1 +33,1,1,5,1,1,3,1,1,1 +37,3,4,7,1,1,3,4,0,1 +36,2,1,6,1,1,2,3,0,1 +25,3,4,3,1,1,1,1,0,1 +49,4,4,6,0,0,1,4,0,1 +30,4,4,1,1,0,1,4,0,1 +35,4,4,5,1,1,1,4,0,1 +28,2,4,0,1,1,3,1,0,1 +30,4,2,0,1,0,3,3,0,1 +27,4,4,2,1,0,1,4,0,1 +45,2,4,8,1,1,2,3,0,1 +46,3,1,5,1,1,3,2,0,1 +28,1,4,0,1,0,4,1,0,1 +21,2,4,0,1,1,2,2,0,1 +20,3,4,2,1,1,3,3,0,1 +28,3,3,5,1,1,3,3,0,1 +25,3,3,1,1,1,2,2,0,1 +38,1,2,6,1,1,2,1,1,1 +20,3,4,1,1,1,3,2,0,1 +24,3,4,2,1,1,2,4,0,1 +39,3,1,1,1,1,2,3,0,1 +25,1,3,1,1,1,4,2,0,1 +23,3,4,4,1,1,3,1,1,1 +24,4,4,1,1,0,1,3,0,1 +26,4,4,3,1,1,1,4,0,1 +20,3,4,2,1,1,3,2,0,1 +31,3,3,1,1,0,3,4,0,1 +22,4,4,0,1,1,2,1,1,1 +36,2,3,6,1,1,2,3,0,1 +45,3,4,0,1,1,1,3,0,1 +47,2,2,7,1,1,2,2,0,1 +30,2,3,1,1,1,2,1,0,1 +35,4,4,3,1,1,1,4,0,1 +34,2,2,9,1,0,2,3,0,1 +28,4,4,1,1,0,1,3,0,1 +18,3,4,1,1,1,3,3,0,1 +25,2,3,2,1,1,3,2,0,1 +27,3,4,0,1,0,3,3,0,1 +21,4,4,1,1,1,3,4,0,1 +19,4,4,1,1,1,1,3,0,1 +26,4,4,0,1,1,3,4,0,1 +28,4,4,5,1,1,1,4,0,1 +38,1,3,4,1,0,2,2,0,1 +39,2,3,10,1,1,3,4,0,1 +45,2,3,6,1,1,3,2,1,1 +30,3,4,4,0,1,2,3,0,1 +47,2,4,2,1,1,3,2,1,1 +38,2,2,6,1,1,3,4,0,1 +42,3,3,5,1,1,2,2,0,1 +31,4,4,2,1,1,1,2,0,1 +44,1,2,2,1,1,3,1,0,1 +42,4,4,3,1,0,1,4,0,1 +28,4,4,2,1,1,1,3,0,1 +34,2,4,6,1,0,3,4,0,1 +45,1,3,7,1,0,1,3,0,1 +43,4,4,5,1,1,1,3,0,1 +28,3,4,0,1,0,3,4,0,1 +27,3,4,0,1,1,3,3,0,1 +26,2,3,1,1,0,4,4,0,1 +26,3,4,1,0,1,2,2,0,1 +37,3,4,4,1,0,2,4,0,1 +48,3,4,11,1,1,1,4,0,1 +47,2,3,12,1,1,3,1,0,1 +22,2,4,1,1,1,3,4,0,1 +48,4,4,1,0,1,1,4,0,1 +42,1,3,12,1,1,2,4,0,1 +32,4,4,3,1,1,1,4,0,1 +33,4,4,3,0,0,1,3,0,1 +45,4,4,1,1,0,1,4,0,1 +26,2,3,1,1,0,3,4,1,1 +45,4,4,1,1,1,1,4,0,1 +25,3,3,1,1,0,2,4,0,1 +36,4,3,3,1,0,3,2,0,1 +31,3,3,2,1,0,3,3,0,1 
+22,3,4,2,1,1,3,3,0,1 +46,4,4,10,1,1,1,3,0,1 +26,2,3,3,1,1,3,4,0,1 +25,3,2,1,1,0,3,3,0,1 +20,4,4,0,0,0,2,4,0,1 +31,3,4,3,0,0,2,4,0,1 +36,3,3,3,0,0,2,3,1,1 +47,3,2,5,1,1,4,4,0,1 +43,3,4,2,0,1,3,4,0,1 +20,3,4,0,1,1,3,3,0,1 +24,4,4,4,1,1,1,4,0,1 +33,2,2,3,0,0,2,4,0,1 +41,3,4,3,0,1,2,4,0,1 +48,3,4,4,0,1,2,4,0,1 +32,4,4,1,0,1,1,4,0,1 +38,3,3,4,0,0,3,2,1,1 +48,3,4,8,1,1,3,3,0,1 +34,1,3,4,1,0,3,4,0,1 +31,1,1,6,1,1,3,2,0,1 +46,1,2,7,1,1,3,1,1,1 +45,2,2,7,1,1,3,1,1,1 +34,3,4,4,0,1,3,1,1,1 +16,2,4,1,1,1,3,4,0,1 +18,3,2,1,1,1,3,1,0,1 +22,2,2,1,1,1,3,1,1,1 +32,2,2,2,1,1,2,4,0,1 +41,1,3,3,1,0,2,4,0,1 +22,3,3,2,1,1,2,4,0,1 +23,3,2,0,1,1,2,2,0,1 +22,3,4,0,1,0,3,4,0,1 +40,2,2,6,1,1,2,2,1,1 +29,3,4,1,0,1,2,3,0,1 +42,4,4,0,0,0,3,4,0,1 +29,3,4,2,1,1,3,4,0,1 +31,3,2,1,1,1,3,4,0,1 +28,4,4,3,0,0,1,4,0,1 +21,2,3,0,1,1,3,4,0,1 +26,4,4,2,1,1,1,3,0,1 +37,3,4,0,1,1,3,4,0,1 +24,3,4,2,1,1,2,2,0,1 +25,4,4,0,0,1,2,4,0,1 +22,2,1,0,1,1,2,2,0,1 +45,4,4,1,1,1,1,4,0,1 +47,4,4,1,1,1,1,4,0,1 +28,4,4,5,1,0,3,4,0,1 +24,2,2,4,1,1,3,4,0,1 +34,3,3,6,1,1,2,3,0,1 +47,2,3,5,1,1,1,1,0,1 +31,3,4,2,1,1,1,4,0,1 +26,1,3,1,1,1,3,1,0,1 +25,4,4,3,1,1,1,2,0,1 +47,1,1,4,1,1,2,2,0,1 +48,1,1,8,1,1,1,1,1,1 +38,2,2,10,1,1,3,2,0,1 +32,1,3,4,1,1,2,2,1,1 +47,1,2,2,1,1,2,4,0,1 +27,4,4,3,1,1,1,4,0,1 +40,1,1,0,1,1,3,2,0,1 +46,4,4,6,1,1,1,4,0,1 +33,3,3,3,1,1,3,4,0,1 +49,1,2,5,1,0,2,2,1,1 +21,2,4,3,1,1,3,2,0,1 +31,4,4,0,0,0,2,4,0,1 +42,4,4,4,1,1,3,4,0,2 +35,4,4,3,1,1,1,4,0,2 +31,4,4,4,1,1,3,3,0,2 +42,4,4,6,1,1,1,3,0,2 +49,3,4,4,1,1,3,4,0,2 +21,4,4,1,1,1,1,3,0,2 +28,3,4,3,1,1,1,4,0,2 +22,4,4,1,1,1,1,3,0,2 +36,4,4,3,1,0,1,4,0,2 +45,2,2,9,1,1,3,3,0,2 +40,4,4,4,1,1,1,4,0,2 +30,2,2,7,1,1,3,4,0,2 +35,4,4,5,1,0,1,4,0,2 +26,2,4,2,1,1,1,4,0,2 +45,4,4,3,1,1,1,4,0,2 +27,3,3,2,1,1,3,4,0,2 +32,4,4,4,1,1,1,2,0,2 +27,4,4,3,1,1,1,3,0,2 +28,4,4,2,0,1,1,4,0,2 +30,4,4,3,1,1,2,1,0,2 +34,4,4,4,1,1,2,4,0,2 +26,3,4,3,1,1,2,3,0,2 +44,1,4,2,1,1,1,3,0,2 +29,4,4,3,1,1,1,4,0,2 +26,3,4,2,1,1,3,3,0,2 +41,4,4,8,1,1,1,4,0,2 +32,3,3,4,1,0,3,3,0,2 +23,3,1,1,1,0,3,3,0,2 +38,2,3,4,1,1,1,2,0,2 +22,4,4,1,1,0,2,2,0,2 +34,3,4,3,0,1,1,4,0,2 +38,3,4,7,0,1,2,3,0,2 +21,4,4,1,1,1,1,4,0,2 +43,1,3,4,1,1,1,4,0,2 +44,3,3,9,1,1,1,1,0,2 +24,4,4,1,1,1,2,4,0,2 +27,4,4,1,1,0,3,3,0,2 +34,3,4,4,1,1,3,3,0,2 +29,3,4,2,1,1,1,4,0,2 +26,4,4,1,1,0,2,3,0,2 +37,4,4,7,1,1,1,4,0,2 +21,3,4,3,1,1,1,4,0,2 +47,4,4,4,0,1,2,4,0,2 +41,4,4,3,1,0,2,3,0,2 +29,3,4,3,1,1,1,3,0,2 +25,4,4,1,1,0,1,4,0,2 +33,4,4,2,0,0,1,3,0,2 +23,4,4,1,0,1,1,4,0,2 +32,4,4,4,1,1,2,3,0,2 +45,4,4,3,1,1,1,4,0,2 +37,4,4,4,1,1,1,4,0,2 +35,2,4,4,0,1,3,2,0,2 +37,4,4,3,1,1,3,4,0,2 +35,4,4,5,1,1,2,4,0,2 +23,4,4,1,1,1,3,3,0,2 +38,2,2,8,1,1,2,2,0,2 +41,4,4,4,0,0,2,4,0,2 +35,1,2,9,1,0,3,1,0,2 +29,4,4,3,1,1,3,2,0,2 +26,3,3,2,1,1,1,3,0,2 +22,3,3,2,1,1,3,3,0,2 +30,2,3,5,1,1,3,1,1,2 +36,4,4,3,1,1,3,3,0,2 +26,3,4,3,1,1,1,4,0,2 +32,4,4,2,1,0,3,4,0,2 +39,4,4,3,0,1,2,4,0,2 +29,3,4,3,1,1,1,3,0,2 +40,4,4,5,1,1,1,3,0,2 +43,3,3,6,1,0,3,2,0,2 +44,3,4,6,1,1,3,2,0,2 +24,4,4,2,1,1,2,4,0,2 +42,4,4,3,1,0,1,4,0,2 +33,3,4,2,1,1,1,4,0,2 +24,4,4,1,0,1,2,3,0,2 +28,3,4,2,0,1,2,3,0,2 +32,2,1,5,1,0,2,2,0,2 +26,2,2,4,1,1,3,4,0,2 +27,4,4,1,1,1,1,4,0,2 +41,3,4,3,0,1,2,4,0,2 +28,3,4,2,0,1,3,3,0,2 +23,4,4,1,1,0,3,4,0,2 +31,2,2,4,1,1,4,3,1,2 +33,4,4,3,0,1,2,4,0,2 +35,4,4,5,1,1,1,4,0,2 +39,3,4,5,1,1,3,4,0,2 +27,4,4,2,1,1,1,3,0,2 +48,4,4,3,0,1,1,4,0,2 +25,3,1,2,1,0,3,4,0,2 +38,4,4,4,1,1,2,4,0,2 +48,4,4,7,1,0,1,4,0,2 +34,4,4,4,1,0,3,3,0,2 +36,3,3,4,1,1,1,3,0,2 +34,4,4,3,1,0,3,4,0,2 +42,4,4,4,0,0,2,4,0,2 +45,3,3,5,0,1,1,3,0,2 +37,4,4,4,0,1,2,4,0,2 +38,4,4,3,0,0,2,4,0,2 +42,3,3,4,0,1,3,4,0,2 
+42,2,4,5,0,1,2,3,0,2 +41,4,4,6,1,1,1,4,0,2 +43,4,4,5,0,1,1,4,0,2 +34,4,4,2,0,0,3,2,0,2 +44,2,3,1,0,1,2,4,0,2 +41,4,4,4,0,1,1,4,0,2 +36,4,4,3,1,1,1,4,0,2 +34,1,2,5,1,1,3,4,0,2 +32,3,2,5,1,1,3,3,0,3 +34,3,3,6,1,1,1,4,0,3 +24,4,4,2,1,0,3,3,0,3 +21,3,3,1,0,1,3,2,0,3 +38,4,3,9,1,1,3,3,0,3 +39,4,4,3,1,0,1,4,0,3 +35,2,2,6,1,0,2,2,1,3 +30,2,3,7,1,1,3,3,0,3 +17,3,4,1,1,1,3,3,0,3 +32,2,4,3,1,1,3,4,0,3 +43,4,4,5,0,1,1,4,0,3 +31,2,2,4,1,1,3,3,0,3 +24,1,2,3,1,1,3,2,0,3 +26,2,4,3,1,1,1,3,0,3 +35,3,4,4,1,0,1,4,0,3 +46,3,4,4,1,0,3,4,0,3 +29,2,3,3,1,1,3,3,0,3 +21,3,4,1,1,0,3,4,0,3 +31,4,4,5,1,0,1,4,0,3 +24,4,4,2,1,1,3,3,0,3 +24,2,4,3,1,1,3,3,0,3 +36,4,4,4,1,1,1,3,0,3 +37,2,3,9,1,1,3,3,0,3 +37,3,3,6,1,1,3,3,0,3 +37,3,3,11,1,1,1,2,0,3 +44,4,4,4,0,1,2,3,0,3 +35,2,3,6,1,1,2,3,0,3 +25,4,4,3,1,1,3,3,0,3 +21,4,4,2,1,1,2,3,0,3 +24,1,2,3,1,1,3,2,0,3 +16,2,3,1,1,1,3,1,0,3 +21,3,2,1,1,1,3,1,1,3 +25,2,4,2,1,1,2,4,0,3 +26,4,4,1,1,0,2,4,0,3 +43,4,4,2,1,1,1,4,0,3 +24,4,4,1,1,1,1,4,0,3 +30,3,4,3,0,1,3,3,0,3 +30,2,3,3,1,1,3,3,0,3 +26,4,4,2,1,1,3,3,0,3 +29,2,4,4,1,1,3,2,0,3 +24,3,4,4,1,1,3,1,0,3 +29,3,3,2,1,1,3,2,0,3 +29,1,2,1,1,1,3,3,0,3 +22,4,4,1,1,1,2,4,0,3 +30,3,3,4,1,0,3,2,0,3 +24,4,4,1,1,1,2,3,0,3 +25,4,4,2,1,1,1,2,0,3 +44,4,4,5,1,0,1,4,0,3 +28,2,3,3,1,1,2,4,0,3 +22,3,3,1,1,1,3,2,0,3 +30,2,2,6,1,0,2,3,0,3 +25,3,4,3,1,1,2,4,0,3 +25,4,4,2,1,1,3,3,0,3 +33,4,4,2,1,0,1,4,0,3 +33,3,3,4,1,0,2,3,0,3 +26,3,3,2,1,0,2,2,0,3 +29,3,4,5,1,1,3,3,0,3 +32,3,3,4,1,1,2,1,0,3 +30,4,4,1,1,0,3,2,0,3 +37,4,4,1,0,1,1,4,0,3 +31,1,2,4,1,1,3,3,0,3 +36,1,2,4,1,1,3,1,1,3 +36,2,2,7,1,0,2,2,1,3 +26,3,4,2,1,0,2,4,0,3 +36,4,4,3,1,1,1,4,0,3 +28,4,4,1,1,1,2,4,0,3 +33,4,4,5,1,1,3,3,0,3 +25,3,4,2,1,0,2,4,0,3 +31,4,4,2,0,1,1,4,0,3 +38,4,4,4,1,0,1,4,0,3 +35,1,2,5,1,1,3,3,0,3 +33,4,4,3,0,0,2,4,0,3 +30,2,2,2,1,1,3,2,0,3 +20,3,3,2,1,1,2,2,1,3 +18,3,3,1,1,1,2,2,0,3 +30,3,2,3,1,1,3,3,0,3 +35,2,3,6,1,0,3,2,0,3 +22,3,3,2,1,1,3,1,0,3 +28,2,4,7,1,1,3,3,0,3 +21,3,3,1,1,1,3,1,0,3 +27,3,4,4,1,1,3,4,0,3 +30,3,4,3,1,1,3,3,0,3 +25,3,4,2,1,1,3,3,0,3 +26,4,4,3,1,1,3,2,0,3 +37,2,4,5,1,1,1,4,0,3 +47,1,3,9,1,0,3,3,0,3 +36,1,1,5,1,1,3,3,0,3 +36,2,3,6,1,1,2,4,0,3 +36,4,4,3,1,1,2,4,0,3 +28,4,4,4,1,1,3,4,0,3 +21,4,4,2,1,1,3,3,0,3 +25,4,4,2,1,1,2,3,0,3 +30,3,4,3,1,1,3,4,0,3 +22,3,3,3,1,1,3,3,0,3 +23,4,3,2,1,0,2,3,0,3 +36,3,4,9,1,1,3,4,0,3 +28,4,3,2,1,1,3,2,0,3 +46,2,2,5,1,1,2,4,0,3 +47,4,4,5,1,1,3,2,0,3 +41,1,4,5,1,0,2,4,0,3 +44,4,4,5,1,1,3,4,0,3 +34,2,3,3,0,1,3,2,1,3 +37,2,3,6,1,1,3,4,0,3 +34,4,4,2,0,1,1,4,0,3 +22,2,4,2,1,1,3,2,0,3 +33,2,3,4,1,1,2,3,0,3 +21,3,4,3,1,1,3,1,0,3 +31,4,4,4,1,0,3,4,0,3 +41,4,3,5,1,0,3,4,0,3 +30,4,3,4,1,1,3,3,0,3 +32,2,4,5,1,1,3,4,0,3 +27,4,4,2,0,0,2,4,0,3 +26,4,4,1,0,0,2,4,0,3 +42,4,4,5,0,1,2,4,0,3 +24,4,4,1,1,1,1,3,0,3 +37,4,4,6,1,1,1,4,0,3 +30,2,3,5,1,0,3,4,0,3 +27,4,4,1,1,1,1,4,0,3 +34,2,4,6,1,1,3,3,0,3 +32,3,4,5,0,1,1,4,0,3 +28,4,4,1,0,0,2,4,0,3 +29,3,3,1,0,0,2,4,0,3 +42,4,4,3,0,1,2,4,0,3 +21,2,2,0,0,1,4,4,0,3 +22,4,4,2,1,1,3,4,0,3 +36,4,4,3,0,1,3,4,0,3 +45,2,3,3,0,0,2,4,0,3 +34,4,4,2,0,0,2,4,0,3 +35,2,3,5,1,1,3,3,0,3 +19,4,4,1,1,1,2,3,0,3 +27,4,4,4,1,1,3,3,0,3 +37,2,4,4,1,0,2,3,0,3 +26,3,2,4,1,1,3,4,0,3 +28,1,3,5,1,1,3,4,0,3 +31,3,4,5,1,1,1,4,0,3 +32,2,2,6,1,1,2,3,0,3 +40,2,2,4,0,0,2,4,0,3 +27,3,3,5,1,1,2,4,0,3 +27,2,2,3,1,1,2,2,1,3 +37,4,4,5,0,0,2,4,0,3 +22,4,3,1,1,1,2,2,0,3 +27,4,4,4,1,1,1,2,1,3 +21,4,4,1,0,1,2,4,0,3 +30,1,3,2,1,1,3,4,0,3 +23,2,2,1,1,1,2,4,0,3 +25,2,4,3,1,1,1,3,0,3 +42,2,4,6,1,1,2,4,0,3 +29,4,4,3,1,1,1,4,0,3 +33,4,4,2,1,0,2,4,0,3 +33,4,4,3,1,1,1,4,0,3 +39,3,3,8,1,0,1,4,0,3 +33,3,3,4,1,0,2,2,0,3 +17,3,3,1,1,1,2,4,0,3 diff --git 
a/clustering_datasets/glass.csv b/clustering_datasets/glass.csv new file mode 100644 index 0000000..ea2849b --- /dev/null +++ b/clustering_datasets/glass.csv @@ -0,0 +1,215 @@ +RI,Na,Mg,Al,Si,K,Ca,Ba,Fe,Type +1.52101,13.64,4.49,1.1,71.78,0.06,8.75,0,0,1 +1.51761,13.89,3.6,1.36,72.73,0.48,7.83,0,0,1 +1.51618,13.53,3.55,1.54,72.99,0.39,7.78,0,0,1 +1.51766,13.21,3.69,1.29,72.61,0.57,8.22,0,0,1 +1.51742,13.27,3.62,1.24,73.08,0.55,8.07,0,0,1 +1.51596,12.79,3.61,1.62,72.97,0.64,8.07,0,0.26,1 +1.51743,13.3,3.6,1.14,73.09,0.58,8.17,0,0,1 +1.51756,13.15,3.61,1.05,73.24,0.57,8.24,0,0,1 +1.51918,14.04,3.58,1.37,72.08,0.56,8.3,0,0,1 +1.51755,13,3.6,1.36,72.99,0.57,8.4,0,0.11,1 +1.51571,12.72,3.46,1.56,73.2,0.67,8.09,0,0.24,1 +1.51763,12.8,3.66,1.27,73.01,0.6,8.56,0,0,1 +1.51589,12.88,3.43,1.4,73.28,0.69,8.05,0,0.24,1 +1.51748,12.86,3.56,1.27,73.21,0.54,8.38,0,0.17,1 +1.51763,12.61,3.59,1.31,73.29,0.58,8.5,0,0,1 +1.51761,12.81,3.54,1.23,73.24,0.58,8.39,0,0,1 +1.51784,12.68,3.67,1.16,73.11,0.61,8.7,0,0,1 +1.52196,14.36,3.85,0.89,71.36,0.15,9.15,0,0,1 +1.51911,13.9,3.73,1.18,72.12,0.06,8.89,0,0,1 +1.51735,13.02,3.54,1.69,72.73,0.54,8.44,0,0.07,1 +1.5175,12.82,3.55,1.49,72.75,0.54,8.52,0,0.19,1 +1.51966,14.77,3.75,0.29,72.02,0.03,9,0,0,1 +1.51736,12.78,3.62,1.29,72.79,0.59,8.7,0,0,1 +1.51751,12.81,3.57,1.35,73.02,0.62,8.59,0,0,1 +1.5172,13.38,3.5,1.15,72.85,0.5,8.43,0,0,1 +1.51764,12.98,3.54,1.21,73,0.65,8.53,0,0,1 +1.51793,13.21,3.48,1.41,72.64,0.59,8.43,0,0,1 +1.51721,12.87,3.48,1.33,73.04,0.56,8.43,0,0,1 +1.51768,12.56,3.52,1.43,73.15,0.57,8.54,0,0,1 +1.51784,13.08,3.49,1.28,72.86,0.6,8.49,0,0,1 +1.51768,12.65,3.56,1.3,73.08,0.61,8.69,0,0.14,1 +1.51747,12.84,3.5,1.14,73.27,0.56,8.55,0,0,1 +1.51775,12.85,3.48,1.23,72.97,0.61,8.56,0.09,0.22,1 +1.51753,12.57,3.47,1.38,73.39,0.6,8.55,0,0.06,1 +1.51783,12.69,3.54,1.34,72.95,0.57,8.75,0,0,1 +1.51567,13.29,3.45,1.21,72.74,0.56,8.57,0,0,1 +1.51909,13.89,3.53,1.32,71.81,0.51,8.78,0.11,0,1 +1.51797,12.74,3.48,1.35,72.96,0.64,8.68,0,0,1 +1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0,0,1 +1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0,0,1 +1.51793,12.79,3.5,1.12,73.03,0.64,8.77,0,0,1 +1.51755,12.71,3.42,1.2,73.2,0.59,8.64,0,0,1 +1.51779,13.21,3.39,1.33,72.76,0.59,8.59,0,0,1 +1.5221,13.73,3.84,0.72,71.76,0.17,9.74,0,0,1 +1.51786,12.73,3.43,1.19,72.95,0.62,8.76,0,0.3,1 +1.519,13.49,3.48,1.35,71.95,0.55,9,0,0,1 +1.51869,13.19,3.37,1.18,72.72,0.57,8.83,0,0.16,1 +1.52667,13.99,3.7,0.71,71.57,0.02,9.82,0,0.1,1 +1.52223,13.21,3.77,0.79,71.99,0.13,10.02,0,0,1 +1.51898,13.58,3.35,1.23,72.08,0.59,8.91,0,0,1 +1.5232,13.72,3.72,0.51,71.75,0.09,10.06,0,0.16,1 +1.51926,13.2,3.33,1.28,72.36,0.6,9.14,0,0.11,1 +1.51808,13.43,2.87,1.19,72.84,0.55,9.03,0,0,1 +1.51837,13.14,2.84,1.28,72.85,0.55,9.07,0,0,1 +1.51778,13.21,2.81,1.29,72.98,0.51,9.02,0,0.09,1 +1.51769,12.45,2.71,1.29,73.7,0.56,9.06,0,0.24,1 +1.51215,12.99,3.47,1.12,72.98,0.62,8.35,0,0.31,1 +1.51824,12.87,3.48,1.29,72.95,0.6,8.43,0,0,1 +1.51754,13.48,3.74,1.17,72.99,0.59,8.03,0,0,1 +1.51754,13.39,3.66,1.19,72.79,0.57,8.27,0,0.11,1 +1.51905,13.6,3.62,1.11,72.64,0.14,8.76,0,0,1 +1.51977,13.81,3.58,1.32,71.72,0.12,8.67,0.69,0,1 +1.52172,13.51,3.86,0.88,71.79,0.23,9.54,0,0.11,1 +1.52227,14.17,3.81,0.78,71.35,0,9.69,0,0,1 +1.52172,13.48,3.74,0.9,72.01,0.18,9.61,0,0.07,1 +1.52099,13.69,3.59,1.12,71.96,0.09,9.4,0,0,1 +1.52152,13.05,3.65,0.87,72.22,0.19,9.85,0,0.17,1 +1.52152,13.05,3.65,0.87,72.32,0.19,9.85,0,0.17,1 +1.52152,13.12,3.58,0.9,72.2,0.23,9.82,0,0.16,1 +1.523,13.31,3.58,0.82,71.99,0.12,10.17,0,0.03,1 
+1.51574,14.86,3.67,1.74,71.87,0.16,7.36,0,0.12,2 +1.51848,13.64,3.87,1.27,71.96,0.54,8.32,0,0.32,2 +1.51593,13.09,3.59,1.52,73.1,0.67,7.83,0,0,2 +1.51631,13.34,3.57,1.57,72.87,0.61,7.89,0,0,2 +1.51596,13.02,3.56,1.54,73.11,0.72,7.9,0,0,2 +1.5159,13.02,3.58,1.51,73.12,0.69,7.96,0,0,2 +1.51645,13.44,3.61,1.54,72.39,0.66,8.03,0,0,2 +1.51627,13,3.58,1.54,72.83,0.61,8.04,0,0,2 +1.51613,13.92,3.52,1.25,72.88,0.37,7.94,0,0.14,2 +1.5159,12.82,3.52,1.9,72.86,0.69,7.97,0,0,2 +1.51592,12.86,3.52,2.12,72.66,0.69,7.97,0,0,2 +1.51593,13.25,3.45,1.43,73.17,0.61,7.86,0,0,2 +1.51646,13.41,3.55,1.25,72.81,0.68,8.1,0,0,2 +1.51594,13.09,3.52,1.55,72.87,0.68,8.05,0,0.09,2 +1.51409,14.25,3.09,2.08,72.28,1.1,7.08,0,0,2 +1.51625,13.36,3.58,1.49,72.72,0.45,8.21,0,0,2 +1.51569,13.24,3.49,1.47,73.25,0.38,8.03,0,0,2 +1.51645,13.4,3.49,1.52,72.65,0.67,8.08,0,0.1,2 +1.51618,13.01,3.5,1.48,72.89,0.6,8.12,0,0,2 +1.5164,12.55,3.48,1.87,73.23,0.63,8.08,0,0.09,2 +1.51841,12.93,3.74,1.11,72.28,0.64,8.96,0,0.22,2 +1.51605,12.9,3.44,1.45,73.06,0.44,8.27,0,0,2 +1.51588,13.12,3.41,1.58,73.26,0.07,8.39,0,0.19,2 +1.5159,13.24,3.34,1.47,73.1,0.39,8.22,0,0,2 +1.51629,12.71,3.33,1.49,73.28,0.67,8.24,0,0,2 +1.5186,13.36,3.43,1.43,72.26,0.51,8.6,0,0,2 +1.51841,13.02,3.62,1.06,72.34,0.64,9.13,0,0.15,2 +1.51743,12.2,3.25,1.16,73.55,0.62,8.9,0,0.24,2 +1.51689,12.67,2.88,1.71,73.21,0.73,8.54,0,0,2 +1.51811,12.96,2.96,1.43,72.92,0.6,8.79,0.14,0,2 +1.51655,12.75,2.85,1.44,73.27,0.57,8.79,0.11,0.22,2 +1.5173,12.35,2.72,1.63,72.87,0.7,9.23,0,0,2 +1.5182,12.62,2.76,0.83,73.81,0.35,9.42,0,0.2,2 +1.52725,13.8,3.15,0.66,70.57,0.08,11.64,0,0,2 +1.5241,13.83,2.9,1.17,71.15,0.08,10.79,0,0,2 +1.52475,11.45,0,1.88,72.19,0.81,13.24,0,0.34,2 +1.53125,10.73,0,2.1,69.81,0.58,13.3,3.15,0.28,2 +1.53393,12.3,0,1,70.16,0.12,16.19,0,0.24,2 +1.52222,14.43,0,1,72.67,0.1,11.52,0,0.08,2 +1.51818,13.72,0,0.56,74.45,0,10.99,0,0,2 +1.52664,11.23,0,0.77,73.21,0,14.68,0,0,2 +1.52739,11.02,0,0.75,73.08,0,14.96,0,0,2 +1.52777,12.64,0,0.67,72.02,0.06,14.4,0,0,2 +1.51892,13.46,3.83,1.26,72.55,0.57,8.21,0,0.14,2 +1.51847,13.1,3.97,1.19,72.44,0.6,8.43,0,0,2 +1.51846,13.41,3.89,1.33,72.38,0.51,8.28,0,0,2 +1.51829,13.24,3.9,1.41,72.33,0.55,8.31,0,0.1,2 +1.51708,13.72,3.68,1.81,72.06,0.64,7.88,0,0,2 +1.51673,13.3,3.64,1.53,72.53,0.65,8.03,0,0.29,2 +1.51652,13.56,3.57,1.47,72.45,0.64,7.96,0,0,2 +1.51844,13.25,3.76,1.32,72.4,0.58,8.42,0,0,2 +1.51663,12.93,3.54,1.62,72.96,0.64,8.03,0,0.21,2 +1.51687,13.23,3.54,1.48,72.84,0.56,8.1,0,0,2 +1.51707,13.48,3.48,1.71,72.52,0.62,7.99,0,0,2 +1.52177,13.2,3.68,1.15,72.75,0.54,8.52,0,0,2 +1.51872,12.93,3.66,1.56,72.51,0.58,8.55,0,0.12,2 +1.51667,12.94,3.61,1.26,72.75,0.56,8.6,0,0,2 +1.52081,13.78,2.28,1.43,71.99,0.49,9.85,0,0.17,2 +1.52068,13.55,2.09,1.67,72.18,0.53,9.57,0.27,0.17,2 +1.5202,13.98,1.35,1.63,71.76,0.39,10.56,0,0.18,2 +1.52177,13.75,1.01,1.36,72.19,0.33,11.14,0,0,2 +1.52614,13.7,0,1.36,71.24,0.19,13.44,0,0.1,2 +1.51813,13.43,3.98,1.18,72.49,0.58,8.15,0,0,2 +1.518,13.71,3.93,1.54,71.81,0.54,8.21,0,0.15,2 +1.51811,13.33,3.85,1.25,72.78,0.52,8.12,0,0,2 +1.51789,13.19,3.9,1.3,72.33,0.55,8.44,0,0.28,2 +1.51806,13,3.8,1.08,73.07,0.56,8.38,0,0.12,2 +1.51711,12.89,3.62,1.57,72.96,0.61,8.11,0,0,2 +1.51674,12.79,3.52,1.54,73.36,0.66,7.9,0,0,2 +1.51674,12.87,3.56,1.64,73.14,0.65,7.99,0,0,2 +1.5169,13.33,3.54,1.61,72.54,0.68,8.11,0,0,2 +1.51851,13.2,3.63,1.07,72.83,0.57,8.41,0.09,0.17,2 +1.51662,12.85,3.51,1.44,73.01,0.68,8.23,0.06,0.25,2 +1.51709,13,3.47,1.79,72.72,0.66,8.18,0,0,2 +1.5166,12.99,3.18,1.23,72.97,0.58,8.81,0,0.24,2 
+1.51839,12.85,3.67,1.24,72.57,0.62,8.68,0,0.35,2 +1.51769,13.65,3.66,1.11,72.77,0.11,8.6,0,0,3 +1.5161,13.33,3.53,1.34,72.67,0.56,8.33,0,0,3 +1.5167,13.24,3.57,1.38,72.7,0.56,8.44,0,0.1,3 +1.51643,12.16,3.52,1.35,72.89,0.57,8.53,0,0,3 +1.51665,13.14,3.45,1.76,72.48,0.6,8.38,0,0.17,3 +1.52127,14.32,3.9,0.83,71.5,0,9.49,0,0,3 +1.51779,13.64,3.65,0.65,73,0.06,8.93,0,0,3 +1.5161,13.42,3.4,1.22,72.69,0.59,8.32,0,0,3 +1.51694,12.86,3.58,1.31,72.61,0.61,8.79,0,0,3 +1.51646,13.04,3.4,1.26,73.01,0.52,8.58,0,0,3 +1.51655,13.41,3.39,1.28,72.64,0.52,8.65,0,0,3 +1.52121,14.03,3.76,0.58,71.79,0.11,9.65,0,0,3 +1.51776,13.53,3.41,1.52,72.04,0.58,8.79,0,0,3 +1.51796,13.5,3.36,1.63,71.94,0.57,8.81,0,0.09,3 +1.51832,13.33,3.34,1.54,72.14,0.56,8.99,0,0,3 +1.51934,13.64,3.54,0.75,72.65,0.16,8.89,0.15,0.24,3 +1.52211,14.19,3.78,0.91,71.36,0.23,9.14,0,0.37,3 +1.51514,14.01,2.68,3.5,69.89,1.68,5.87,2.2,0,5 +1.51915,12.73,1.85,1.86,72.69,0.6,10.09,0,0,5 +1.52171,11.56,1.88,1.56,72.86,0.47,11.41,0,0,5 +1.52151,11.03,1.71,1.56,73.44,0.58,11.62,0,0,5 +1.51969,12.64,0,1.65,73.75,0.38,11.53,0,0,5 +1.51666,12.86,0,1.83,73.88,0.97,10.17,0,0,5 +1.51994,13.27,0,1.76,73.03,0.47,11.32,0,0,5 +1.52369,13.44,0,1.58,72.22,0.32,12.24,0,0,5 +1.51316,13.02,0,3.04,70.48,6.21,6.96,0,0,5 +1.51321,13,0,3.02,70.7,6.21,6.93,0,0,5 +1.52043,13.38,0,1.4,72.25,0.33,12.5,0,0,5 +1.52058,12.85,1.61,2.17,72.18,0.76,9.7,0.24,0.51,5 +1.52119,12.97,0.33,1.51,73.39,0.13,11.27,0,0.28,5 +1.51905,14,2.39,1.56,72.37,0,9.57,0,0,6 +1.51937,13.79,2.41,1.19,72.76,0,9.77,0,0,6 +1.51829,14.46,2.24,1.62,72.38,0,9.26,0,0,6 +1.51852,14.09,2.19,1.66,72.67,0,9.32,0,0,6 +1.51299,14.4,1.74,1.54,74.55,0,7.59,0,0,6 +1.51888,14.99,0.78,1.74,72.5,0,9.95,0,0,6 +1.51916,14.15,0,2.09,72.74,0,10.88,0,0,6 +1.51969,14.56,0,0.56,73.48,0,11.22,0,0,6 +1.51115,17.38,0,0.34,75.41,0,6.65,0,0,6 +1.51131,13.69,3.2,1.81,72.81,1.76,5.43,1.19,0,7 +1.51838,14.32,3.26,2.22,71.25,1.46,5.79,1.63,0,7 +1.52315,13.44,3.34,1.23,72.38,0.6,8.83,0,0,7 +1.52247,14.86,2.2,2.06,70.26,0.76,9.76,0,0,7 +1.52365,15.79,1.83,1.31,70.43,0.31,8.61,1.68,0,7 +1.51613,13.88,1.78,1.79,73.1,0,8.67,0.76,0,7 +1.51602,14.85,0,2.38,73.28,0,8.76,0.64,0.09,7 +1.51623,14.2,0,2.79,73.46,0.04,9.04,0.4,0.09,7 +1.51719,14.75,0,2,73.02,0,8.53,1.59,0.08,7 +1.51683,14.56,0,1.98,73.29,0,8.52,1.57,0.07,7 +1.51545,14.14,0,2.68,73.39,0.08,9.07,0.61,0.05,7 +1.51556,13.87,0,2.54,73.23,0.14,9.41,0.81,0.01,7 +1.51727,14.7,0,2.34,73.28,0,8.95,0.66,0,7 +1.51531,14.38,0,2.66,73.1,0.04,9.08,0.64,0,7 +1.51609,15.01,0,2.51,73.05,0.05,8.83,0.53,0,7 +1.51508,15.15,0,2.25,73.5,0,8.34,0.63,0,7 +1.51653,11.95,0,1.19,75.18,2.7,8.93,0,0,7 +1.51514,14.85,0,2.42,73.72,0,8.39,0.56,0,7 +1.51658,14.8,0,1.99,73.11,0,8.28,1.71,0,7 +1.51617,14.95,0,2.27,73.3,0,8.71,0.67,0,7 +1.51732,14.95,0,1.8,72.99,0,8.61,1.55,0,7 +1.51645,14.94,0,1.87,73.11,0,8.67,1.38,0,7 +1.51831,14.39,0,1.82,72.86,1.41,6.47,2.88,0,7 +1.5164,14.37,0,2.74,72.85,0,9.45,0.54,0,7 +1.51623,14.14,0,2.88,72.61,0.08,9.18,1.06,0,7 +1.51685,14.92,0,1.99,73.06,0,8.4,1.59,0,7 +1.52065,14.36,0,2.02,73.42,0,8.44,1.64,0,7 +1.51651,14.38,0,1.94,73.61,0,8.48,1.57,0,7 +1.51711,14.23,0,2.08,73.36,0,8.62,1.67,0,7 diff --git a/clustering_datasets/run_cluster.py b/clustering_datasets/run_cluster.py new file mode 100644 index 0000000..c289fec --- /dev/null +++ b/clustering_datasets/run_cluster.py @@ -0,0 +1,71 @@ +# encoding=utf8 +from typing import Union, List, Tuple +import sys +import json + +import random +import logging + +import pandas as pd +import numpy as np +from sklearn.datasets import 
load_iris, load_wine, load_breast_cancer, make_blobs +from sklearn.preprocessing import LabelEncoder +from sklearn.model_selection import train_test_split + +from NiaPy import Runner +from NiaPy.algorithms import Algorithm +from NiaPy.util import OptimizationType, TaskConvSave, groupdatabylabel, classifie, clusters2labels +from NiaPy.benchmarks import Clustering, ClusteringMin, ClusteringMinPenalty, ClusteringClassification + +from clusterargparser import getDictArgs + +logging.basicConfig() +logger = logging.getLogger('cec_run') +logger.setLevel('INFO') + +def save_example(alg: Algorithm, runs: int = 10, nFES: int = 1000, nGEN: int = np.inf, seed: List[int] = [None], optType: OptimizationType = OptimizationType.MINIMIZATION, dataset: str = 'iris', ofun: str = 'gc', sseed: int = 1, split: float = .3, wout: bool = True, **kwu: dict) -> None: + data, labels, noc = None, None, 4 + if dataset == 'iris': data, labels = load_iris(True); noc = len(np.unique(labels)) + elif dataset == 'cancer': data, labels = load_breast_cancer(True); noc = len(np.unique(labels)) + elif dataset == 'wine': data, labels = load_wine(True); noc = len(np.unique(labels)) + elif dataset == 'glass': df = pd.read_csv('glass.csv'); data, labels = df.iloc[:, :-1].values, df.iloc[:, -1].values; noc = len(np.unique(labels)) + elif dataset == 'cmc': df = pd.read_csv('cmc.csv'); data, labels = df.iloc[:, :-1].values, df.iloc[:, -1].values; noc = len(np.unique(labels)) + else: data, labels = make_blobs(n_samples=500, n_features=9, centers=noc, random_state=sseed) + Xt, Xv, yt, yv = train_test_split(data, labels, test_size=split, random_state=sseed) + lt = LabelEncoder().fit(labels); gl = groupdatabylabel(data, labels, lt) + bests, conv_it, conv_f, func = list(), list(), list(), None + if ofun == 'c': func = Clustering(Xt) + elif ofun == 'cm': func = ClusteringMin(Xt) + elif ofun == 'cmp': func = ClusteringMinPenalty(Xt) + elif ofun == 'cc': func = ClusteringClassification(Xt, yt) + else: sys.exit(2) + if seed == [None]: seed = list(range(1, runs + 1)) + for it in range(runs): + task = TaskConvSave(D=noc * len(data[0]), nFES=nFES, nGEN=nGEN, optType=optType, benchmark=func) + algo = alg(seed=seed[it % len(seed)]) + best = algo.run(task) + V = best[0].reshape([noc, len(data[0])]) + l, ok = clusters2labels(V, gl), 0 + for i, d in enumerate(Xv): ok += 1 if lt.inverse_transform([l[classifie(d, V)]])[0] == yv[i] else 0 + a = ok / len(Xv) + logger.info('%d. 
run:\n%s %s' % (it, V, a)) + bests.append((best[0], a)) + conv_it.append(task.evals) + conv_f.append(task.x_f_vals) + if wout: + with open('%s_%s_%s_args' % (algo.Name[-1], ofun, dataset), 'w') as file: file.write(json.dumps(algo.getParameters())) + bpos, bval = np.asarray([x[0] for x in bests]), np.asarray([x[1] for x in bests]) + np.savetxt('%s_%s_%s_p' % (algo.Name[-1], ofun, dataset), bpos) + np.savetxt('%s_%s_%s_v' % (algo.Name[-1], ofun, dataset), bval) + inds = [] + for i in range(runs): inds.append('evals'), inds.append('funvl') + data = [] + for i in range(runs): data.append(conv_it[i]), data.append(conv_f[i]) + pd.DataFrame(data, index=inds).T.to_csv('%s_%s_%s.csv' % (algo.Name[-1], ofun, dataset), sep=',', index=False) + +if __name__ == '__main__': + pargs = getDictArgs(sys.argv[1:]) + algo = Runner.getAlgorithm(pargs['algo']) + save_example(algo, **pargs) + +# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3 diff --git a/clustering_datasets/run_cluster_kmeans.py b/clustering_datasets/run_cluster_kmeans.py new file mode 100644 index 0000000..53ee2c5 --- /dev/null +++ b/clustering_datasets/run_cluster_kmeans.py @@ -0,0 +1,48 @@ +# encoding=utf8 +from typing import Union, List, Tuple +import logging +import sys + +import pandas as pd +import numpy as np +from sklearn.datasets import load_iris, load_wine, load_breast_cancer, make_blobs +from sklearn.preprocessing import LabelEncoder +from sklearn.model_selection import train_test_split +from sklearn.cluster import KMeans + +from NiaPy.util import classifie, clusters2labels, groupdatabylabel + +from clusterargparser import getDictArgs + +logging.basicConfig() +logger = logging.getLogger('cec_run') +logger.setLevel('INFO') + +def save_example(runs: int = 10, nFES: int = 1000, seed: List[int] = [None], dataset: str = 'iris', ofun: str = 'gc', sseed: int = 1, split: float = .3, wout: bool = True, **kwu: dict) -> None: + data, labels, noc = None, None, 4 + if dataset == 'iris': data, labels = load_iris(True); noc = len(np.unique(labels)) + elif dataset == 'cancer': data, labels = load_breast_cancer(True); noc = len(np.unique(labels)) + elif dataset == 'wine': data, labels = load_wine(True); noc = len(np.unique(labels)) + elif dataset == 'glass': df = pd.read_csv('glass.csv'); data, labels = df.iloc[:, :-1].values, df.iloc[:, -1].values; noc = len(np.unique(labels)) + elif dataset == 'cmc': df = pd.read_csv('cmc.csv'); data, labels = df.iloc[:, :-1].values, df.iloc[:, -1].values; noc = len(np.unique(labels)) + else: data, labels = make_blobs(n_samples=500, n_features=9, centers=noc, random_state=sseed) + Xt, Xv, yt, yv = train_test_split(data, labels, test_size=split, random_state=sseed) + lt = LabelEncoder().fit(labels); gl, bests = groupdatabylabel(data, labels, lt), [] + if seed == [None]: seed = list(range(1, runs + 1)) + for it in range(runs): + kmeans = KMeans(n_clusters=noc, init='random', n_init=100, max_iter=nFES, random_state=1, algorithm='full').fit(Xt) + C, lt = kmeans.cluster_centers_, LabelEncoder().fit(labels) + l, ok = clusters2labels(C, groupdatabylabel(Xt, yt, lt)), 0 + for i, d in enumerate(Xv): ok += 1 if lt.inverse_transform([l[classifie(d, C)]]) == yv[i] else 0 + logger.info('%d. 
run:\n%s %s' % (it, C, ok / len(Xv))) + bests.append([C.flatten(), ok / len(Xv)]) + if wout: + bpos, bval = np.asarray([x[0] for x in bests]), np.asarray([x[1] for x in bests]) + np.savetxt('KM_%s_%s_p' % (ofun, dataset), bpos) + np.savetxt('KM_%s_%s_v' % (ofun, dataset), bval) + +if __name__ == '__main__': + pargs = getDictArgs(sys.argv[1:]) + save_example(**pargs) + +# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3 diff --git a/optimize_KNN_parameters/run.py b/optimize_KNN_parameters/run.py index 44e7be3..2a02e9e 100644 --- a/optimize_KNN_parameters/run.py +++ b/optimize_KNN_parameters/run.py @@ -5,7 +5,7 @@ from sklearn.model_selection import StratifiedKFold from sklearn.model_selection import cross_validate from NiaPy.algorithms.modified import HybridBatAlgorithm -import pygal +import pygal KNN_WEIGHT_FUNCTIONS = [ 'uniform', @@ -36,13 +36,13 @@ def swap_algorithm(val): # map from real number [0, 1] to integer ranging [10, 50] def swap_leaf_size(val): - return int(val * 40 + 10) + return int(val * 10 + 40) class KNNBreastCancerBenchmark(object): def __init__(self): self.Lower = 0 self.Upper = 1 - + def function(self): # our definition of fitness function def evaluate(D, solution): @@ -67,7 +67,7 @@ def __init__(self, seed=1234): dataset = datasets.load_breast_cancer() self.X = dataset.data self.y = dataset.target - + self.X_search, self.X_validate, self.y_search, self.y_validate = train_test_split(self.X, self.y, test_size=0.8, random_state=self.seed) self.X_search_train, self.X_search_test, self.y_search_train, self.y_search_test = train_test_split(self.X_search, self.y_search, test_size=0.8, random_state=self.seed)