From 77f1ae62c032c5c4929106eaedede105f5029c98 Mon Sep 17 00:00:00 2001 From: William Wilkinson Date: Fri, 19 Mar 2021 10:28:58 +0200 Subject: [PATCH] create public repo --- .gitignore | 136 + LICENSE | 201 + README.md | 41 + dev-requirements.txt | 3 + newt/__init__.py | 8 + newt/colormaps/__init__.py | 27 + .../batlow/CategoricalPalettes/batlowS.alut | 100 + .../batlow/CategoricalPalettes/batlowS.clm | 201 + .../batlow/CategoricalPalettes/batlowS.clr | 102 + .../batlow/CategoricalPalettes/batlowS.cpt | 105 + .../batlow/CategoricalPalettes/batlowS.ct | 406 + .../batlow/CategoricalPalettes/batlowS.lut | 100 + .../batlow/CategoricalPalettes/batlowS.mat | Bin 0 -> 2499 bytes .../batlow/CategoricalPalettes/batlowS.pal | 101 + .../batlow/CategoricalPalettes/batlowS.py | 122 + .../batlow/CategoricalPalettes/batlowS.spk | 100 + .../batlow/CategoricalPalettes/batlowS.svg | 216 + .../batlow/CategoricalPalettes/batlowS.txt | 100 + .../batlow/CategoricalPalettes/batlowS.xcmap | 112 + .../CategoricalPalettes/batlowS_HEX.txt | 102 + .../CategoricalPalettes/batlowS_PARAVIEW.xml | 104 + .../CategoricalPalettes/batlowS_QGIS.xml | 13 + .../batlow/DiscretePalettes/batlow10.gpl | 14 + .../batlow/DiscretePalettes/batlow10.mat | Bin 0 -> 438 bytes .../batlow/DiscretePalettes/batlow10.spk | 10 + .../batlow/DiscretePalettes/batlow10.txt | 12 + .../batlow/DiscretePalettes/batlow100.gpl | 104 + .../batlow/DiscretePalettes/batlow100.mat | Bin 0 -> 2495 bytes .../batlow/DiscretePalettes/batlow100.spk | 100 + .../batlow/DiscretePalettes/batlow100.txt | 102 + .../batlow/DiscretePalettes/batlow25.gpl | 29 + .../batlow/DiscretePalettes/batlow25.mat | Bin 0 -> 800 bytes .../batlow/DiscretePalettes/batlow25.spk | 25 + .../batlow/DiscretePalettes/batlow25.txt | 27 + .../batlow/DiscretePalettes/batlow50.gpl | 54 + .../batlow/DiscretePalettes/batlow50.mat | Bin 0 -> 1369 bytes .../batlow/DiscretePalettes/batlow50.spk | 50 + .../batlow/DiscretePalettes/batlow50.txt | 52 + 
newt/colormaps/batlow/batlow.alut | 256 + newt/colormaps/batlow/batlow.clm | 201 + newt/colormaps/batlow/batlow.clr | 102 + newt/colormaps/batlow/batlow.cpt | 261 + newt/colormaps/batlow/batlow.ct | 1030 ++ newt/colormaps/batlow/batlow.lut | 256 + newt/colormaps/batlow/batlow.mat | Bin 0 -> 6002 bytes newt/colormaps/batlow/batlow.ncmap | 256 + newt/colormaps/batlow/batlow.pal | 257 + newt/colormaps/batlow/batlow.py | 278 + newt/colormaps/batlow/batlow.spk | 256 + newt/colormaps/batlow/batlow.svg | 528 + newt/colormaps/batlow/batlow.txt | 256 + newt/colormaps/batlow/batlow.xcmap | 268 + newt/colormaps/batlow/batlow_PARAVIEW.xml | 260 + newt/colormaps/batlow/batlow_QGIS.xml | 13 + .../berlin/DiscretePalettes/berlin10.gpl | 14 + .../berlin/DiscretePalettes/berlin10.mat | Bin 0 -> 437 bytes .../berlin/DiscretePalettes/berlin10.spk | 10 + .../berlin/DiscretePalettes/berlin10.txt | 12 + .../berlin/DiscretePalettes/berlin100.gpl | 104 + .../berlin/DiscretePalettes/berlin100.mat | Bin 0 -> 2512 bytes .../berlin/DiscretePalettes/berlin100.spk | 100 + .../berlin/DiscretePalettes/berlin100.txt | 102 + .../berlin/DiscretePalettes/berlin25.gpl | 29 + .../berlin/DiscretePalettes/berlin25.mat | Bin 0 -> 802 bytes .../berlin/DiscretePalettes/berlin25.spk | 25 + .../berlin/DiscretePalettes/berlin25.txt | 27 + .../berlin/DiscretePalettes/berlin50.gpl | 54 + .../berlin/DiscretePalettes/berlin50.mat | Bin 0 -> 1378 bytes .../berlin/DiscretePalettes/berlin50.spk | 50 + .../berlin/DiscretePalettes/berlin50.txt | 52 + newt/colormaps/berlin/berlin.alut | 256 + newt/colormaps/berlin/berlin.clm | 201 + newt/colormaps/berlin/berlin.clr | 102 + newt/colormaps/berlin/berlin.cpt | 261 + newt/colormaps/berlin/berlin.ct | 1030 ++ newt/colormaps/berlin/berlin.lut | 256 + newt/colormaps/berlin/berlin.mat | Bin 0 -> 6023 bytes newt/colormaps/berlin/berlin.ncmap | 256 + newt/colormaps/berlin/berlin.pal | 257 + newt/colormaps/berlin/berlin.py | 278 + newt/colormaps/berlin/berlin.spk | 256 + 
newt/colormaps/berlin/berlin.svg | 528 + newt/colormaps/berlin/berlin.txt | 256 + newt/colormaps/berlin/berlin.xcmap | 268 + newt/colormaps/berlin/berlin_PARAVIEW.xml | 260 + newt/colormaps/berlin/berlin_QGIS.xml | 13 + newt/colormaps/vik/DiscretePalettes/vik10.gpl | 14 + newt/colormaps/vik/DiscretePalettes/vik10.mat | Bin 0 -> 435 bytes newt/colormaps/vik/DiscretePalettes/vik10.spk | 10 + newt/colormaps/vik/DiscretePalettes/vik10.txt | 12 + .../colormaps/vik/DiscretePalettes/vik100.gpl | 104 + .../colormaps/vik/DiscretePalettes/vik100.mat | Bin 0 -> 2492 bytes .../colormaps/vik/DiscretePalettes/vik100.spk | 100 + .../colormaps/vik/DiscretePalettes/vik100.txt | 102 + newt/colormaps/vik/DiscretePalettes/vik25.gpl | 29 + newt/colormaps/vik/DiscretePalettes/vik25.mat | Bin 0 -> 804 bytes newt/colormaps/vik/DiscretePalettes/vik25.spk | 25 + newt/colormaps/vik/DiscretePalettes/vik25.txt | 27 + newt/colormaps/vik/DiscretePalettes/vik50.gpl | 54 + newt/colormaps/vik/DiscretePalettes/vik50.mat | Bin 0 -> 1369 bytes newt/colormaps/vik/DiscretePalettes/vik50.spk | 50 + newt/colormaps/vik/DiscretePalettes/vik50.txt | 52 + newt/colormaps/vik/vik.alut | 256 + newt/colormaps/vik/vik.clm | 201 + newt/colormaps/vik/vik.clr | 102 + newt/colormaps/vik/vik.cpt | 261 + newt/colormaps/vik/vik.ct | 1030 ++ newt/colormaps/vik/vik.lut | 256 + newt/colormaps/vik/vik.mat | Bin 0 -> 6003 bytes newt/colormaps/vik/vik.ncmap | 256 + newt/colormaps/vik/vik.pal | 257 + newt/colormaps/vik/vik.py | 278 + newt/colormaps/vik/vik.spk | 256 + newt/colormaps/vik/vik.svg | 528 + newt/colormaps/vik/vik.txt | 256 + newt/colormaps/vik/vik.xcmap | 268 + newt/colormaps/vik/vik_PARAVIEW.xml | 260 + newt/colormaps/vik/vik_QGIS.xml | 13 + newt/cubature.py | 341 + newt/data/TRI2TU-data.csv | 12929 ++++++++++++++++ newt/data/banana_X_train | 400 + newt/data/banana_Y_train | 400 + newt/data/banana_large.csv | 5301 +++++++ newt/data/coal.txt | 191 + newt/data/mcycle.csv | 133 + newt/experiments/aircraft/aircraft.py 
| 147 + newt/experiments/aircraft/aircraft.slrm | 21 + .../aircraft/aircraft_accidents.txt | 1210 ++ .../aircraft/aircraft_baselines.slrm | 21 + newt/experiments/aircraft/aircraft_test.py | 155 + newt/experiments/aircraft/results.py | 25 + newt/experiments/audio/audio.py | 181 + newt/experiments/audio/audio.slrm | 21 + newt/experiments/audio/audio5.py | 240 + newt/experiments/audio/audio_baseline.slrm | 21 + newt/experiments/audio/audio_varyM.py | 193 + newt/experiments/audio/create_txts.py | 9 + newt/experiments/audio/results.py | 25 + newt/experiments/audio/results_varyM.py | 184 + newt/experiments/audio/speech_female.mat | Bin 0 -> 170657 bytes newt/experiments/banana/banana.py | 115 + newt/experiments/banana/banana.slrm | 21 + newt/experiments/banana/banana_baseline.slrm | 21 + newt/experiments/banana/results.py | 25 + newt/experiments/binary/binary.py | 116 + newt/experiments/binary/binary.slrm | 21 + newt/experiments/binary/binary_baselines.slrm | 21 + newt/experiments/binary/results.py | 26 + newt/experiments/coal/binned.csv | 333 + newt/experiments/coal/coal.py | 116 + newt/experiments/coal/coal.slrm | 21 + newt/experiments/coal/coal_baseline.slrm | 21 + newt/experiments/coal/cvind.csv | 333 + newt/experiments/coal/results.py | 26 + newt/experiments/electricity/electricity.py | 147 + newt/experiments/electricity/electricity.slrm | 21 + .../electricity/electricity_baselines.slrm | 21 + newt/experiments/electricity/results.py | 25 + newt/experiments/motorcycle/cvind.csv | 133 + newt/experiments/motorcycle/motorcycle.py | 170 + newt/experiments/motorcycle/motorcycle.slrm | 21 + .../motorcycle/motorcycle_baselines.slrm | 21 + newt/experiments/motorcycle/results.py | 36 + newt/experiments/rainforest/rainforest.py | 98 + newt/experiments/rainforest/rainforest0.slrm | 21 + newt/experiments/rainforest/rainforest1.slrm | 21 + newt/experiments/rainforest/rainforest2.slrm | 21 + newt/experiments/rainforest/results.py | 77 + newt/experiments/rainforest/timings.py | 80 
+ newt/inference.py | 438 + newt/kernels.py | 1159 ++ newt/likelihoods.py | 927 ++ newt/models.py | 831 + newt/notebooks/classification.py | 95 + newt/notebooks/heteroscedastic.py | 125 + newt/notebooks/log_gaussian_cox_process.py | 95 + newt/notebooks/regression.py | 111 + newt/ops.py | 323 + newt/tests/spatiotemporal_test.py | 89 + newt/tests/test_gp_vs_markovgp_class.py | 130 + newt/tests/test_gp_vs_markovgp_reg.py | 144 + newt/tests/test_gp_vs_markovgp_spacetime.py | 196 + newt/tests/test_sparsemarkov.py | 334 + newt/tests/test_vs_exact_marg_lik.py | 70 + newt/tests/test_vs_gpflow_class.py | 179 + newt/tests/test_vs_gpflow_reg.py | 193 + newt/tests/test_vs_gpflow_shutters.py | 245 + newt/tests/test_vs_gpflow_spacetime.py | 268 + newt/utils.py | 615 + setup.py | 12 + 190 files changed, 48427 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md create mode 100644 dev-requirements.txt create mode 100644 newt/__init__.py create mode 100644 newt/colormaps/__init__.py create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.alut create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.clm create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.clr create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.cpt create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.ct create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.lut create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.mat create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.pal create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.py create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.spk create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.svg create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.txt create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS.xcmap create mode 100644 
newt/colormaps/batlow/CategoricalPalettes/batlowS_HEX.txt create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS_PARAVIEW.xml create mode 100644 newt/colormaps/batlow/CategoricalPalettes/batlowS_QGIS.xml create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow10.gpl create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow10.mat create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow10.spk create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow10.txt create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow100.gpl create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow100.mat create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow100.spk create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow100.txt create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow25.gpl create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow25.mat create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow25.spk create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow25.txt create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow50.gpl create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow50.mat create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow50.spk create mode 100644 newt/colormaps/batlow/DiscretePalettes/batlow50.txt create mode 100644 newt/colormaps/batlow/batlow.alut create mode 100644 newt/colormaps/batlow/batlow.clm create mode 100644 newt/colormaps/batlow/batlow.clr create mode 100644 newt/colormaps/batlow/batlow.cpt create mode 100644 newt/colormaps/batlow/batlow.ct create mode 100644 newt/colormaps/batlow/batlow.lut create mode 100644 newt/colormaps/batlow/batlow.mat create mode 100644 newt/colormaps/batlow/batlow.ncmap create mode 100644 newt/colormaps/batlow/batlow.pal create mode 100644 newt/colormaps/batlow/batlow.py create mode 100644 newt/colormaps/batlow/batlow.spk create mode 100644 newt/colormaps/batlow/batlow.svg create mode 
100644 newt/colormaps/batlow/batlow.txt create mode 100644 newt/colormaps/batlow/batlow.xcmap create mode 100644 newt/colormaps/batlow/batlow_PARAVIEW.xml create mode 100644 newt/colormaps/batlow/batlow_QGIS.xml create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin10.gpl create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin10.mat create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin10.spk create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin10.txt create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin100.gpl create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin100.mat create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin100.spk create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin100.txt create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin25.gpl create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin25.mat create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin25.spk create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin25.txt create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin50.gpl create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin50.mat create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin50.spk create mode 100644 newt/colormaps/berlin/DiscretePalettes/berlin50.txt create mode 100644 newt/colormaps/berlin/berlin.alut create mode 100644 newt/colormaps/berlin/berlin.clm create mode 100644 newt/colormaps/berlin/berlin.clr create mode 100644 newt/colormaps/berlin/berlin.cpt create mode 100644 newt/colormaps/berlin/berlin.ct create mode 100644 newt/colormaps/berlin/berlin.lut create mode 100644 newt/colormaps/berlin/berlin.mat create mode 100644 newt/colormaps/berlin/berlin.ncmap create mode 100644 newt/colormaps/berlin/berlin.pal create mode 100644 newt/colormaps/berlin/berlin.py create mode 100644 newt/colormaps/berlin/berlin.spk create mode 100644 newt/colormaps/berlin/berlin.svg create mode 100644 
newt/colormaps/berlin/berlin.txt create mode 100644 newt/colormaps/berlin/berlin.xcmap create mode 100644 newt/colormaps/berlin/berlin_PARAVIEW.xml create mode 100644 newt/colormaps/berlin/berlin_QGIS.xml create mode 100644 newt/colormaps/vik/DiscretePalettes/vik10.gpl create mode 100644 newt/colormaps/vik/DiscretePalettes/vik10.mat create mode 100644 newt/colormaps/vik/DiscretePalettes/vik10.spk create mode 100644 newt/colormaps/vik/DiscretePalettes/vik10.txt create mode 100644 newt/colormaps/vik/DiscretePalettes/vik100.gpl create mode 100644 newt/colormaps/vik/DiscretePalettes/vik100.mat create mode 100644 newt/colormaps/vik/DiscretePalettes/vik100.spk create mode 100644 newt/colormaps/vik/DiscretePalettes/vik100.txt create mode 100644 newt/colormaps/vik/DiscretePalettes/vik25.gpl create mode 100644 newt/colormaps/vik/DiscretePalettes/vik25.mat create mode 100644 newt/colormaps/vik/DiscretePalettes/vik25.spk create mode 100644 newt/colormaps/vik/DiscretePalettes/vik25.txt create mode 100644 newt/colormaps/vik/DiscretePalettes/vik50.gpl create mode 100644 newt/colormaps/vik/DiscretePalettes/vik50.mat create mode 100644 newt/colormaps/vik/DiscretePalettes/vik50.spk create mode 100644 newt/colormaps/vik/DiscretePalettes/vik50.txt create mode 100644 newt/colormaps/vik/vik.alut create mode 100644 newt/colormaps/vik/vik.clm create mode 100644 newt/colormaps/vik/vik.clr create mode 100644 newt/colormaps/vik/vik.cpt create mode 100644 newt/colormaps/vik/vik.ct create mode 100644 newt/colormaps/vik/vik.lut create mode 100644 newt/colormaps/vik/vik.mat create mode 100644 newt/colormaps/vik/vik.ncmap create mode 100644 newt/colormaps/vik/vik.pal create mode 100644 newt/colormaps/vik/vik.py create mode 100644 newt/colormaps/vik/vik.spk create mode 100644 newt/colormaps/vik/vik.svg create mode 100644 newt/colormaps/vik/vik.txt create mode 100644 newt/colormaps/vik/vik.xcmap create mode 100644 newt/colormaps/vik/vik_PARAVIEW.xml create mode 100644 
newt/colormaps/vik/vik_QGIS.xml create mode 100644 newt/cubature.py create mode 100644 newt/data/TRI2TU-data.csv create mode 100644 newt/data/banana_X_train create mode 100644 newt/data/banana_Y_train create mode 100644 newt/data/banana_large.csv create mode 100644 newt/data/coal.txt create mode 100644 newt/data/mcycle.csv create mode 100644 newt/experiments/aircraft/aircraft.py create mode 100644 newt/experiments/aircraft/aircraft.slrm create mode 100644 newt/experiments/aircraft/aircraft_accidents.txt create mode 100644 newt/experiments/aircraft/aircraft_baselines.slrm create mode 100644 newt/experiments/aircraft/aircraft_test.py create mode 100644 newt/experiments/aircraft/results.py create mode 100644 newt/experiments/audio/audio.py create mode 100644 newt/experiments/audio/audio.slrm create mode 100644 newt/experiments/audio/audio5.py create mode 100644 newt/experiments/audio/audio_baseline.slrm create mode 100644 newt/experiments/audio/audio_varyM.py create mode 100644 newt/experiments/audio/create_txts.py create mode 100644 newt/experiments/audio/results.py create mode 100644 newt/experiments/audio/results_varyM.py create mode 100644 newt/experiments/audio/speech_female.mat create mode 100644 newt/experiments/banana/banana.py create mode 100644 newt/experiments/banana/banana.slrm create mode 100644 newt/experiments/banana/banana_baseline.slrm create mode 100644 newt/experiments/banana/results.py create mode 100644 newt/experiments/binary/binary.py create mode 100644 newt/experiments/binary/binary.slrm create mode 100644 newt/experiments/binary/binary_baselines.slrm create mode 100644 newt/experiments/binary/results.py create mode 100644 newt/experiments/coal/binned.csv create mode 100644 newt/experiments/coal/coal.py create mode 100644 newt/experiments/coal/coal.slrm create mode 100644 newt/experiments/coal/coal_baseline.slrm create mode 100644 newt/experiments/coal/cvind.csv create mode 100644 newt/experiments/coal/results.py create mode 100644 
newt/experiments/electricity/electricity.py create mode 100644 newt/experiments/electricity/electricity.slrm create mode 100644 newt/experiments/electricity/electricity_baselines.slrm create mode 100644 newt/experiments/electricity/results.py create mode 100644 newt/experiments/motorcycle/cvind.csv create mode 100644 newt/experiments/motorcycle/motorcycle.py create mode 100644 newt/experiments/motorcycle/motorcycle.slrm create mode 100644 newt/experiments/motorcycle/motorcycle_baselines.slrm create mode 100644 newt/experiments/motorcycle/results.py create mode 100644 newt/experiments/rainforest/rainforest.py create mode 100644 newt/experiments/rainforest/rainforest0.slrm create mode 100644 newt/experiments/rainforest/rainforest1.slrm create mode 100644 newt/experiments/rainforest/rainforest2.slrm create mode 100644 newt/experiments/rainforest/results.py create mode 100644 newt/experiments/rainforest/timings.py create mode 100644 newt/inference.py create mode 100644 newt/kernels.py create mode 100644 newt/likelihoods.py create mode 100644 newt/models.py create mode 100644 newt/notebooks/classification.py create mode 100644 newt/notebooks/heteroscedastic.py create mode 100644 newt/notebooks/log_gaussian_cox_process.py create mode 100644 newt/notebooks/regression.py create mode 100644 newt/ops.py create mode 100644 newt/tests/spatiotemporal_test.py create mode 100644 newt/tests/test_gp_vs_markovgp_class.py create mode 100644 newt/tests/test_gp_vs_markovgp_reg.py create mode 100644 newt/tests/test_gp_vs_markovgp_spacetime.py create mode 100644 newt/tests/test_sparsemarkov.py create mode 100644 newt/tests/test_vs_exact_marg_lik.py create mode 100644 newt/tests/test_vs_gpflow_class.py create mode 100644 newt/tests/test_vs_gpflow_reg.py create mode 100644 newt/tests/test_vs_gpflow_shutters.py create mode 100644 newt/tests/test_vs_gpflow_spacetime.py create mode 100644 newt/utils.py create mode 100644 setup.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 
0000000..c7c9415 --- /dev/null +++ b/.gitignore @@ -0,0 +1,136 @@ +# This is a .gitignore file which aims to keep the git +# repository tidy by preventing the inclusion of different +# temporary or system files. + +# Exclude TeX temporary working files +*.acn +*.acr +*.alg +*.aux +*.bbl +*.blg +*.dvi +*.fdb_latexmk +*.glg +*.glo +*.gls +*.idx +*.ilg +*.ind +*.ist +*.lof +*.log +*.lot +*.maf +*.mtc +*.mtc0 +*.nav +*.nlo +*.out +*.pdfsync +*.ps +*.snm +*.synctex.gz +*.toc +*.vrb +*.xdy +*.tdo +*.dpth +*.auxlock +*.dep +*.brf +*.pdf + +# Exclude backup files freated by e.g. Matlab and Emacs +*~ + +# Exclude system specific thumbnail and other folder metadata +.DS_Store +.DS_Store? +._* +.Spotlight-V100 +.Trashes +Icon? +ehthumbs.db +Thumbs.db + +# Exclude externalisation results from tikz +tikz*.pdf +*.md5 +*.spl + +*.lock +*.bin +*.iml + +.DS_Store +newt/.idea* +newt/__pycache__* + +**/.ipynb_checkpoints/* +*.png +*.pickle +*.npy +*.pyc + +newt/data/aq_data.csv +newt/data/electricity.csv +newt/notebooks/output/* +newt/experiments/rainforest/output/* +newt/experiments/motorcycle/output/* +newt/experiments/coal/output/* +newt/experiments/banana/output/* +newt/experiments/binary/output/* +newt/experiments/audio/output/* +newt/experiments/airline/output/* +newt/experiments/aircraft/output/* +newt/experiments/electricity/output/* + +newt/experiments/banana/banana0.slrm +newt/experiments/banana/banana1.slrm +newt/experiments/banana/banana2.slrm +newt/experiments/banana/banana3.slrm +newt/experiments/banana/banana4.slrm +newt/experiments/banana/banana_baseline0.slrm +newt/experiments/banana/banana_baseline1.slrm +newt/experiments/banana/banana_baseline2.slrm +newt/experiments/banana/banana_baseline3.slrm +newt/experiments/banana/banana_baseline4.slrm + +newt/experiments/audio/audio0.slrm +newt/experiments/audio/audio1.slrm +newt/experiments/audio/audio2.slrm +newt/experiments/audio/audio3.slrm +newt/experiments/audio/audio4.slrm +newt/experiments/audio/audio5.slrm + 
+newt/experiments/aircraft/aircraft0.slrm +newt/experiments/aircraft/aircraft1.slrm +newt/experiments/aircraft/aircraft2.slrm +newt/experiments/aircraft/aircraft3.slrm +newt/experiments/aircraft/aircraft4.slrm +newt/experiments/aircraft/aircraft5.slrm + +newt/experiments/electricity/electricity0.slrm +newt/experiments/electricity/electricity1.slrm +newt/experiments/electricity/electricity2.slrm +newt/experiments/electricity/electricity3.slrm +newt/experiments/electricity/electricity4.slrm +newt/experiments/electricity/electricity5.slrm + +newt/experiments/electricity/electricity.csv + +newt/experiments/audio/audiovm0.slrm +newt/experiments/audio/audiovm1.slrm +newt/experiments/audio/audiovm2.slrm +newt/experiments/audio/audiovm3.slrm +newt/experiments/audio/audiovm4.slrm +newt/experiments/audio/audiovm5.slrm + +newt/experiments/banana/create_txts.py +newt/experiments/banana/banana_varyM.py +newt/experiments/banana/results_varyM.py + +newt.egg-info/* + +.idea/ diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..9096259 --- /dev/null +++ b/README.md @@ -0,0 +1,41 @@ +# Newt + + __ \/_ + (' \`\ + _\, \ \\/ + /`\/\ \\ + \ \\ + \ \\/\/_ + /\ \\'\ + __\ `\\\ + /|` `\\ + \\ + \\ + \\ , + `---' + +Newt is a Gaussian process library built in [JAX](https://github.com/google/jax) (with [objax](https://github.com/google/objax)). Newt differs from existing GP libraries in that it takes a unifying view of approximate Bayesian inference as variants of Newton's algorithm. This means that Newt encourages use of (and development of) many inference methods, rather than just focusing on VI. + +Newt currently provides the following models: + - GPs + - Sparse GPs + - Markov GPs (including spatio-temporal GPs) + - Sparse Markov GPs + - Infinite-horizon GPs + +with the following inference methods: + - Variational inference (with natural gradients) + - Power expectation propagation + - Laplace + - Posterior linearisation (i.e. classical nonlinear Kalman smoothers) + - Taylor (i.e. analytical linearisation / extended Kalman smoother) + +## Installation +In the top directory (Newt), run +```bash +pip install -e . +``` + +## License + +This software is provided under the Apache License 2.0. See the accompanying LICENSE file for details.
diff --git a/dev-requirements.txt b/dev-requirements.txt new file mode 100644 index 0000000..49493eb --- /dev/null +++ b/dev-requirements.txt @@ -0,0 +1,3 @@ +jax +jaxlib +objax \ No newline at end of file diff --git a/newt/__init__.py b/newt/__init__.py new file mode 100644 index 0000000..0627dda --- /dev/null +++ b/newt/__init__.py @@ -0,0 +1,8 @@ +from .kernels import * +from .utils import * +from .ops import * +from .likelihoods import * +from .models import * +from .inference import * +from .cubature import * +from .colormaps import batlow, berlin, vik diff --git a/newt/colormaps/__init__.py b/newt/colormaps/__init__.py new file mode 100644 index 0000000..2448383 --- /dev/null +++ b/newt/colormaps/__init__.py @@ -0,0 +1,27 @@ +""" + ScientificColourMaps5 + + Usage + ----- + import ScientificColourMaps5 as SCM5 + plt.imshow(data, cmap=SCM5.berlin) + + Available colourmaps + --------------------- + acton, bamako, batlow, berlin, bilbao, broc, buda, cork, davos, devon, + grayC, hawaii, imola, lajolla, lapaz, lisbon, nuuk, oleron, oslo, roma, + tofino, tokyo, turku, vik +""" +import os +import numpy as np +import matplotlib.pyplot as plt +from matplotlib.colors import LinearSegmentedColormap + +folder = os.path.abspath(os.path.dirname(os.path.abspath(__file__))) + +__all__ = {'batlow', 'berlin', 'vik'} + +for name in __all__: + file = os.path.join(folder, name, name + '.txt') + cm_data = np.loadtxt(file) + vars()[name] = LinearSegmentedColormap.from_list(name, cm_data) diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS.alut b/newt/colormaps/batlow/CategoricalPalettes/batlowS.alut new file mode 100644 index 0000000..3a7df83 --- /dev/null +++ b/newt/colormaps/batlow/CategoricalPalettes/batlowS.alut @@ -0,0 +1,100 @@ +1,25,89,255 +250,204,250,255 +130,130,49,255 +34,96,97,255 +241,157,107,255 +77,115,77,255 +17,67,96,255 +253,180,180,255 +192,144,54,255 +23,82,98,255 +252,191,214,255 +252,168,144,255 +53,106,89,255 +13,49,93,255 +161,138,43,255 
+103,123,62,255 +221,149,77,255 +8,37,91,255 +253,174,162,255 +251,198,232,255 +177,141,47,255 +207,147,64,255 +145,134,45,255 +43,101,94,255 +116,126,56,255 +19,75,97,255 +248,162,126,255 +15,59,95,255 +65,111,83,255 +27,89,98,255 +253,186,196,255 +90,119,69,255 +233,153,92,255 +96,121,66,255 +71,113,80,255 +250,165,135,255 +253,171,154,255 +21,79,98,255 +123,128,52,255 +109,124,59,255 +200,145,59,255 +227,151,84,255 +245,159,116,255 +25,86,98,255 +185,143,50,255 +169,140,44,255 +14,54,94,255 +16,63,96,255 +83,117,73,255 +251,201,241,255 +38,99,95,255 +153,136,44,255 +18,71,97,255 +253,183,188,255 +30,93,98,255 +138,132,47,255 +253,177,171,255 +252,195,223,255 +253,188,205,255 +48,104,92,255 +5,31,90,255 +11,43,92,255 +59,109,86,255 +214,148,70,255 +237,154,98,255 +68,112,82,255 +24,84,98,255 +36,97,96,255 +189,143,52,255 +120,127,54,255 +74,114,78,255 +253,175,167,255 +173,140,45,255 +157,137,43,255 +252,190,209,255 +40,100,95,255 +247,161,121,255 +243,158,112,255 +80,116,75,255 +149,135,44,255 +12,46,93,255 +10,40,92,255 +142,133,46,255 +252,169,149,255 +252,196,227,255 +17,65,96,255 +165,139,44,255 +106,123,61,255 +3,28,90,255 +56,108,88,255 +20,77,98,255 +253,172,158,255 +51,105,90,255 +181,142,48,255 +19,73,97,255 +210,147,67,255 +32,94,97,255 +230,152,88,255 +239,155,103,255 +251,166,140,255 diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS.clm b/newt/colormaps/batlow/CategoricalPalettes/batlowS.clm new file mode 100644 index 0000000..4f00133 --- /dev/null +++ b/newt/colormaps/batlow/CategoricalPalettes/batlowS.clm @@ -0,0 +1,201 @@ +1 25 89 +124 114 169 +248 202 248 +191 168 153 +131 131 54 +182 143 77 +238 156 105 +145 129 102 +42 98 97 +26 83 97 +18 68 96 +122 117 133 +239 173 175 +226 164 125 +194 147 62 +142 131 63 +85 116 75 +149 146 134 +236 184 202 +155 133 165 +37 63 105 +17 62 95 +22 78 97 +53 97 85 +92 117 67 +145 132 68 +204 145 73 +197 145 63 +167 139 47 +192 148 79 +238 163 129 +183 147 125 +85 116 98 +38 83 90 +16 49 91 +88 85 125 
+209 159 177 +252 190 207 +251 196 225 +252 191 211 +253 179 176 +247 168 141 +236 157 105 +215 149 77 +187 143 56 +151 135 53 +108 123 64 +70 107 77 +36 85 91 +25 82 96 +37 95 95 +36 91 94 +22 70 95 +18 66 96 +24 81 97 +77 105 104 +186 142 118 +219 154 111 +154 136 76 +121 128 54 +135 132 48 +155 136 49 +186 142 57 +179 139 67 +108 122 77 +81 115 77 +133 129 57 +181 145 65 +224 164 128 +224 167 157 +133 135 113 +75 114 78 +94 120 68 +95 113 64 +47 73 80 +12 48 93 +17 65 96 +44 87 96 +145 123 88 +206 146 83 +119 123 88 +67 112 103 +169 154 159 +233 177 196 +116 116 142 +34 73 107 +150 142 179 +245 197 228 +195 165 131 +156 137 47 +198 146 74 +238 156 104 +245 176 165 +244 191 219 +134 141 157 +32 94 97 +64 107 82 +95 120 67 +108 124 60 +122 128 53 +183 144 85 +244 159 116 +247 162 125 +250 165 135 +137 112 115 +20 57 95 +16 62 95 +18 70 97 +47 92 86 +79 114 75 +47 78 81 +9 36 89 +90 81 76 +185 138 60 +171 139 53 +141 133 48 +95 118 67 +46 102 91 +97 117 76 +169 138 53 +211 158 105 +246 179 173 +237 169 142 +217 152 83 +141 124 80 +48 94 94 +109 117 119 +222 160 146 +183 149 130 +87 118 96 +46 99 90 +28 84 96 +78 101 82 +160 133 59 +142 130 64 +75 112 83 +40 93 92 +24 76 96 +21 76 97 +27 87 98 +96 119 126 +207 165 173 +183 145 164 +65 80 115 +21 65 94 +34 89 95 +32 84 94 +17 54 93 +40 63 82 +97 107 62 +95 104 62 +34 54 80 +63 66 117 +187 145 172 +203 166 177 +96 122 125 +32 90 96 +22 74 96 +33 76 90 +69 103 77 +102 122 64 +130 130 52 +123 123 54 +59 93 80 +61 96 124 +176 160 198 +225 188 215 +149 148 126 +96 120 66 +89 118 70 +74 108 75 +40 78 87 +27 65 90 +72 99 74 +92 113 66 +47 75 81 +35 58 89 +129 107 73 +207 148 69 +232 158 108 +249 165 135 +237 157 108 +210 145 87 +109 112 93 +24 83 98 +22 80 98 +36 84 102 +152 131 132 +253 173 161 +252 182 186 +249 188 201 +220 165 125 +188 142 57 +107 117 77 +39 96 99 +148 137 134 +251 174 165 +191 152 106 +132 131 49 +121 128 54 +111 125 58 diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS.clr 
b/newt/colormaps/batlow/CategoricalPalettes/batlowS.clr new file mode 100644 index 0000000..8e7f963 --- /dev/null +++ b/newt/colormaps/batlow/CategoricalPalettes/batlowS.clr @@ -0,0 +1,102 @@ +ColorMap 2 1 +0 1 25 89 255 +1 248 202 248 255 +2 132 131 53 255 +3 37 97 96 255 +4 233 155 107 255 +5 85 117 79 255 +6 21 70 95 255 +7 236 172 174 255 +8 197 147 64 255 +9 38 88 94 255 +10 229 180 202 255 +11 252 171 152 255 +12 77 113 96 255 +13 18 56 92 255 +14 140 126 50 255 +15 112 125 59 255 +16 202 145 75 255 +17 44 56 89 255 +18 209 149 149 255 +19 251 193 219 255 +20 192 152 84 255 +21 201 146 60 255 +22 159 137 49 255 +23 66 109 83 255 +24 98 120 65 255 +25 43 88 87 255 +26 188 139 118 255 +27 78 87 103 255 +28 51 96 86 255 +29 38 95 94 255 +30 185 157 167 255 +31 141 140 108 255 +32 187 142 85 255 +33 141 132 75 255 +34 80 116 75 255 +35 187 147 116 255 +36 252 169 147 255 +37 107 113 119 255 +38 84 109 69 255 +39 114 126 56 255 +40 164 137 59 255 +41 216 149 74 255 +42 237 156 103 255 +43 120 117 106 255 +44 115 118 71 255 +45 176 141 47 255 +46 85 94 71 255 +47 15 59 95 255 +48 51 91 84 255 +49 169 160 159 255 +50 145 150 168 255 +51 94 117 70 255 +52 88 105 69 255 +53 128 124 140 255 +54 150 142 147 255 +55 79 111 75 255 +56 189 152 102 255 +57 253 185 193 255 +58 252 192 215 255 +59 169 154 159 255 +60 31 75 91 255 +61 7 36 91 255 +62 29 68 90 255 +63 116 123 80 255 +64 222 150 80 255 +65 178 139 92 255 +66 53 102 87 255 +67 28 88 97 255 +68 85 112 82 255 +69 168 138 53 255 +70 106 123 61 255 +71 126 132 104 255 +72 231 165 133 255 +73 169 139 44 255 +74 182 151 86 255 +75 199 168 181 255 +76 90 115 101 255 +77 246 160 119 255 +78 207 149 104 255 +79 94 120 68 255 +80 122 117 54 255 +81 12 45 93 255 +82 34 57 84 255 +83 161 139 64 255 +84 252 173 161 255 +85 217 176 207 255 +86 38 75 89 255 +87 157 137 46 255 +88 94 112 64 255 +89 9 37 90 255 +90 52 105 89 255 +91 41 86 103 255 +92 237 167 153 255 +93 60 108 87 255 +94 171 138 51 255 +95 29 77 96 255 +96 203 
145 68 255 +97 38 96 97 255 +98 230 152 88 255 +99 239 155 103 255 +100 251 166 140 255 diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS.cpt b/newt/colormaps/batlow/CategoricalPalettes/batlowS.cpt new file mode 100644 index 0000000..7e6bc7b --- /dev/null +++ b/newt/colormaps/batlow/CategoricalPalettes/batlowS.cpt @@ -0,0 +1,105 @@ +# +# batlowS +# www.fabiocrameri.ch/colourmaps +0.000000 1 25 89 0.010101 250 204 250 +0.010101 250 204 250 0.020202 130 130 49 +0.020202 130 130 49 0.030303 34 96 97 +0.030303 34 96 97 0.040404 241 157 107 +0.040404 241 157 107 0.050505 77 115 77 +0.050505 77 115 77 0.060606 17 67 96 +0.060606 17 67 96 0.070707 253 180 180 +0.070707 253 180 180 0.080808 192 144 54 +0.080808 192 144 54 0.090909 23 82 98 +0.090909 23 82 98 0.101010 252 191 214 +0.101010 252 191 214 0.111111 252 168 144 +0.111111 252 168 144 0.121212 53 106 89 +0.121212 53 106 89 0.131313 13 49 93 +0.131313 13 49 93 0.141414 161 138 43 +0.141414 161 138 43 0.151515 103 123 62 +0.151515 103 123 62 0.161616 221 149 77 +0.161616 221 149 77 0.171717 8 37 91 +0.171717 8 37 91 0.181818 253 174 162 +0.181818 253 174 162 0.191919 251 198 232 +0.191919 251 198 232 0.202020 177 141 47 +0.202020 177 141 47 0.212121 207 147 64 +0.212121 207 147 64 0.222222 145 134 45 +0.222222 145 134 45 0.232323 43 101 94 +0.232323 43 101 94 0.242424 116 126 56 +0.242424 116 126 56 0.252525 19 75 97 +0.252525 19 75 97 0.262626 248 162 126 +0.262626 248 162 126 0.272727 15 59 95 +0.272727 15 59 95 0.282828 65 111 83 +0.282828 65 111 83 0.292929 27 89 98 +0.292929 27 89 98 0.303030 253 186 196 +0.303030 253 186 196 0.313131 90 119 69 +0.313131 90 119 69 0.323232 233 153 92 +0.323232 233 153 92 0.333333 96 121 66 +0.333333 96 121 66 0.343434 71 113 80 +0.343434 71 113 80 0.353535 250 165 135 +0.353535 250 165 135 0.363636 253 171 154 +0.363636 253 171 154 0.373737 21 79 98 +0.373737 21 79 98 0.383838 123 128 52 +0.383838 123 128 52 0.393939 109 124 59 +0.393939 109 124 59 0.404040 200 145 
59 +0.404040 200 145 59 0.414141 227 151 84 +0.414141 227 151 84 0.424242 245 159 116 +0.424242 245 159 116 0.434343 25 86 98 +0.434343 25 86 98 0.444444 185 143 50 +0.444444 185 143 50 0.454545 169 140 44 +0.454545 169 140 44 0.464646 14 54 94 +0.464646 14 54 94 0.474747 16 63 96 +0.474747 16 63 96 0.484848 83 117 73 +0.484848 83 117 73 0.494949 251 201 241 +0.494949 251 201 241 0.505051 38 99 95 +0.505051 38 99 95 0.515152 153 136 44 +0.515152 153 136 44 0.525253 18 71 97 +0.525253 18 71 97 0.535354 253 183 188 +0.535354 253 183 188 0.545455 30 93 98 +0.545455 30 93 98 0.555556 138 132 47 +0.555556 138 132 47 0.565657 253 177 171 +0.565657 253 177 171 0.575758 252 195 223 +0.575758 252 195 223 0.585859 253 188 205 +0.585859 253 188 205 0.595960 48 104 92 +0.595960 48 104 92 0.606061 5 31 90 +0.606061 5 31 90 0.616162 11 43 92 +0.616162 11 43 92 0.626263 59 109 86 +0.626263 59 109 86 0.636364 214 148 70 +0.636364 214 148 70 0.646465 237 154 98 +0.646465 237 154 98 0.656566 68 112 82 +0.656566 68 112 82 0.666667 24 84 98 +0.666667 24 84 98 0.676768 36 97 96 +0.676768 36 97 96 0.686869 189 143 52 +0.686869 189 143 52 0.696970 120 127 54 +0.696970 120 127 54 0.707071 74 114 78 +0.707071 74 114 78 0.717172 253 175 167 +0.717172 253 175 167 0.727273 173 140 45 +0.727273 173 140 45 0.737374 157 137 43 +0.737374 157 137 43 0.747475 252 190 209 +0.747475 252 190 209 0.757576 40 100 95 +0.757576 40 100 95 0.767677 247 161 121 +0.767677 247 161 121 0.777778 243 158 112 +0.777778 243 158 112 0.787879 80 116 75 +0.787879 80 116 75 0.797980 149 135 44 +0.797980 149 135 44 0.808081 12 46 93 +0.808081 12 46 93 0.818182 10 40 92 +0.818182 10 40 92 0.828283 142 133 46 +0.828283 142 133 46 0.838384 252 169 149 +0.838384 252 169 149 0.848485 252 196 227 +0.848485 252 196 227 0.858586 17 65 96 +0.858586 17 65 96 0.868687 165 139 44 +0.868687 165 139 44 0.878788 106 123 61 +0.878788 106 123 61 0.888889 3 28 90 +0.888889 3 28 90 0.898990 56 108 88 +0.898990 56 108 88 0.909091 20 77 98 
+0.909091 20 77 98 0.919192 253 172 158 +0.919192 253 172 158 0.929293 51 105 90 +0.929293 51 105 90 0.939394 181 142 48 +0.939394 181 142 48 0.949495 19 73 97 +0.949495 19 73 97 0.959596 210 147 67 +0.959596 210 147 67 0.969697 32 94 97 +0.969697 32 94 97 0.979798 230 152 88 +0.979798 230 152 88 0.989899 239 155 103 +0.989899 239 155 103 1.000000 251 166 140 +N 255 255 255 +B 1 25 89 +F 251 166 140 diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS.ct b/newt/colormaps/batlow/CategoricalPalettes/batlowS.ct new file mode 100644 index 0000000..abd99e9 --- /dev/null +++ b/newt/colormaps/batlow/CategoricalPalettes/batlowS.ct @@ -0,0 +1,406 @@ + + + 1.11.0 + + + 1 25 89 255 + 0 + + + 250 204 250 255 + 0.010101 + + + 130 130 49 255 + 0.020202 + + + 34 96 97 255 + 0.030303 + + + 241 157 107 255 + 0.040404 + + + 77 115 77 255 + 0.050505 + + + 17 67 96 255 + 0.060606 + + + 253 180 180 255 + 0.070707 + + + 192 144 54 255 + 0.080808 + + + 23 82 98 255 + 0.090909 + + + 252 191 214 255 + 0.10101 + + + 252 168 144 255 + 0.11111 + + + 53 106 89 255 + 0.12121 + + + 13 49 93 255 + 0.13131 + + + 161 138 43 255 + 0.14141 + + + 103 123 62 255 + 0.15152 + + + 221 149 77 255 + 0.16162 + + + 8 37 91 255 + 0.17172 + + + 253 174 162 255 + 0.18182 + + + 251 198 232 255 + 0.19192 + + + 177 141 47 255 + 0.20202 + + + 207 147 64 255 + 0.21212 + + + 145 134 45 255 + 0.22222 + + + 43 101 94 255 + 0.23232 + + + 116 126 56 255 + 0.24242 + + + 19 75 97 255 + 0.25253 + + + 248 162 126 255 + 0.26263 + + + 15 59 95 255 + 0.27273 + + + 65 111 83 255 + 0.28283 + + + 27 89 98 255 + 0.29293 + + + 253 186 196 255 + 0.30303 + + + 90 119 69 255 + 0.31313 + + + 233 153 92 255 + 0.32323 + + + 96 121 66 255 + 0.33333 + + + 71 113 80 255 + 0.34343 + + + 250 165 135 255 + 0.35354 + + + 253 171 154 255 + 0.36364 + + + 21 79 98 255 + 0.37374 + + + 123 128 52 255 + 0.38384 + + + 109 124 59 255 + 0.39394 + + + 200 145 59 255 + 0.40404 + + + 227 151 84 255 + 0.41414 + + + 245 159 116 255 + 0.42424 + + + 25 
86 98 255 + 0.43434 + + + 185 143 50 255 + 0.44444 + + + 169 140 44 255 + 0.45455 + + + 14 54 94 255 + 0.46465 + + + 16 63 96 255 + 0.47475 + + + 83 117 73 255 + 0.48485 + + + 251 201 241 255 + 0.49495 + + + 38 99 95 255 + 0.50505 + + + 153 136 44 255 + 0.51515 + + + 18 71 97 255 + 0.52525 + + + 253 183 188 255 + 0.53535 + + + 30 93 98 255 + 0.54545 + + + 138 132 47 255 + 0.55556 + + + 253 177 171 255 + 0.56566 + + + 252 195 223 255 + 0.57576 + + + 253 188 205 255 + 0.58586 + + + 48 104 92 255 + 0.59596 + + + 5 31 90 255 + 0.60606 + + + 11 43 92 255 + 0.61616 + + + 59 109 86 255 + 0.62626 + + + 214 148 70 255 + 0.63636 + + + 237 154 98 255 + 0.64646 + + + 68 112 82 255 + 0.65657 + + + 24 84 98 255 + 0.66667 + + + 36 97 96 255 + 0.67677 + + + 189 143 52 255 + 0.68687 + + + 120 127 54 255 + 0.69697 + + + 74 114 78 255 + 0.70707 + + + 253 175 167 255 + 0.71717 + + + 173 140 45 255 + 0.72727 + + + 157 137 43 255 + 0.73737 + + + 252 190 209 255 + 0.74747 + + + 40 100 95 255 + 0.75758 + + + 247 161 121 255 + 0.76768 + + + 243 158 112 255 + 0.77778 + + + 80 116 75 255 + 0.78788 + + + 149 135 44 255 + 0.79798 + + + 12 46 93 255 + 0.80808 + + + 10 40 92 255 + 0.81818 + + + 142 133 46 255 + 0.82828 + + + 252 169 149 255 + 0.83838 + + + 252 196 227 255 + 0.84848 + + + 17 65 96 255 + 0.85859 + + + 165 139 44 255 + 0.86869 + + + 106 123 61 255 + 0.87879 + + + 3 28 90 255 + 0.88889 + + + 56 108 88 255 + 0.89899 + + + 20 77 98 255 + 0.90909 + + + 253 172 158 255 + 0.91919 + + + 51 105 90 255 + 0.92929 + + + 181 142 48 255 + 0.93939 + + + 19 73 97 255 + 0.94949 + + + 210 147 67 255 + 0.9596 + + + 32 94 97 255 + 0.9697 + + + 230 152 88 255 + 0.9798 + + + 239 155 103 255 + 0.9899 + + + 251 166 140 255 + 1 + + + \ No newline at end of file diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS.lut b/newt/colormaps/batlow/CategoricalPalettes/batlowS.lut new file mode 100644 index 0000000..ea4b95b --- /dev/null +++ b/newt/colormaps/batlow/CategoricalPalettes/batlowS.lut @@ -0,0 
+1,100 @@ +1 25 89 +250 204 250 +130 130 49 +34 96 97 +241 157 107 +77 115 77 +17 67 96 +253 180 180 +192 144 54 +23 82 98 +252 191 214 +252 168 144 +53 106 89 +13 49 93 +161 138 43 +103 123 62 +221 149 77 +8 37 91 +253 174 162 +251 198 232 +177 141 47 +207 147 64 +145 134 45 +43 101 94 +116 126 56 +19 75 97 +248 162 126 +15 59 95 +65 111 83 +27 89 98 +253 186 196 +90 119 69 +233 153 92 +96 121 66 +71 113 80 +250 165 135 +253 171 154 +21 79 98 +123 128 52 +109 124 59 +200 145 59 +227 151 84 +245 159 116 +25 86 98 +185 143 50 +169 140 44 +14 54 94 +16 63 96 +83 117 73 +251 201 241 +38 99 95 +153 136 44 +18 71 97 +253 183 188 +30 93 98 +138 132 47 +253 177 171 +252 195 223 +253 188 205 +48 104 92 +5 31 90 +11 43 92 +59 109 86 +214 148 70 +237 154 98 +68 112 82 +24 84 98 +36 97 96 +189 143 52 +120 127 54 +74 114 78 +253 175 167 +173 140 45 +157 137 43 +252 190 209 +40 100 95 +247 161 121 +243 158 112 +80 116 75 +149 135 44 +12 46 93 +10 40 92 +142 133 46 +252 169 149 +252 196 227 +17 65 96 +165 139 44 +106 123 61 +3 28 90 +56 108 88 +20 77 98 +253 172 158 +51 105 90 +181 142 48 +19 73 97 +210 147 67 +32 94 97 +230 152 88 +239 155 103 +251 166 140 diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS.mat b/newt/colormaps/batlow/CategoricalPalettes/batlowS.mat new file mode 100644 index 0000000000000000000000000000000000000000..ff6cb392dcdf7a0807827dbaa0fd18c986e6488f GIT binary patch literal 2499 zcmV;!2|V^qK~zjZLLfCRFd$7qR4ry{Y-KDUP;6mzW^ZzBIv`C!LrFF?EFeR2Wnpw> zWFT*DIv_@JX&_E-b|4@)ATc*OF)}(eG$1lCF*zV0ARr(hARr(hARr(hARr(hARr(h zARr(hARr(hARr(hARr(hARr(B00000000000ZB~{0000x2><|ioGq4lG!%*#$4B<9 z5;W6L-haknH7Xr5L!We1rPUN zzkm=2!b%I_ua#8vY5BU({!18kSV+S;0ZhI9Gj(11Q5U#QVPwq*$ky#k%e0?@Au00l zwv#ltjXw6U-~0yW%YARt$pV=CQ-K9FKEzCzIY*wnLw@YV_$S!{=$N0QXg(64%evh7 zOHC6J7&GFMwz&|=QB*x?z{94Nrsu{@T}Z#0y#G?+IC6ccbH?5oSjvmFaI6)eJ}u^g z)*S%~qt`0U>J8&JW!*XVrBPh{jqY&IYydPni8FzJlVO-#W`A}Y8}>=-om#!$VqX4@ ztDTkr=6oH$R{H|fPCR#?wr$76DrWkEb2%(@3Qky;37|SVL8L@7keFut!N_0|H=c29 
zV>DT~NU#vP+u8y9N@t}$Ujb(8v@hot3*e)>VJf|!gz_t1BBA3PjH{H{*x7{R2hlwO_w5LnIo2(BV|aD23C z_Ppk6$X1?T$eZj$fOoK^VW&_ibZT*O_|H_*#I}Xm#f-nFbgIC%-57Bw?@f z{kf96S#WygBW3%z6(;PZDuu2w9HLM5QAv{!oBoxlDAj>M-Urv+PfO6YKQFLFp$3^- zQVW!GN1$=txnr{w2W>rGJJ|QB_^c$Fe3~M_f$ulVwvmP)6cqeG`rRO+N{k*iz7jy+ zENCI?U>!~yRgAq`nnLODSvos*3Nnv;#`V-Y(RG25JX1CR-i;Hq7Ge%4>&54ywm-+? z#vx&W6+m{+Lm9s$0YYe6rdjE)VQ7`zBVNpdX7Kz=gY<5&<7Qo=;%t9@T%b@)<)0 ziS$%+V>P}%YuD`Tn!rTz+)?(bDcrhwnDZh+fcF-iDr#C~$TyxmGnqCDmSil^+PNPN zR=(YjMOYx;>bsqvGl=ldHXpq^da)%wKuM0(4q{OOQSJU6qh#C@YeCQeymfx0g(Y44NnLIau5^iZtIHMmAzhfW zi*9~V(uZPWx6C>+6*^L`Kl?iH&_a^*yXxDDw$**|4sAV{*)>cl@?ceZG!=uM~5l@`oe&r1h@pSjp$e2KkAApjz>|3|#0q7+zuvNbdBC{^c{Z%*}BvO>g6+Rct zctMdy1Q((ED)mT`%@`}^d%y2MtEy-n#3XUL6bVC482hJQrD? zE_x49JAk(I*`4FVIC)0*#HcY7#hU(fvy^_^d3(CEdgn0O|6sIU^5LT=kvi|0GXk$S zO)+H~x*;YOcxwDVRS3AP?<-9r!$>rusKdGkdZF-AzdwMM){8E~rv}lSv;B&_TrWa; zO6vr2bWDmJO*T2pf>RuIbj=SgLhM8;Waj(e(EOXT=KWrn(RU>k2n|Cx+SB)ZIv0sA zhzUP-a`A2a&IK3d2xtbbIlDJ?L9~@KM9|_PcC-A_V%s3*DaSk&*Rnx#R@kL8*AM*z z&eBJe>S0gT{hVr9hntN`G{2I5EdLp8`s=Z_Q`dmPbGPo@6o1~mWLS)*K{HiW1bB`v8Vi~wQo1d+P!nvb^JRY zhsXY%iakRb5oMtR5NE#dJ)FUWzY06nbM+uLZ8N{{CTtMIv9J7KE)ShwJ$JR8W*~0g zm~+fo0ibPD&#!0zy-li6*p>z6kB$7x>}~}86zivNKm+4e%B`@iER5__*{B>njKbrR zd%q|*LrW?0fqyy+t4eI1m9~$;*2=3lQgr}LpZ^Y1c4uJEaFwUOHxqf`F@CGaWQett z);l}2p;#qiqlOa=mW97u^_XO!_tm2kQ+FP?yM?4zt((Mj;2Q~wTRpxmC1k2?ZHKC= z)fUG93jU+?QJNCN1n<5yt0oWvw+fe0t+)Phs~B9s8Af%%KOV?3cAm& zq!;IzAUhwp*~ld0+)@BX?+6Q{VO2Ye9*m&6__d+y4HgXgY*L#8>cMobzhe_J1Yy>h zTH@3=EO&dGwv~>er;WI~S%?X@iZRYmE(7;os`rHMVB%}Glk(PAbZpqR>j}BA1J5=d zbP0LMLjLvD+18<*D9T)LyPnAcn{ZriNsWb<75$oIp(c>eJ<1Q3X$Jov>9BQ~Gf-@Jwbqp_2&z3<5|&(aqWw;IEmWLMdDwM<;Ia_yXP*)pF;f#mTF4b7qf6#wyE{{G(+IC1kq z#?hZxXetv8j-MvuvR06UAQ`}jw&R-jn!f(gkiZtq-6(0zVv&{%oVc<%cuCQ2j7jEf@ N%In8)@n7l<%5 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS.txt b/newt/colormaps/batlow/CategoricalPalettes/batlowS.txt new file mode 100644 index 0000000..15aced7 --- /dev/null +++ b/newt/colormaps/batlow/CategoricalPalettes/batlowS.txt @@ -0,0 +1,100 @@ +0.005193 0.098238 0.349842 +0.981354 0.800406 0.981267 +0.511253 0.510898 0.193296 +0.133298 0.375282 0.379395 +0.946612 0.614218 0.419767 +0.302379 0.450282 0.300122 +0.066899 0.263188 0.377594 +0.992900 0.704852 0.704114 +0.754268 0.565033 0.211761 +0.088353 0.322167 0.384731 +0.989089 0.750968 0.837979 +0.987567 0.658422 0.566226 +0.209075 0.417412 0.349677 +0.049378 0.191076 0.365810 +0.631513 0.540752 0.170075 +0.402968 0.480466 0.244731 +0.865168 0.585882 0.302255 +0.032053 0.146774 0.358239 +0.992595 0.681914 0.636869 +0.985980 0.775272 0.908448 +0.693720 0.553797 0.182610 +0.811692 0.575187 0.252572 +0.570016 0.526186 0.175273 +0.167952 0.397889 0.367784 +0.455774 0.495585 0.217774 +0.075833 0.293321 0.381922 +0.973424 0.635183 0.492547 +0.059164 0.229842 0.372252 +0.254452 0.434529 0.326434 +0.106842 0.349774 0.384548 +0.991367 0.727614 0.770270 +0.351976 0.465440 0.272492 +0.912746 0.599191 0.360986 +0.377291 0.472952 0.258588 +0.278171 0.442524 0.313552 +0.981918 0.646664 0.529602 +0.990926 0.670230 0.602031 +0.081553 0.307858 0.383598 +0.483123 0.503216 0.205037 +0.429094 0.488011 0.231096 +0.783416 0.570162 0.230962 +0.889900 0.592087 0.330454 +0.961696 0.624282 0.455702 +0.096618 0.336161 0.385134 +0.724322 0.559628 0.195408 +0.662691 0.547503 0.174044 +0.054721 0.211234 0.369184 +0.063071 0.247085 0.375050 +0.327007 0.457900 0.286377 +0.983913 0.787757 0.944626 +0.149706 0.386975 0.374449 +0.600520 0.533605 0.170648 +0.071115 0.278497 0.379895 +0.992258 0.716210 0.737146 +0.118992 0.362849 0.382713 
+0.540225 0.518584 0.183099 +0.993111 0.693451 0.670810 +0.987672 0.762996 0.872864 +0.990307 0.739184 0.803810 +0.187886 0.408003 0.359484 +0.019936 0.122985 0.354120 +0.042104 0.169557 0.362151 +0.231362 0.426197 0.338572 +0.838999 0.580339 0.276353 +0.928323 0.605212 0.385404 +0.266241 0.438555 0.320085 +0.092304 0.329220 0.385040 +0.141260 0.381240 0.377135 +0.739393 0.562386 0.203179 +0.469368 0.499393 0.211318 +0.290214 0.446420 0.306889 +0.992967 0.687705 0.653934 +0.678244 0.550712 0.177803 +0.615972 0.537231 0.169826 +0.989720 0.745039 0.820804 +0.158620 0.392531 0.371320 +0.967983 0.629639 0.474057 +0.954574 0.619137 0.437582 +0.314648 0.454107 0.293279 +0.585199 0.529927 0.172493 +0.045905 0.180460 0.364007 +0.037449 0.158313 0.360216 +0.555024 0.522391 0.178854 +0.989496 0.664329 0.584246 +0.986868 0.769105 0.890573 +0.064936 0.255264 0.376362 +0.647098 0.544183 0.171465 +0.415967 0.484225 0.237895 +0.012963 0.110779 0.351992 +0.220112 0.421864 0.344261 +0.078517 0.300622 0.382814 +0.991935 0.676091 0.619575 +0.198310 0.412798 0.354767 +0.709098 0.556772 0.188546 +0.073440 0.285942 0.380957 +0.825472 0.577725 0.264197 +0.125889 0.369160 0.381259 +0.901590 0.595507 0.345429 +0.937850 0.609582 0.402345 +0.985066 0.652522 0.547998 diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS.xcmap b/newt/colormaps/batlow/CategoricalPalettes/batlowS.xcmap new file mode 100644 index 0000000..5d30728 --- /dev/null +++ b/newt/colormaps/batlow/CategoricalPalettes/batlowS.xcmap @@ -0,0 +1,112 @@ + + + + + + scientific-batlowS + linear + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS_HEX.txt b/newt/colormaps/batlow/CategoricalPalettes/batlowS_HEX.txt new file mode 100644 index 0000000..82b077a --- /dev/null +++ 
b/newt/colormaps/batlow/CategoricalPalettes/batlowS_HEX.txt @@ -0,0 +1,102 @@ +Scientific Colour Map Categorical Palette +batlow 100 scrambled Swatches + 1 25 89 batlow-1 #011959 +250 204 250 batlow-256 #FACCFA +129 130 50 batlow-128 #818232 +241 157 107 batlow-192 #F19D6B + 34 96 97 batlow-65 #226061 + 17 67 96 batlow-33 #114360 +253 180 180 batlow-224 #FDB4B4 +190 144 53 batlow-160 #BE9035 + 76 114 77 batlow-96 #4C724D +252 191 214 batlow-240 #FCBFD6 + 13 49 93 batlow-17 #0D315D + 23 82 98 batlow-49 #175262 +101 122 63 batlow-112 #657A3F +219 149 75 batlow-176 #DB954B +159 137 43 batlow-144 #9F892B +252 168 144 batlow-208 #FCA890 + 53 106 89 batlow-81 #356A59 + 8 37 91 batlow-9 #08255B +253 186 196 batlow-232 #FDBAC4 +251 198 232 batlow-248 #FBC6E8 +253 174 162 batlow-216 #FDAEA2 +231 152 90 batlow-184 #E7985A +175 141 46 batlow-152 #AF8D2E + 88 118 70 batlow-104 #587646 + 19 75 97 batlow-41 #134B61 + 43 101 94 batlow-73 #2B655E + 15 59 95 batlow-25 #0F3B5F + 27 89 98 batlow-57 #1B5962 +248 162 126 batlow-200 #F8A27E +115 126 56 batlow-120 #737E38 +143 134 45 batlow-136 #8F862D +205 146 63 batlow-168 #CD923F + 63 110 84 batlow-88 #3F6E54 +167 139 44 batlow-148 #A78B2C +253 177 171 batlow-220 #FDB1AB + 69 112 81 batlow-92 #457051 +108 124 60 batlow-116 #6C7C3C + 11 43 92 batlow-13 #0B2B5C + 21 79 98 batlow-45 #154F62 +225 151 82 batlow-180 #E19752 + 48 104 92 batlow-77 #30685C +253 188 205 batlow-236 #FDBCCD + 16 63 96 batlow-29 #103F60 +251 201 241 batlow-252 #FBC9F1 +151 136 44 batlow-140 #97882C +237 154 98 batlow-188 #ED9A62 +252 195 223 batlow-244 #FCC3DF + 30 93 98 batlow-61 #1E5D62 + 95 120 67 batlow-108 #5F7843 +121 128 53 batlow-124 #798035 +245 159 116 batlow-196 #F59F74 +250 165 135 batlow-204 #FAA587 + 14 54 94 batlow-21 #0E365E + 18 71 97 batlow-37 #124761 + 82 116 74 batlow-100 #52744A + 5 31 90 batlow-5 #051F5A +198 145 58 batlow-164 #C6913A +136 132 47 batlow-132 #88842F + 38 99 95 batlow-69 #26635F +183 142 49 batlow-156 #B78E31 +253 183 188 
batlow-228 #FDB7BC +212 148 69 batlow-172 #D49445 + 25 86 98 batlow-53 #195662 +253 171 154 batlow-212 #FDAB9A + 59 109 86 batlow-85 #3B6D56 + 22 80 98 batlow-47 #165062 +187 143 51 batlow-158 #BB8F33 + 51 105 90 batlow-79 #33695A + 18 69 97 batlow-35 #124561 + 29 91 98 batlow-59 #1D5B62 +253 184 192 batlow-230 #FDB8C0 + 13 51 94 batlow-19 #0D335E + 40 100 95 batlow-71 #28645F + 10 40 92 batlow-11 #0A285C +125 129 52 batlow-126 #7D8134 + 3 28 90 batlow-3 #031C5A +253 187 201 batlow-234 #FDBBC9 + 36 97 96 batlow-67 #246160 + 17 65 96 batlow-31 #114160 + 91 119 69 batlow-106 #5B7745 +147 135 44 batlow-138 #93872C + 19 73 97 batlow-39 #134961 +251 202 246 batlow-254 #FBCAF6 + 98 121 65 batlow-110 #627941 + 85 117 72 batlow-102 #557548 + 15 56 95 batlow-23 #0F385F +104 123 62 batlow-114 #687B3E + 12 46 93 batlow-15 #0C2E5D +202 146 60 batlow-166 #CA923C +251 166 140 batlow-206 #FBA68C +228 151 86 batlow-182 #E49756 + 24 84 98 batlow-51 #185462 + 20 77 98 batlow-43 #144D62 +253 172 158 batlow-214 #FDAC9E +252 190 209 batlow-238 #FCBED1 +194 144 55 batlow-162 #C29037 + 32 94 97 batlow-63 #205E61 +253 175 167 batlow-218 #FDAFA7 +132 131 49 batlow-130 #848331 +111 125 58 batlow-118 #6F7D3A diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS_PARAVIEW.xml b/newt/colormaps/batlow/CategoricalPalettes/batlowS_PARAVIEW.xml new file mode 100644 index 0000000..3574229 --- /dev/null +++ b/newt/colormaps/batlow/CategoricalPalettes/batlowS_PARAVIEW.xml @@ -0,0 +1,104 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/newt/colormaps/batlow/CategoricalPalettes/batlowS_QGIS.xml b/newt/colormaps/batlow/CategoricalPalettes/batlowS_QGIS.xml new file mode 100644 index 0000000..ec818c3 --- /dev/null +++ b/newt/colormaps/batlow/CategoricalPalettes/batlowS_QGIS.xml @@ -0,0 +1,13 @@ + + 
+ + + + + + + + + + + diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow10.gpl b/newt/colormaps/batlow/DiscretePalettes/batlow10.gpl new file mode 100644 index 0000000..4f310d9 --- /dev/null +++ b/newt/colormaps/batlow/DiscretePalettes/batlow10.gpl @@ -0,0 +1,14 @@ +GIMP Palette +Name: batlow 10 Swatches +Columns: 1 +# + 1 25 89 batlow-1 #011959 + 16 63 96 batlow-29 #103F60 + 28 90 98 batlow-58 #1C5A62 + 60 109 86 batlow-86 #3C6D56 +104 123 62 batlow-114 #687B3E +157 137 43 batlow-143 #9D892B +210 147 67 batlow-171 #D29343 +248 161 123 batlow-199 #F8A17B +253 183 188 batlow-228 #FDB7BC +250 204 250 batlow-256 #FACCFA diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow10.mat b/newt/colormaps/batlow/DiscretePalettes/batlow10.mat new file mode 100644 index 0000000000000000000000000000000000000000..daf6689eccb674916e21c066f34782a71dfc1c6c GIT binary patch literal 438 zcmeZu4DoSvQZUssQ1EpO(M`+DN!3vZ$Vn_o%P-2c0*X0%nwjV*I2WZRmZYXAMnp+tfS(zFu7#SE^Di8}8V1UunmmkR1V`N~cnDaO}A>l$oN`2)c4=H&>(!`AH$K^c7Il%Ez;e}Kl|txRsYiFzcb&y zWv}agv(aAp#O!#f!gKTfUOedBtG4}(WVhF{!u;wSeT&yW^rx>0|91K9w%VOKAMV{# zj;_5sh57y4`KDT&rK6-BD z?SI#2xB7;^zq@(YLlYnfZrCm0z>jWFNOZ|GCR-^>5#vKd(2e_q?a|v) zwin0UivKw`tH$o zWFT*DIv_@JX&_E-b|4@)ATc*OF)}(eGaxcBF*zV0ARr(hARr(hARr(hARr(hARr(h zARr(hARr(hARr(hARr(hARr(B00000000000ZB~{0000t2><|ioGny&G!=*&6^e>b zBxI7LCPYR>2}-~3R{zt)n}q1xzn4wtug`GZe0uNt|@2`<68(kyZ^x}*ADTYw0~eu zp0YP`EX1)o)}aKoBDl9#=B8~c#$z3eJqlIDa8uqs`<(F_6PJ0nalbA>?o9HFyzONW z(eU%%KbU5z@g&7xyMHMsFK|K*(| zBGit0e@Qp1L$?vXlUHy(x@|_gYaAQ#8%v5vFKI%bLAm-jVhd;|qFYlGThS^a=;>?K zj@s!|hdILztee|U5#P`Wx^7Rwq;eNZ`*Y?37P}yj=r*Ogiwwcat6y?w$XI+lesYaW zFGM_#AG~5oLC{N@VZ?4KUfH~pJ!{_wK2{AeRIMLVQg4k6uMgleQ>wJod=T4r{4K7P zID~Tl%e;Y8G*sPctTgMOL-_QqG)_DNlPc%qVg#8;Y9{TeAu^$)DyJ72z=DltK-`nB zEUfYNiQiZ{4BkxfUu+JtaYLLQ**DKd_myzoNS_fD+!L0VR~^N}W7WFXIisK*^D=zk zK87RDD)2>i9H)goo_oU-;*D=>XViW6_Tmo zAlh3aQoW3W5;0mJqlkm>vcpNuIUG1&jy}vt7-&2ZCA&0v>T3G!i1N$wzYF zKCLL%9>PH--}f3jKMqDMz7XKT!FwIDl&t()6p%V^^UYSkV$-86jXTxwx@|ZW)J%kN 
zz4E80dUfy+aAEDd+yE0Ex1y@%MmSg-rO($j!}V%1$0hRvlqzO>rzx#4+E-++_n;k< z<+Pf>6cYHiP>Zt?IScwJb6f{N+C<-CK^nk^OhtcVkwG}sD`dKR58}K^NC>@f5Q`0`FNq5e zAzy+VarER6l0wzq^5hJm;%>fkJRc3m=}xyP`ZOp>Zr-8vf`)I?FK+eFXjtAM%J*24 zj?*JH>ajQIP!HX>{bw&7wRQdjR+0>yi2rWYslkAVno1GBD+6}1ZiPYj7`W9e(d|*j zfNxKpg;KDoO6jyAf=tKQG5UIfBSyM{>2wD0J2}aP?jvg^yA!%`trx zAU&T~nHq(5f<|;Lo4}E@vKurBbmjF`LA*`mgBu zGjq>9DvnhUx+0ya=pk(=;4!4)tmCjdQIHCuzwHD|YbgkjXy4cnL4o3##Y?BEN!ts@5mxb)r+)Ej5d$rO5F_X2GMGrf$|B{1)TRjOh z(Gvr9!|g~`)~Sk)YXeiLMfiDHE1a!krB3^Pz(3uAo|)P$NH-h)lIGtGO?C?HFQ+C5 ze|2cJt!%^=j}^T*aIb)N1MYO%4)V>_z-GYZbEKlV-X18i8v>kgW0 z$WIXtXc9;;$P@N^GTs3nv6}|TTf4xqZ~2}v*@dO0A>Sv=I8w2pA* J;C~>#(6VrW!!iH> literal 0 HcmV?d00001 diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow100.spk b/newt/colormaps/batlow/DiscretePalettes/batlow100.spk new file mode 100644 index 0000000..7b3c2d1 --- /dev/null +++ b/newt/colormaps/batlow/DiscretePalettes/batlow100.spk @@ -0,0 +1,100 @@ + 0.00 0.52 9.82 34.98 + 1.18 1.65 11.69 35.31 + 1.96 2.32 12.90 35.52 + 3.14 3.21 14.68 35.82 + 3.92 3.74 15.83 36.02 + 5.10 4.41 17.51 36.31 + 5.88 4.77 18.58 36.49 + 7.06 5.22 20.13 36.75 + 8.24 5.59 21.60 37.00 + 9.02 5.80 22.53 37.15 + 10.20 6.11 23.86 37.37 + 10.98 6.31 24.71 37.50 + 12.16 6.59 25.93 37.70 + 12.94 6.79 26.71 37.82 + 14.12 7.11 27.85 37.99 + 15.29 7.46 28.97 38.15 + 16.08 7.71 29.70 38.24 + 17.25 8.16 30.79 38.36 + 18.04 8.48 31.50 38.42 + 19.22 9.03 32.57 38.49 + 20.39 9.66 33.62 38.51 + 21.18 10.15 34.30 38.50 + 22.35 10.97 35.31 38.42 + 23.14 11.57 35.96 38.33 + 24.31 12.59 36.92 38.13 + 25.10 13.33 37.53 37.94 + 26.27 14.54 38.41 37.58 + 27.45 15.86 39.25 37.13 + 28.24 16.80 39.79 36.78 + 29.41 18.27 40.56 36.17 + 30.20 19.31 41.04 35.72 + 31.37 20.91 41.74 34.97 + 32.16 22.01 42.19 34.43 + 33.33 23.71 42.83 33.56 + 34.51 25.45 43.45 32.64 + 35.29 26.62 43.86 32.01 + 36.47 28.42 44.45 31.02 + 37.25 29.63 44.84 30.35 + 38.43 31.46 45.41 29.33 + 39.22 32.70 45.79 28.64 + 40.39 34.57 46.36 27.60 + 41.57 36.46 46.92 
26.55 + 42.35 37.73 47.30 25.86 + 43.53 39.65 47.86 24.82 + 44.31 40.95 48.24 24.13 + 45.49 42.91 48.80 23.11 + 46.27 44.24 49.18 22.44 + 47.45 46.25 49.75 21.45 + 48.63 48.31 50.32 20.50 + 49.41 49.71 50.71 19.90 + 50.59 51.84 51.28 19.06 + 51.37 53.29 51.67 18.55 + 52.55 55.50 52.24 17.89 + 53.73 57.76 52.81 17.38 + 54.51 59.28 53.18 17.14 + 55.69 61.60 53.72 16.98 + 56.47 63.15 54.08 17.01 + 57.65 65.49 54.59 17.26 + 58.43 67.05 54.91 17.57 + 59.61 69.37 55.38 18.26 + 60.78 71.67 55.82 19.19 + 61.57 73.19 56.10 19.92 + 62.75 75.43 56.50 21.18 + 63.53 76.89 56.76 22.10 + 64.71 79.06 57.14 23.62 + 65.49 80.47 57.39 24.70 + 66.67 82.55 57.77 26.42 + 67.84 84.57 58.17 28.26 + 68.63 85.87 58.44 29.56 + 69.80 87.77 58.89 31.61 + 70.59 88.99 59.21 33.05 + 71.76 90.72 59.73 35.31 + 72.55 91.81 60.11 36.90 + 73.73 93.32 60.74 39.38 + 74.90 94.66 61.42 41.98 + 75.69 95.46 61.91 43.76 + 76.86 96.49 62.69 46.49 + 77.65 97.08 63.24 48.33 + 78.82 97.81 64.09 51.11 + 79.61 98.19 64.67 52.96 + 80.78 98.64 65.55 55.71 + 81.96 98.95 66.43 58.42 + 82.75 99.09 67.02 60.20 + 83.92 99.23 67.90 62.83 + 84.71 99.28 68.48 64.54 + 85.88 99.31 69.35 67.08 + 87.06 99.30 70.20 69.58 + 87.84 99.28 70.77 71.24 + 89.02 99.23 71.62 73.71 + 89.80 99.18 72.19 75.37 + 90.98 99.11 73.05 77.86 + 91.76 99.06 73.63 79.54 + 92.94 98.97 74.50 82.08 + 94.12 98.88 75.39 84.66 + 94.90 98.80 76.00 86.41 + 96.08 98.69 76.91 89.06 + 96.86 98.60 77.53 90.84 + 98.04 98.45 78.46 93.55 + 98.82 98.33 79.09 95.37 +100.00 98.14 80.04 98.13 diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow100.txt b/newt/colormaps/batlow/DiscretePalettes/batlow100.txt new file mode 100644 index 0000000..cc05bc7 --- /dev/null +++ b/newt/colormaps/batlow/DiscretePalettes/batlow100.txt @@ -0,0 +1,102 @@ +Scientific Colour Map Categorical Palette +batlow 100 Swatches + 1 25 89 batlow-1 #011959 + 4 30 90 batlow-4 #041E5A + 6 33 91 batlow-6 #06215B + 8 37 91 batlow-9 #08255B + 10 40 92 batlow-11 #0A285C + 11 45 93 batlow-14 #0B2D5D 
+ 12 47 93 batlow-16 #0C2F5D + 13 51 94 batlow-19 #0D335E + 14 55 94 batlow-22 #0E375E + 15 57 95 batlow-24 #0F395F + 16 61 95 batlow-27 #103D5F + 16 63 96 batlow-29 #103F60 + 17 66 96 batlow-32 #114260 + 17 68 96 batlow-34 #114460 + 18 71 97 batlow-37 #124761 + 19 74 97 batlow-40 #134A61 + 20 76 98 batlow-42 #144C62 + 21 79 98 batlow-45 #154F62 + 22 80 98 batlow-47 #165062 + 23 83 98 batlow-50 #175362 + 25 86 98 batlow-53 #195662 + 26 87 98 batlow-55 #1A5762 + 28 90 98 batlow-58 #1C5A62 + 30 92 98 batlow-60 #1E5C62 + 32 94 97 batlow-63 #205E61 + 34 96 97 batlow-65 #226061 + 37 98 96 batlow-68 #256260 + 40 100 95 batlow-71 #28645F + 43 101 94 batlow-73 #2B655E + 47 103 92 batlow-76 #2F675C + 49 105 91 batlow-78 #31695B + 53 106 89 batlow-81 #356A59 + 56 108 88 batlow-83 #386C58 + 60 109 86 batlow-86 #3C6D56 + 65 111 83 batlow-89 #416F53 + 68 112 82 batlow-91 #447052 + 72 113 79 batlow-94 #48714F + 76 114 77 batlow-96 #4C724D + 80 116 75 batlow-99 #50744B + 83 117 73 batlow-101 #537549 + 88 118 70 batlow-104 #587646 + 93 120 68 batlow-107 #5D7844 + 96 121 66 batlow-109 #607942 +101 122 63 batlow-112 #657A3F +104 123 62 batlow-114 #687B3E +109 124 59 batlow-117 #6D7C3B +113 125 57 batlow-119 #717D39 +118 127 55 batlow-122 #767F37 +123 128 52 batlow-125 #7B8034 +127 129 51 batlow-127 #7F8133 +132 131 49 batlow-130 #848331 +136 132 47 batlow-132 #88842F +142 133 46 batlow-135 #8E852E +147 135 44 batlow-138 #93872C +151 136 44 batlow-140 #97882C +157 137 43 batlow-143 #9D892B +161 138 43 batlow-145 #A18A2B +167 139 44 batlow-148 #A78B2C +171 140 45 batlow-150 #AB8C2D +177 141 47 batlow-153 #B18D2F +183 142 49 batlow-156 #B78E31 +187 143 51 batlow-158 #BB8F33 +192 144 54 batlow-161 #C09036 +196 145 56 batlow-163 #C49138 +202 146 60 batlow-166 #CA923C +205 146 63 batlow-168 #CD923F +210 147 67 batlow-171 #D29343 +216 148 72 batlow-174 #D89448 +219 149 75 batlow-176 #DB954B +224 150 81 batlow-179 #E09651 +227 151 84 batlow-181 #E39754 +231 152 90 batlow-184 #E7985A +234 
153 94 batlow-186 #EA995E +238 155 100 batlow-189 #EE9B64 +241 157 107 batlow-192 #F19D6B +243 158 112 batlow-194 #F39E70 +246 160 119 batlow-197 #F6A077 +248 161 123 batlow-199 #F8A17B +249 163 130 batlow-202 #F9A382 +250 165 135 batlow-204 #FAA587 +252 167 142 batlow-207 #FCA78E +252 169 149 batlow-210 #FCA995 +253 171 154 batlow-212 #FDAB9A +253 173 160 batlow-215 #FDADA0 +253 175 165 batlow-217 #FDAFA5 +253 177 171 batlow-220 #FDB1AB +253 179 177 batlow-223 #FDB3B1 +253 180 182 batlow-225 #FDB4B6 +253 183 188 batlow-228 #FDB7BC +253 184 192 batlow-230 #FDB8C0 +253 186 199 batlow-233 #FDBAC7 +253 188 203 batlow-235 #FDBCCB +252 190 209 batlow-238 #FCBED1 +252 192 216 batlow-241 #FCC0D8 +252 194 220 batlow-243 #FCC2DC +252 196 227 batlow-246 #FCC4E3 +251 198 232 batlow-248 #FBC6E8 +251 200 239 batlow-251 #FBC8EF +251 202 243 batlow-253 #FBCAF3 +250 204 250 batlow-256 #FACCFA diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow25.gpl b/newt/colormaps/batlow/DiscretePalettes/batlow25.gpl new file mode 100644 index 0000000..0a35262 --- /dev/null +++ b/newt/colormaps/batlow/DiscretePalettes/batlow25.gpl @@ -0,0 +1,29 @@ +GIMP Palette +Name: batlow 25 Swatches +Columns: 1 +# + 1 25 89 batlow-1 #011959 + 10 42 92 batlow-12 #0A2A5C + 14 55 94 batlow-22 #0E375E + 17 67 96 batlow-33 #114360 + 20 78 98 batlow-44 #144E62 + 25 87 98 batlow-54 #195762 + 34 96 97 batlow-65 #226061 + 45 103 93 batlow-75 #2D675D + 60 109 86 batlow-86 #3C6D56 + 77 115 77 batlow-97 #4D734D + 93 120 68 batlow-107 #5D7844 +111 125 58 batlow-118 #6F7D3A +130 130 49 batlow-129 #828231 +149 135 44 batlow-139 #95872C +171 140 45 batlow-150 #AB8C2D +190 144 53 batlow-160 #BE9035 +210 147 67 batlow-171 #D29343 +228 151 86 batlow-182 #E49756 +241 157 107 batlow-192 #F19D6B +250 164 133 batlow-203 #FAA485 +253 172 158 batlow-214 #FDAC9E +253 180 180 batlow-224 #FDB4B4 +253 188 203 batlow-235 #FDBCCB +252 195 225 batlow-245 #FCC3E1 +250 204 250 batlow-256 #FACCFA diff --git 
a/newt/colormaps/batlow/DiscretePalettes/batlow25.mat b/newt/colormaps/batlow/DiscretePalettes/batlow25.mat new file mode 100644 index 0000000000000000000000000000000000000000..136a0af6f998962963a7bbc8be2b7beeb6f2ac5c GIT binary patch literal 800 zcmeZu4DoSvQZUssQ1EpO(M`+DN!3vZ$Vn_o%P-2c0*X0%nwjV*I2WZRmZYXAMnp+tfS(zFu7#SE^Di8}8V1UunmmkQU!NkB&F-Nz%TIUKQ!}0Y2 zexBhWMp{!B*=x;txix?CcX?muIWq!EEZRGiGFAE)30}Mtxg*CS@%l5Fsn52aTYb*f z=KX}nNB-_Iz06grd;jg`^WT3TFK6#`soFNv<<_FDb}#q;cg;_poptfzrp)B_HWe4+ z$vG=ia=%~yH*5d;`~4-JF10tbZ!h|gwPn%`XVs}+Il?wN+47q`JZ-*f_7gq+S#^0# zt;gKc-{?KIT+A2XcU;Qu|Ak(=E=A*u^$dJ_bgp$W2x|CeejFKxr>rl z)tN;lIwy<#f8g(Yq+-o)lSHv6^Zp1#Kb6tYudcIn*ITf@^ZDeM-5iJgmVe6fH?@1^ zb)Jt+D1+1N=gK^F0p*TUqUC}W*+w&0WvCg&3Tl(JeZ!VZ`$J<(>cLqui1huFeH=bK*LS8Ll|d_}`Myv_NI7^kyElb*M7 znQSeOq||oCU(XA-q?@)(&zb8I9Am4l;lL+psegCsvWAYdob zJ5I1&wy>{$B4}SKwQG9qE4~S77p7{Se|qiAf#W{GXS9BIRbPvAwVb-yBR|i>e%5-I z^+#^oE=}N*O$~d@Df#(Rby@G+VjJ<^5VA;L5s9^zq78l zA9!FK^LFj8#>aWKpWp7?VYP5uf!VvZ_4lVF*sPV7`grP{cjf#Cr|!-Sv9fPjw|DjG zqoPS3|7V=PKU3o9wgb`Hd#-b>`MPecNJpAM`KdEC_1lF0GWDCh%N6CTzN&j#R_x=E Q%*JWLeh>7&blUC!0C&G+f&c&j literal 0 HcmV?d00001 diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow25.spk b/newt/colormaps/batlow/DiscretePalettes/batlow25.spk new file mode 100644 index 0000000..42886f6 --- /dev/null +++ b/newt/colormaps/batlow/DiscretePalettes/batlow25.spk @@ -0,0 +1,25 @@ + 0.00 0.52 9.82 34.98 + 4.31 3.98 16.40 36.12 + 8.24 5.59 21.60 37.00 + 12.55 6.69 26.32 37.76 + 16.86 8.00 30.43 38.32 + 20.78 9.90 33.96 38.51 + 25.10 13.33 37.53 37.94 + 29.02 17.78 40.30 36.38 + 33.33 23.71 42.83 33.56 + 37.65 30.24 45.03 30.01 + 41.57 36.46 46.92 26.55 + 45.88 43.57 48.99 22.77 + 50.20 51.13 51.09 19.33 + 54.12 58.52 52.99 17.25 + 58.43 67.05 54.91 17.57 + 62.35 74.69 56.37 20.74 + 66.67 82.55 57.77 26.42 + 70.98 89.58 59.38 33.79 + 74.90 94.66 61.42 41.98 + 79.22 98.01 64.38 52.03 + 83.53 99.19 67.61 61.96 + 87.45 99.29 70.49 70.41 + 91.76 99.06 73.63 79.54 + 95.69 98.73 76.60 88.17 +100.00 
98.14 80.04 98.13 diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow25.txt b/newt/colormaps/batlow/DiscretePalettes/batlow25.txt new file mode 100644 index 0000000..f52842c --- /dev/null +++ b/newt/colormaps/batlow/DiscretePalettes/batlow25.txt @@ -0,0 +1,27 @@ +Scientific Colour Map Categorical Palette +batlow 25 Swatches + 1 25 89 batlow-1 #011959 + 10 42 92 batlow-12 #0A2A5C + 14 55 94 batlow-22 #0E375E + 17 67 96 batlow-33 #114360 + 20 78 98 batlow-44 #144E62 + 25 87 98 batlow-54 #195762 + 34 96 97 batlow-65 #226061 + 45 103 93 batlow-75 #2D675D + 60 109 86 batlow-86 #3C6D56 + 77 115 77 batlow-97 #4D734D + 93 120 68 batlow-107 #5D7844 +111 125 58 batlow-118 #6F7D3A +130 130 49 batlow-129 #828231 +149 135 44 batlow-139 #95872C +171 140 45 batlow-150 #AB8C2D +190 144 53 batlow-160 #BE9035 +210 147 67 batlow-171 #D29343 +228 151 86 batlow-182 #E49756 +241 157 107 batlow-192 #F19D6B +250 164 133 batlow-203 #FAA485 +253 172 158 batlow-214 #FDAC9E +253 180 180 batlow-224 #FDB4B4 +253 188 203 batlow-235 #FDBCCB +252 195 225 batlow-245 #FCC3E1 +250 204 250 batlow-256 #FACCFA diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow50.gpl b/newt/colormaps/batlow/DiscretePalettes/batlow50.gpl new file mode 100644 index 0000000..86b15fb --- /dev/null +++ b/newt/colormaps/batlow/DiscretePalettes/batlow50.gpl @@ -0,0 +1,54 @@ +GIMP Palette +Name: batlow 50 Swatches +Columns: 1 +# + 1 25 89 batlow-1 #011959 + 6 33 91 batlow-6 #06215B + 10 40 92 batlow-11 #0A285C + 13 49 93 batlow-17 #0D315D + 14 55 94 batlow-22 #0E375E + 16 61 95 batlow-27 #103D5F + 17 66 96 batlow-32 #114260 + 18 71 97 batlow-37 #124761 + 20 77 98 batlow-43 #144D62 + 22 81 98 batlow-48 #165162 + 25 86 98 batlow-53 #195662 + 28 90 98 batlow-58 #1C5A62 + 32 94 97 batlow-63 #205E61 + 38 99 95 batlow-69 #26635F + 44 102 93 batlow-74 #2C665D + 51 105 90 batlow-79 #33695A + 58 108 87 batlow-84 #3A6C57 + 65 111 83 batlow-89 #416F53 + 74 114 78 batlow-95 #4A724E + 82 116 74 batlow-100 #52744A + 90 119 69 
batlow-105 #5A7745 + 98 121 65 batlow-110 #627941 +106 123 61 batlow-115 #6A7B3D +116 126 56 batlow-121 #747E38 +125 129 52 batlow-126 #7D8134 +134 131 48 batlow-131 #868330 +143 134 45 batlow-136 #8F862D +155 137 43 batlow-142 #9B892B +165 139 44 batlow-147 #A58B2C +175 141 46 batlow-152 #AF8D2E +185 143 50 batlow-157 #B98F32 +194 144 55 batlow-162 #C29037 +205 146 63 batlow-168 #CD923F +214 148 70 batlow-173 #D69446 +222 150 79 batlow-178 #DE964F +230 152 88 batlow-183 #E69858 +237 154 98 batlow-188 #ED9A62 +243 158 112 batlow-194 #F39E70 +248 161 123 batlow-199 #F8A17B +250 165 135 batlow-204 #FAA587 +252 169 147 batlow-209 #FCA993 +253 172 158 batlow-214 #FDAC9E +253 177 171 batlow-220 #FDB1AB +253 180 182 batlow-225 #FDB4B6 +253 184 192 batlow-230 #FDB8C0 +253 188 203 batlow-235 #FDBCCB +252 191 214 batlow-240 #FCBFD6 +252 196 227 batlow-246 #FCC4E3 +251 200 239 batlow-251 #FBC8EF +250 204 250 batlow-256 #FACCFA diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow50.mat b/newt/colormaps/batlow/DiscretePalettes/batlow50.mat new file mode 100644 index 0000000000000000000000000000000000000000..d2b24e2d15e1fb02a9ad07ccdc5454c7ac2f1454 GIT binary patch literal 1369 zcmV-f1*ZB zWFT*DIv_@JX&_E-b|4@)ATc*OF)}(eGaxcBF*zV0ARr(hARr(hARr(hARr(hARr(h zARr(hARr(hARr(hARr(hARr(B00000000000ZB~{0002d1ONbdoGnpnFcb&?UT>Ax zg>l++rF40nD2`I!QC&1$uRV8Z!XYAhoJVwdjwC}wS9{KmV?B~cvGUjwt@m2X((XpP z*k!E^E3q+hotwLF=9}*^^ZojC2n2#ul|WEmRZYTw`Y+V}&vvU+S*6nd+0Qp3^m3H_ z&NZw4$?C3Z_V3sWha$0IOPDgNk&HG&jfE7eLfqkg&{*U45DkR9-uu*YtbK2ZovxKA z3+^@gEPMi&7LfyacNG|_-uacrG^qQs`n1iS!BAuNfr4I#?%I^5WrOF)r_l<`Oq#%u znTKVWGZ5GCSW9ZshQ*CZ+V*~(Fra@)UJ`eMF;(b0=hh4MP>{7Cn*)W5@y*m*1K2b_ zmC$yR3q6l)Gt&${%pw$J_J0k+;a>fbKuZC(UhQ*<^%{op<1@OdoDt~8C?{(ZMCcMM zq*PxO!{K1bkVefIBJZbvTmE4j!k&w>#nuyenj3iNfR7ZNB#R>Jrb+A(EWI>%BSS-$ zNZCjH4lC4Nvw=5LIQDvam_wUJoh0zJAZ-TZ{mcWBePwOg?)3$?JaVcY)jPLyxP|o3S&m9MY*Mp>!QR;M~2}fFO-sc=>fyx?A#5a*02)>jjKmULUcZ;fD z4y5%!Q=ea5O72DRY^&L{Qw3y~;1rJcSa#De=RW@+s()<#-78d!|L*EBeS6+ME%ndirq 
z+D8z3?M9xB=_tHudOmZ(qxd=b%7(q=qoC1J_C!lZF}XWvBIYX*evK!p<#~x<7lWX! z*&-Y};lFr~DZ)jfAT2;gjBZx=S&5ezX`A(z7TIF7$2HbxIgcSTL17jBbPP?#wJk~( zawRAj0~t(7#xM{bZ**jm8t`;F8>~XHGytQAwE5kb-i;VNW7O z3a8L`%Oc_=DjcWJO;aW@upF?9?azhm4~p?|Iu~fUHrl4mgR|siz{Ox5SZeVe#T7iv zo+MRf%6Mr0G~bo6nUC{JmxXpG_;^dK7O5xjVczc=(wNJ~7TM#&nk9Uw|M`%uLgvHe z1%-1vkPn-}gXj2V9bkJr7^-pA!nR-X0Q{v!~&MKUI!HA zV*6JmZE)`TIKSN5imrOKh|IZG80j`h4Yb;kb1KK%RHYN2SubNlpD?lg9mRGpg^i7t zyYgBq`*5z<;Z;mWKVs6|@)L<%P)&X~8&$?fgma_tw9;F=7&}dHoDrg9_l}f=xe?@k z5#^T^B*IF^%E!!pF?gQsw+}~+L$!oq)yI@z8D-r@o1|dT-Qz04C!t(>m|E8>LygCZ bes=s6jz*_&h4N{nyUILM1LgP!40Ui9IfjxD literal 0 HcmV?d00001 diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow50.spk b/newt/colormaps/batlow/DiscretePalettes/batlow50.spk new file mode 100644 index 0000000..046d51e --- /dev/null +++ b/newt/colormaps/batlow/DiscretePalettes/batlow50.spk @@ -0,0 +1,50 @@ + 0.00 0.52 9.82 34.98 + 1.96 2.32 12.90 35.52 + 3.92 3.74 15.83 36.02 + 6.27 4.94 19.11 36.58 + 8.24 5.59 21.60 37.00 + 10.20 6.11 23.86 37.37 + 12.16 6.59 25.93 37.70 + 14.12 7.11 27.85 37.99 + 16.47 7.85 30.06 38.28 + 18.43 8.65 31.86 38.45 + 20.39 9.66 33.62 38.51 + 22.35 10.97 35.31 38.42 + 24.31 12.59 36.92 38.13 + 26.67 14.97 38.70 37.44 + 28.63 17.28 40.05 36.59 + 30.59 19.83 41.28 35.48 + 32.55 22.57 42.40 34.15 + 34.51 25.45 43.45 32.64 + 36.86 29.02 44.64 30.69 + 38.82 32.08 45.60 28.98 + 40.78 35.20 46.54 27.25 + 42.75 38.37 47.48 25.51 + 44.71 41.60 48.42 23.79 + 47.06 45.58 49.56 21.78 + 49.02 49.01 50.51 20.20 + 50.98 52.56 51.47 18.80 + 52.94 56.25 52.43 17.70 + 55.29 60.82 53.54 17.01 + 57.25 64.71 54.42 17.15 + 59.22 68.60 55.23 18.01 + 61.18 72.43 55.96 19.54 + 63.14 76.16 56.63 21.63 + 65.49 80.47 57.39 24.70 + 67.45 83.90 58.03 27.64 + 69.41 87.15 58.74 30.91 + 71.37 90.16 59.55 34.54 + 73.33 92.83 60.52 38.54 + 75.69 95.46 61.91 43.76 + 77.65 97.08 63.24 48.33 + 79.61 98.19 64.67 52.96 + 81.57 98.86 66.14 57.53 + 83.53 99.19 67.61 61.96 + 85.88 99.31 69.35 67.08 + 87.84 99.28 70.77 
71.24 + 89.80 99.18 72.19 75.37 + 91.76 99.06 73.63 79.54 + 93.73 98.91 75.10 83.80 + 96.08 98.69 76.91 89.06 + 98.04 98.45 78.46 93.55 +100.00 98.14 80.04 98.13 diff --git a/newt/colormaps/batlow/DiscretePalettes/batlow50.txt b/newt/colormaps/batlow/DiscretePalettes/batlow50.txt new file mode 100644 index 0000000..a1ce50a --- /dev/null +++ b/newt/colormaps/batlow/DiscretePalettes/batlow50.txt @@ -0,0 +1,52 @@ +Scientific Colour Map Categorical Palette +batlow 50 Swatches + 1 25 89 batlow-1 #011959 + 6 33 91 batlow-6 #06215B + 10 40 92 batlow-11 #0A285C + 13 49 93 batlow-17 #0D315D + 14 55 94 batlow-22 #0E375E + 16 61 95 batlow-27 #103D5F + 17 66 96 batlow-32 #114260 + 18 71 97 batlow-37 #124761 + 20 77 98 batlow-43 #144D62 + 22 81 98 batlow-48 #165162 + 25 86 98 batlow-53 #195662 + 28 90 98 batlow-58 #1C5A62 + 32 94 97 batlow-63 #205E61 + 38 99 95 batlow-69 #26635F + 44 102 93 batlow-74 #2C665D + 51 105 90 batlow-79 #33695A + 58 108 87 batlow-84 #3A6C57 + 65 111 83 batlow-89 #416F53 + 74 114 78 batlow-95 #4A724E + 82 116 74 batlow-100 #52744A + 90 119 69 batlow-105 #5A7745 + 98 121 65 batlow-110 #627941 +106 123 61 batlow-115 #6A7B3D +116 126 56 batlow-121 #747E38 +125 129 52 batlow-126 #7D8134 +134 131 48 batlow-131 #868330 +143 134 45 batlow-136 #8F862D +155 137 43 batlow-142 #9B892B +165 139 44 batlow-147 #A58B2C +175 141 46 batlow-152 #AF8D2E +185 143 50 batlow-157 #B98F32 +194 144 55 batlow-162 #C29037 +205 146 63 batlow-168 #CD923F +214 148 70 batlow-173 #D69446 +222 150 79 batlow-178 #DE964F +230 152 88 batlow-183 #E69858 +237 154 98 batlow-188 #ED9A62 +243 158 112 batlow-194 #F39E70 +248 161 123 batlow-199 #F8A17B +250 165 135 batlow-204 #FAA587 +252 169 147 batlow-209 #FCA993 +253 172 158 batlow-214 #FDAC9E +253 177 171 batlow-220 #FDB1AB +253 180 182 batlow-225 #FDB4B6 +253 184 192 batlow-230 #FDB8C0 +253 188 203 batlow-235 #FDBCCB +252 191 214 batlow-240 #FCBFD6 +252 196 227 batlow-246 #FCC4E3 +251 200 239 batlow-251 #FBC8EF +250 204 250 batlow-256 
#FACCFA diff --git a/newt/colormaps/batlow/batlow.alut b/newt/colormaps/batlow/batlow.alut new file mode 100644 index 0000000..3126368 --- /dev/null +++ b/newt/colormaps/batlow/batlow.alut @@ -0,0 +1,256 @@ +1,25,89,255 +2,27,89,255 +3,28,90,255 +4,30,90,255 +5,31,90,255 +6,33,91,255 +7,34,91,255 +7,36,91,255 +8,37,91,255 +9,39,92,255 +10,40,92,255 +10,42,92,255 +11,43,92,255 +11,45,93,255 +12,46,93,255 +12,47,93,255 +13,49,93,255 +13,50,94,255 +13,51,94,255 +14,53,94,255 +14,54,94,255 +14,55,94,255 +15,56,95,255 +15,57,95,255 +15,59,95,255 +15,60,95,255 +16,61,95,255 +16,62,95,255 +16,63,96,255 +16,64,96,255 +17,65,96,255 +17,66,96,255 +17,67,96,255 +17,68,96,255 +18,69,97,255 +18,70,97,255 +18,71,97,255 +18,72,97,255 +19,73,97,255 +19,74,97,255 +19,75,97,255 +20,76,98,255 +20,77,98,255 +20,78,98,255 +21,79,98,255 +21,79,98,255 +22,80,98,255 +22,81,98,255 +23,82,98,255 +23,83,98,255 +24,84,98,255 +24,85,98,255 +25,86,98,255 +25,87,98,255 +26,87,98,255 +27,88,98,255 +27,89,98,255 +28,90,98,255 +29,91,98,255 +30,92,98,255 +30,93,98,255 +31,93,97,255 +32,94,97,255 +33,95,97,255 +34,96,97,255 +35,96,96,255 +36,97,96,255 +37,98,96,255 +38,99,95,255 +39,99,95,255 +40,100,95,255 +42,101,94,255 +43,101,94,255 +44,102,93,255 +45,103,93,255 +47,103,92,255 +48,104,92,255 +49,105,91,255 +51,105,90,255 +52,106,90,255 +53,106,89,255 +55,107,88,255 +56,108,88,255 +58,108,87,255 +59,109,86,255 +60,109,86,255 +62,110,85,255 +63,110,84,255 +65,111,83,255 +66,111,82,255 +68,112,82,255 +69,112,81,255 +71,113,80,255 +72,113,79,255 +74,114,78,255 +76,114,77,255 +77,115,77,255 +79,115,76,255 +80,116,75,255 +82,116,74,255 +83,117,73,255 +85,117,72,255 +87,118,71,255 +88,118,70,255 +90,119,69,255 +91,119,69,255 +93,120,68,255 +95,120,67,255 +96,121,66,255 +98,121,65,255 +99,122,64,255 +101,122,63,255 +103,123,62,255 +104,123,62,255 +106,123,61,255 +108,124,60,255 +109,124,59,255 +111,125,58,255 +113,125,57,255 +115,126,56,255 +116,126,56,255 +118,127,55,255 +120,127,54,255 +121,128,53,255 
+123,128,52,255 +125,129,52,255 +127,129,51,255 +129,130,50,255 +130,130,49,255 +132,131,49,255 +134,131,48,255 +136,132,47,255 +138,132,47,255 +140,133,46,255 +142,133,46,255 +143,134,45,255 +145,134,45,255 +147,135,44,255 +149,135,44,255 +151,136,44,255 +153,136,44,255 +155,137,43,255 +157,137,43,255 +159,137,43,255 +161,138,43,255 +163,138,44,255 +165,139,44,255 +167,139,44,255 +169,140,44,255 +171,140,45,255 +173,140,45,255 +175,141,46,255 +177,141,47,255 +179,142,47,255 +181,142,48,255 +183,142,49,255 +185,143,50,255 +187,143,51,255 +189,143,52,255 +190,144,53,255 +192,144,54,255 +194,144,55,255 +196,145,56,255 +198,145,58,255 +200,145,59,255 +202,146,60,255 +203,146,62,255 +205,146,63,255 +207,147,64,255 +209,147,66,255 +210,147,67,255 +212,148,69,255 +214,148,70,255 +216,148,72,255 +217,149,74,255 +219,149,75,255 +221,149,77,255 +222,150,79,255 +224,150,81,255 +225,151,82,255 +227,151,84,255 +228,151,86,255 +230,152,88,255 +231,152,90,255 +233,153,92,255 +234,153,94,255 +235,154,96,255 +237,154,98,255 +238,155,100,255 +239,155,103,255 +240,156,105,255 +241,157,107,255 +242,157,109,255 +243,158,112,255 +244,159,114,255 +245,159,116,255 +246,160,119,255 +247,161,121,255 +248,161,123,255 +248,162,126,255 +249,163,128,255 +249,163,130,255 +250,164,133,255 +250,165,135,255 +251,166,137,255 +251,166,140,255 +252,167,142,255 +252,168,144,255 +252,169,147,255 +252,169,149,255 +253,170,151,255 +253,171,154,255 +253,172,156,255 +253,172,158,255 +253,173,160,255 +253,174,162,255 +253,175,165,255 +253,175,167,255 +253,176,169,255 +253,177,171,255 +253,178,173,255 +253,178,175,255 +253,179,177,255 +253,180,180,255 +253,180,182,255 +253,181,184,255 +253,182,186,255 +253,183,188,255 +253,183,190,255 +253,184,192,255 +253,185,194,255 +253,186,196,255 +253,186,199,255 +253,187,201,255 +253,188,203,255 +253,188,205,255 +252,189,207,255 +252,190,209,255 +252,191,211,255 +252,191,214,255 +252,192,216,255 +252,193,218,255 +252,194,220,255 +252,195,223,255 +252,195,225,255 
+252,196,227,255 +252,197,229,255 +251,198,232,255 +251,198,234,255 +251,199,236,255 +251,200,239,255 +251,201,241,255 +251,202,243,255 +251,202,246,255 +250,203,248,255 +250,204,250,255 diff --git a/newt/colormaps/batlow/batlow.clm b/newt/colormaps/batlow/batlow.clm new file mode 100644 index 0000000..e424f62 --- /dev/null +++ b/newt/colormaps/batlow/batlow.clm @@ -0,0 +1,201 @@ +1 25 89 +2 27 89 +4 29 90 +5 31 90 +6 33 91 +7 35 91 +8 37 91 +9 39 92 +10 40 92 +10 42 92 +11 45 93 +12 46 93 +12 48 93 +13 50 94 +13 51 94 +14 53 94 +14 54 94 +15 56 95 +15 57 95 +15 59 95 +16 61 95 +16 62 95 +16 63 96 +16 64 96 +17 66 96 +17 67 96 +17 68 96 +18 69 97 +18 71 97 +18 72 97 +19 73 97 +19 75 97 +20 76 98 +20 77 98 +20 78 98 +21 79 98 +22 80 98 +22 81 98 +23 82 98 +24 84 98 +24 85 98 +25 86 98 +26 87 98 +27 88 98 +27 89 98 +28 90 98 +30 92 98 +30 93 98 +31 93 97 +32 94 97 +34 96 97 +35 96 96 +36 97 96 +38 99 95 +39 99 95 +40 100 95 +42 101 94 +44 102 93 +45 103 93 +47 103 92 +49 105 92 +51 105 90 +52 106 90 +54 106 89 +56 108 88 +58 108 87 +59 109 86 +61 109 86 +63 110 84 +65 111 83 +67 111 82 +69 112 81 +71 113 80 +72 113 79 +75 114 78 +77 115 77 +79 115 76 +80 116 75 +82 116 74 +84 117 72 +87 118 71 +89 118 70 +91 119 69 +93 120 68 +95 120 67 +97 121 66 +99 122 64 +101 122 63 +103 123 62 +105 123 62 +108 124 60 +109 124 59 +112 125 58 +114 126 56 +116 126 56 +118 127 55 +120 127 54 +122 128 52 +125 129 52 +127 129 51 +130 130 50 +132 131 49 +134 131 48 +137 132 47 +139 133 46 +142 133 46 +143 134 45 +146 134 45 +148 135 44 +151 136 44 +154 136 44 +156 137 43 +159 137 43 +161 138 43 +164 138 44 +166 139 44 +169 140 44 +171 140 45 +174 140 45 +176 141 47 +179 142 47 +182 142 48 +184 143 50 +187 143 51 +189 143 52 +191 144 53 +193 144 55 +196 145 56 +198 145 58 +201 145 59 +203 146 62 +205 146 63 +208 147 65 +210 147 67 +212 148 69 +214 148 70 +216 148 73 +218 149 75 +221 149 77 +222 150 79 +225 151 82 +227 151 84 +228 151 86 +230 152 89 +232 153 91 +234 153 94 +235 154 96 
+237 154 99 +239 155 102 +240 156 105 +241 157 108 +243 158 111 +244 159 114 +245 159 116 +246 160 120 +248 161 122 +248 162 126 +249 163 128 +249 163 131 +250 165 134 +251 166 137 +251 166 141 +252 168 143 +252 169 146 +252 169 149 +253 170 152 +253 172 155 +253 172 158 +253 173 160 +253 174 163 +253 175 167 +253 176 169 +253 177 172 +253 178 174 +253 179 177 +253 180 180 +253 180 183 +253 182 185 +253 183 188 +253 183 190 +253 185 193 +253 186 196 +253 186 199 +253 187 202 +253 188 204 +252 189 207 +252 190 209 +252 191 212 +252 192 215 +252 193 218 +252 194 221 +252 195 224 +252 196 227 +252 197 229 +251 198 233 +251 199 235 +251 200 239 +251 201 241 +251 202 244 +250 203 247 +250 204 250 diff --git a/newt/colormaps/batlow/batlow.clr b/newt/colormaps/batlow/batlow.clr new file mode 100644 index 0000000..fb1f5f2 --- /dev/null +++ b/newt/colormaps/batlow/batlow.clr @@ -0,0 +1,102 @@ +ColorMap 2 1 +0 1 25 89 255 +1 4 29 90 255 +2 6 33 91 255 +3 8 37 91 255 +4 10 40 92 255 +5 11 45 93 255 +6 12 48 93 255 +7 13 51 94 255 +8 14 54 94 255 +9 15 57 95 255 +10 16 61 95 255 +11 16 63 96 255 +12 17 66 96 255 +13 17 68 96 255 +14 18 71 97 255 +15 19 73 97 255 +16 20 76 98 255 +17 20 78 98 255 +18 22 80 98 255 +19 23 82 98 255 +20 24 85 98 255 +21 26 87 98 255 +22 27 89 98 255 +23 30 92 98 255 +24 31 93 97 255 +25 34 96 97 255 +26 36 97 96 255 +27 39 99 95 255 +28 42 101 94 255 +29 45 103 93 255 +30 49 105 92 255 +31 52 106 90 255 +32 56 108 88 255 +33 59 109 86 255 +34 63 110 84 255 +35 67 111 82 255 +36 71 113 80 255 +37 75 114 78 255 +38 79 115 76 255 +39 82 116 74 255 +40 87 118 71 255 +41 91 119 69 255 +42 95 120 67 255 +43 99 122 64 255 +44 103 123 62 255 +45 108 124 60 255 +46 112 125 58 255 +47 116 126 56 255 +48 120 127 54 255 +49 125 129 52 255 +50 130 130 50 255 +51 134 131 48 255 +52 139 133 46 255 +53 143 134 45 255 +54 148 135 44 255 +55 154 136 44 255 +56 159 137 43 255 +57 164 138 44 255 +58 169 140 44 255 +59 174 140 45 255 +60 179 142 47 255 +61 184 143 50 
255 +62 189 143 52 255 +63 193 144 55 255 +64 198 145 58 255 +65 203 146 62 255 +66 208 147 65 255 +67 212 148 69 255 +68 216 148 73 255 +69 221 149 77 255 +70 225 151 82 255 +71 228 151 86 255 +72 232 153 91 255 +73 235 154 96 255 +74 239 155 102 255 +75 241 157 108 255 +76 244 159 114 255 +77 246 160 120 255 +78 248 162 126 255 +79 249 163 131 255 +80 251 166 137 255 +81 252 168 143 255 +82 252 169 149 255 +83 253 172 155 255 +84 253 173 160 255 +85 253 175 167 255 +86 253 177 172 255 +87 253 179 177 255 +88 253 180 183 255 +89 253 183 188 255 +90 253 185 193 255 +91 253 186 199 255 +92 253 188 204 255 +93 252 190 209 255 +94 252 192 215 255 +95 252 194 221 255 +96 252 196 227 255 +97 251 198 233 255 +98 251 200 239 255 +99 251 202 244 255 +100 250 204 250 255 diff --git a/newt/colormaps/batlow/batlow.cpt b/newt/colormaps/batlow/batlow.cpt new file mode 100644 index 0000000..8c8cff0 --- /dev/null +++ b/newt/colormaps/batlow/batlow.cpt @@ -0,0 +1,261 @@ +# +# batlow +# www.fabiocrameri.ch/colourmaps +0.000000 1 25 89 0.003922 2 27 89 +0.003922 2 27 89 0.007843 3 28 90 +0.007843 3 28 90 0.011765 4 30 90 +0.011765 4 30 90 0.015686 5 31 90 +0.015686 5 31 90 0.019608 6 33 91 +0.019608 6 33 91 0.023529 7 34 91 +0.023529 7 34 91 0.027451 7 36 91 +0.027451 7 36 91 0.031373 8 37 91 +0.031373 8 37 91 0.035294 9 39 92 +0.035294 9 39 92 0.039216 10 40 92 +0.039216 10 40 92 0.043137 10 42 92 +0.043137 10 42 92 0.047059 11 43 92 +0.047059 11 43 92 0.050980 11 45 93 +0.050980 11 45 93 0.054902 12 46 93 +0.054902 12 46 93 0.058824 12 47 93 +0.058824 12 47 93 0.062745 13 49 93 +0.062745 13 49 93 0.066667 13 50 94 +0.066667 13 50 94 0.070588 13 51 94 +0.070588 13 51 94 0.074510 14 53 94 +0.074510 14 53 94 0.078431 14 54 94 +0.078431 14 54 94 0.082353 14 55 94 +0.082353 14 55 94 0.086275 15 56 95 +0.086275 15 56 95 0.090196 15 57 95 +0.090196 15 57 95 0.094118 15 59 95 +0.094118 15 59 95 0.098039 15 60 95 +0.098039 15 60 95 0.101961 16 61 95 +0.101961 16 61 95 0.105882 16 62 95 
+0.105882 16 62 95 0.109804 16 63 96 +0.109804 16 63 96 0.113725 16 64 96 +0.113725 16 64 96 0.117647 17 65 96 +0.117647 17 65 96 0.121569 17 66 96 +0.121569 17 66 96 0.125490 17 67 96 +0.125490 17 67 96 0.129412 17 68 96 +0.129412 17 68 96 0.133333 18 69 97 +0.133333 18 69 97 0.137255 18 70 97 +0.137255 18 70 97 0.141176 18 71 97 +0.141176 18 71 97 0.145098 18 72 97 +0.145098 18 72 97 0.149020 19 73 97 +0.149020 19 73 97 0.152941 19 74 97 +0.152941 19 74 97 0.156863 19 75 97 +0.156863 19 75 97 0.160784 20 76 98 +0.160784 20 76 98 0.164706 20 77 98 +0.164706 20 77 98 0.168627 20 78 98 +0.168627 20 78 98 0.172549 21 79 98 +0.172549 21 79 98 0.176471 21 79 98 +0.176471 21 79 98 0.180392 22 80 98 +0.180392 22 80 98 0.184314 22 81 98 +0.184314 22 81 98 0.188235 23 82 98 +0.188235 23 82 98 0.192157 23 83 98 +0.192157 23 83 98 0.196078 24 84 98 +0.196078 24 84 98 0.200000 24 85 98 +0.200000 24 85 98 0.203922 25 86 98 +0.203922 25 86 98 0.207843 25 87 98 +0.207843 25 87 98 0.211765 26 87 98 +0.211765 26 87 98 0.215686 27 88 98 +0.215686 27 88 98 0.219608 27 89 98 +0.219608 27 89 98 0.223529 28 90 98 +0.223529 28 90 98 0.227451 29 91 98 +0.227451 29 91 98 0.231373 30 92 98 +0.231373 30 92 98 0.235294 30 93 98 +0.235294 30 93 98 0.239216 31 93 97 +0.239216 31 93 97 0.243137 32 94 97 +0.243137 32 94 97 0.247059 33 95 97 +0.247059 33 95 97 0.250980 34 96 97 +0.250980 34 96 97 0.254902 35 96 96 +0.254902 35 96 96 0.258824 36 97 96 +0.258824 36 97 96 0.262745 37 98 96 +0.262745 37 98 96 0.266667 38 99 95 +0.266667 38 99 95 0.270588 39 99 95 +0.270588 39 99 95 0.274510 40 100 95 +0.274510 40 100 95 0.278431 42 101 94 +0.278431 42 101 94 0.282353 43 101 94 +0.282353 43 101 94 0.286275 44 102 93 +0.286275 44 102 93 0.290196 45 103 93 +0.290196 45 103 93 0.294118 47 103 92 +0.294118 47 103 92 0.298039 48 104 92 +0.298039 48 104 92 0.301961 49 105 91 +0.301961 49 105 91 0.305882 51 105 90 +0.305882 51 105 90 0.309804 52 106 90 +0.309804 52 106 90 0.313725 53 106 89 +0.313725 53 106 
89 0.317647 55 107 88 +0.317647 55 107 88 0.321569 56 108 88 +0.321569 56 108 88 0.325490 58 108 87 +0.325490 58 108 87 0.329412 59 109 86 +0.329412 59 109 86 0.333333 60 109 86 +0.333333 60 109 86 0.337255 62 110 85 +0.337255 62 110 85 0.341176 63 110 84 +0.341176 63 110 84 0.345098 65 111 83 +0.345098 65 111 83 0.349020 66 111 82 +0.349020 66 111 82 0.352941 68 112 82 +0.352941 68 112 82 0.356863 69 112 81 +0.356863 69 112 81 0.360784 71 113 80 +0.360784 71 113 80 0.364706 72 113 79 +0.364706 72 113 79 0.368627 74 114 78 +0.368627 74 114 78 0.372549 76 114 77 +0.372549 76 114 77 0.376471 77 115 77 +0.376471 77 115 77 0.380392 79 115 76 +0.380392 79 115 76 0.384314 80 116 75 +0.384314 80 116 75 0.388235 82 116 74 +0.388235 82 116 74 0.392157 83 117 73 +0.392157 83 117 73 0.396078 85 117 72 +0.396078 85 117 72 0.400000 87 118 71 +0.400000 87 118 71 0.403922 88 118 70 +0.403922 88 118 70 0.407843 90 119 69 +0.407843 90 119 69 0.411765 91 119 69 +0.411765 91 119 69 0.415686 93 120 68 +0.415686 93 120 68 0.419608 95 120 67 +0.419608 95 120 67 0.423529 96 121 66 +0.423529 96 121 66 0.427451 98 121 65 +0.427451 98 121 65 0.431373 99 122 64 +0.431373 99 122 64 0.435294 101 122 63 +0.435294 101 122 63 0.439216 103 123 62 +0.439216 103 123 62 0.443137 104 123 62 +0.443137 104 123 62 0.447059 106 123 61 +0.447059 106 123 61 0.450980 108 124 60 +0.450980 108 124 60 0.454902 109 124 59 +0.454902 109 124 59 0.458824 111 125 58 +0.458824 111 125 58 0.462745 113 125 57 +0.462745 113 125 57 0.466667 115 126 56 +0.466667 115 126 56 0.470588 116 126 56 +0.470588 116 126 56 0.474510 118 127 55 +0.474510 118 127 55 0.478431 120 127 54 +0.478431 120 127 54 0.482353 121 128 53 +0.482353 121 128 53 0.486275 123 128 52 +0.486275 123 128 52 0.490196 125 129 52 +0.490196 125 129 52 0.494118 127 129 51 +0.494118 127 129 51 0.498039 129 130 50 +0.498039 129 130 50 0.501961 130 130 49 +0.501961 130 130 49 0.505882 132 131 49 +0.505882 132 131 49 0.509804 134 131 48 +0.509804 134 131 48 
0.513725 136 132 47 +0.513725 136 132 47 0.517647 138 132 47 +0.517647 138 132 47 0.521569 140 133 46 +0.521569 140 133 46 0.525490 142 133 46 +0.525490 142 133 46 0.529412 143 134 45 +0.529412 143 134 45 0.533333 145 134 45 +0.533333 145 134 45 0.537255 147 135 44 +0.537255 147 135 44 0.541176 149 135 44 +0.541176 149 135 44 0.545098 151 136 44 +0.545098 151 136 44 0.549020 153 136 44 +0.549020 153 136 44 0.552941 155 137 43 +0.552941 155 137 43 0.556863 157 137 43 +0.556863 157 137 43 0.560784 159 137 43 +0.560784 159 137 43 0.564706 161 138 43 +0.564706 161 138 43 0.568627 163 138 44 +0.568627 163 138 44 0.572549 165 139 44 +0.572549 165 139 44 0.576471 167 139 44 +0.576471 167 139 44 0.580392 169 140 44 +0.580392 169 140 44 0.584314 171 140 45 +0.584314 171 140 45 0.588235 173 140 45 +0.588235 173 140 45 0.592157 175 141 46 +0.592157 175 141 46 0.596078 177 141 47 +0.596078 177 141 47 0.600000 179 142 47 +0.600000 179 142 47 0.603922 181 142 48 +0.603922 181 142 48 0.607843 183 142 49 +0.607843 183 142 49 0.611765 185 143 50 +0.611765 185 143 50 0.615686 187 143 51 +0.615686 187 143 51 0.619608 189 143 52 +0.619608 189 143 52 0.623529 190 144 53 +0.623529 190 144 53 0.627451 192 144 54 +0.627451 192 144 54 0.631373 194 144 55 +0.631373 194 144 55 0.635294 196 145 56 +0.635294 196 145 56 0.639216 198 145 58 +0.639216 198 145 58 0.643137 200 145 59 +0.643137 200 145 59 0.647059 202 146 60 +0.647059 202 146 60 0.650980 203 146 62 +0.650980 203 146 62 0.654902 205 146 63 +0.654902 205 146 63 0.658824 207 147 64 +0.658824 207 147 64 0.662745 209 147 66 +0.662745 209 147 66 0.666667 210 147 67 +0.666667 210 147 67 0.670588 212 148 69 +0.670588 212 148 69 0.674510 214 148 70 +0.674510 214 148 70 0.678431 216 148 72 +0.678431 216 148 72 0.682353 217 149 74 +0.682353 217 149 74 0.686275 219 149 75 +0.686275 219 149 75 0.690196 221 149 77 +0.690196 221 149 77 0.694118 222 150 79 +0.694118 222 150 79 0.698039 224 150 81 +0.698039 224 150 81 0.701961 225 151 82 +0.701961 
225 151 82 0.705882 227 151 84 +0.705882 227 151 84 0.709804 228 151 86 +0.709804 228 151 86 0.713725 230 152 88 +0.713725 230 152 88 0.717647 231 152 90 +0.717647 231 152 90 0.721569 233 153 92 +0.721569 233 153 92 0.725490 234 153 94 +0.725490 234 153 94 0.729412 235 154 96 +0.729412 235 154 96 0.733333 237 154 98 +0.733333 237 154 98 0.737255 238 155 100 +0.737255 238 155 100 0.741176 239 155 103 +0.741176 239 155 103 0.745098 240 156 105 +0.745098 240 156 105 0.749020 241 157 107 +0.749020 241 157 107 0.752941 242 157 109 +0.752941 242 157 109 0.756863 243 158 112 +0.756863 243 158 112 0.760784 244 159 114 +0.760784 244 159 114 0.764706 245 159 116 +0.764706 245 159 116 0.768627 246 160 119 +0.768627 246 160 119 0.772549 247 161 121 +0.772549 247 161 121 0.776471 248 161 123 +0.776471 248 161 123 0.780392 248 162 126 +0.780392 248 162 126 0.784314 249 163 128 +0.784314 249 163 128 0.788235 249 163 130 +0.788235 249 163 130 0.792157 250 164 133 +0.792157 250 164 133 0.796078 250 165 135 +0.796078 250 165 135 0.800000 251 166 137 +0.800000 251 166 137 0.803922 251 166 140 +0.803922 251 166 140 0.807843 252 167 142 +0.807843 252 167 142 0.811765 252 168 144 +0.811765 252 168 144 0.815686 252 169 147 +0.815686 252 169 147 0.819608 252 169 149 +0.819608 252 169 149 0.823529 253 170 151 +0.823529 253 170 151 0.827451 253 171 154 +0.827451 253 171 154 0.831373 253 172 156 +0.831373 253 172 156 0.835294 253 172 158 +0.835294 253 172 158 0.839216 253 173 160 +0.839216 253 173 160 0.843137 253 174 162 +0.843137 253 174 162 0.847059 253 175 165 +0.847059 253 175 165 0.850980 253 175 167 +0.850980 253 175 167 0.854902 253 176 169 +0.854902 253 176 169 0.858824 253 177 171 +0.858824 253 177 171 0.862745 253 178 173 +0.862745 253 178 173 0.866667 253 178 175 +0.866667 253 178 175 0.870588 253 179 177 +0.870588 253 179 177 0.874510 253 180 180 +0.874510 253 180 180 0.878431 253 180 182 +0.878431 253 180 182 0.882353 253 181 184 +0.882353 253 181 184 0.886275 253 182 186 
+0.886275 253 182 186 0.890196 253 183 188 +0.890196 253 183 188 0.894118 253 183 190 +0.894118 253 183 190 0.898039 253 184 192 +0.898039 253 184 192 0.901961 253 185 194 +0.901961 253 185 194 0.905882 253 186 196 +0.905882 253 186 196 0.909804 253 186 199 +0.909804 253 186 199 0.913725 253 187 201 +0.913725 253 187 201 0.917647 253 188 203 +0.917647 253 188 203 0.921569 253 188 205 +0.921569 253 188 205 0.925490 252 189 207 +0.925490 252 189 207 0.929412 252 190 209 +0.929412 252 190 209 0.933333 252 191 211 +0.933333 252 191 211 0.937255 252 191 214 +0.937255 252 191 214 0.941176 252 192 216 +0.941176 252 192 216 0.945098 252 193 218 +0.945098 252 193 218 0.949020 252 194 220 +0.949020 252 194 220 0.952941 252 195 223 +0.952941 252 195 223 0.956863 252 195 225 +0.956863 252 195 225 0.960784 252 196 227 +0.960784 252 196 227 0.964706 252 197 229 +0.964706 252 197 229 0.968627 251 198 232 +0.968627 251 198 232 0.972549 251 198 234 +0.972549 251 198 234 0.976471 251 199 236 +0.976471 251 199 236 0.980392 251 200 239 +0.980392 251 200 239 0.984314 251 201 241 +0.984314 251 201 241 0.988235 251 202 243 +0.988235 251 202 243 0.992157 251 202 246 +0.992157 251 202 246 0.996078 250 203 248 +0.996078 250 203 248 1.000000 250 204 250 +N 255 255 255 +B 1 25 89 +F 250 204 250 diff --git a/newt/colormaps/batlow/batlow.ct b/newt/colormaps/batlow/batlow.ct new file mode 100644 index 0000000..e5913ae --- /dev/null +++ b/newt/colormaps/batlow/batlow.ct @@ -0,0 +1,1030 @@ + + + 1.11.0 + + + 1 25 89 255 + 0 + + + 2 27 89 255 + 0.0039216 + + + 3 28 90 255 + 0.0078431 + + + 4 30 90 255 + 0.011765 + + + 5 31 90 255 + 0.015686 + + + 6 33 91 255 + 0.019608 + + + 7 34 91 255 + 0.023529 + + + 7 36 91 255 + 0.027451 + + + 8 37 91 255 + 0.031373 + + + 9 39 92 255 + 0.035294 + + + 10 40 92 255 + 0.039216 + + + 10 42 92 255 + 0.043137 + + + 11 43 92 255 + 0.047059 + + + 11 45 93 255 + 0.05098 + + + 12 46 93 255 + 0.054902 + + + 12 47 93 255 + 0.058824 + + + 13 49 93 255 + 0.062745 + + + 13 
50 94 255 + 0.066667 + + + 13 51 94 255 + 0.070588 + + + 14 53 94 255 + 0.07451 + + + 14 54 94 255 + 0.078431 + + + 14 55 94 255 + 0.082353 + + + 15 56 95 255 + 0.086275 + + + 15 57 95 255 + 0.090196 + + + 15 59 95 255 + 0.094118 + + + 15 60 95 255 + 0.098039 + + + 16 61 95 255 + 0.10196 + + + 16 62 95 255 + 0.10588 + + + 16 63 96 255 + 0.1098 + + + 16 64 96 255 + 0.11373 + + + 17 65 96 255 + 0.11765 + + + 17 66 96 255 + 0.12157 + + + 17 67 96 255 + 0.12549 + + + 17 68 96 255 + 0.12941 + + + 18 69 97 255 + 0.13333 + + + 18 70 97 255 + 0.13725 + + + 18 71 97 255 + 0.14118 + + + 18 72 97 255 + 0.1451 + + + 19 73 97 255 + 0.14902 + + + 19 74 97 255 + 0.15294 + + + 19 75 97 255 + 0.15686 + + + 20 76 98 255 + 0.16078 + + + 20 77 98 255 + 0.16471 + + + 20 78 98 255 + 0.16863 + + + 21 79 98 255 + 0.17255 + + + 21 79 98 255 + 0.17647 + + + 22 80 98 255 + 0.18039 + + + 22 81 98 255 + 0.18431 + + + 23 82 98 255 + 0.18824 + + + 23 83 98 255 + 0.19216 + + + 24 84 98 255 + 0.19608 + + + 24 85 98 255 + 0.2 + + + 25 86 98 255 + 0.20392 + + + 25 87 98 255 + 0.20784 + + + 26 87 98 255 + 0.21176 + + + 27 88 98 255 + 0.21569 + + + 27 89 98 255 + 0.21961 + + + 28 90 98 255 + 0.22353 + + + 29 91 98 255 + 0.22745 + + + 30 92 98 255 + 0.23137 + + + 30 93 98 255 + 0.23529 + + + 31 93 97 255 + 0.23922 + + + 32 94 97 255 + 0.24314 + + + 33 95 97 255 + 0.24706 + + + 34 96 97 255 + 0.25098 + + + 35 96 96 255 + 0.2549 + + + 36 97 96 255 + 0.25882 + + + 37 98 96 255 + 0.26275 + + + 38 99 95 255 + 0.26667 + + + 39 99 95 255 + 0.27059 + + + 40 100 95 255 + 0.27451 + + + 42 101 94 255 + 0.27843 + + + 43 101 94 255 + 0.28235 + + + 44 102 93 255 + 0.28627 + + + 45 103 93 255 + 0.2902 + + + 47 103 92 255 + 0.29412 + + + 48 104 92 255 + 0.29804 + + + 49 105 91 255 + 0.30196 + + + 51 105 90 255 + 0.30588 + + + 52 106 90 255 + 0.3098 + + + 53 106 89 255 + 0.31373 + + + 55 107 88 255 + 0.31765 + + + 56 108 88 255 + 0.32157 + + + 58 108 87 255 + 0.32549 + + + 59 109 86 255 + 0.32941 + + + 60 109 86 255 + 
0.33333 + + + 62 110 85 255 + 0.33725 + + + 63 110 84 255 + 0.34118 + + + 65 111 83 255 + 0.3451 + + + 66 111 82 255 + 0.34902 + + + 68 112 82 255 + 0.35294 + + + 69 112 81 255 + 0.35686 + + + 71 113 80 255 + 0.36078 + + + 72 113 79 255 + 0.36471 + + + 74 114 78 255 + 0.36863 + + + 76 114 77 255 + 0.37255 + + + 77 115 77 255 + 0.37647 + + + 79 115 76 255 + 0.38039 + + + 80 116 75 255 + 0.38431 + + + 82 116 74 255 + 0.38824 + + + 83 117 73 255 + 0.39216 + + + 85 117 72 255 + 0.39608 + + + 87 118 71 255 + 0.4 + + + 88 118 70 255 + 0.40392 + + + 90 119 69 255 + 0.40784 + + + 91 119 69 255 + 0.41176 + + + 93 120 68 255 + 0.41569 + + + 95 120 67 255 + 0.41961 + + + 96 121 66 255 + 0.42353 + + + 98 121 65 255 + 0.42745 + + + 99 122 64 255 + 0.43137 + + + 101 122 63 255 + 0.43529 + + + 103 123 62 255 + 0.43922 + + + 104 123 62 255 + 0.44314 + + + 106 123 61 255 + 0.44706 + + + 108 124 60 255 + 0.45098 + + + 109 124 59 255 + 0.4549 + + + 111 125 58 255 + 0.45882 + + + 113 125 57 255 + 0.46275 + + + 115 126 56 255 + 0.46667 + + + 116 126 56 255 + 0.47059 + + + 118 127 55 255 + 0.47451 + + + 120 127 54 255 + 0.47843 + + + 121 128 53 255 + 0.48235 + + + 123 128 52 255 + 0.48627 + + + 125 129 52 255 + 0.4902 + + + 127 129 51 255 + 0.49412 + + + 129 130 50 255 + 0.49804 + + + 130 130 49 255 + 0.50196 + + + 132 131 49 255 + 0.50588 + + + 134 131 48 255 + 0.5098 + + + 136 132 47 255 + 0.51373 + + + 138 132 47 255 + 0.51765 + + + 140 133 46 255 + 0.52157 + + + 142 133 46 255 + 0.52549 + + + 143 134 45 255 + 0.52941 + + + 145 134 45 255 + 0.53333 + + + 147 135 44 255 + 0.53725 + + + 149 135 44 255 + 0.54118 + + + 151 136 44 255 + 0.5451 + + + 153 136 44 255 + 0.54902 + + + 155 137 43 255 + 0.55294 + + + 157 137 43 255 + 0.55686 + + + 159 137 43 255 + 0.56078 + + + 161 138 43 255 + 0.56471 + + + 163 138 44 255 + 0.56863 + + + 165 139 44 255 + 0.57255 + + + 167 139 44 255 + 0.57647 + + + 169 140 44 255 + 0.58039 + + + 171 140 45 255 + 0.58431 + + + 173 140 45 255 + 0.58824 + + + 175 
141 46 255 + 0.59216 + + + 177 141 47 255 + 0.59608 + + + 179 142 47 255 + 0.6 + + + 181 142 48 255 + 0.60392 + + + 183 142 49 255 + 0.60784 + + + 185 143 50 255 + 0.61176 + + + 187 143 51 255 + 0.61569 + + + 189 143 52 255 + 0.61961 + + + 190 144 53 255 + 0.62353 + + + 192 144 54 255 + 0.62745 + + + 194 144 55 255 + 0.63137 + + + 196 145 56 255 + 0.63529 + + + 198 145 58 255 + 0.63922 + + + 200 145 59 255 + 0.64314 + + + 202 146 60 255 + 0.64706 + + + 203 146 62 255 + 0.65098 + + + 205 146 63 255 + 0.6549 + + + 207 147 64 255 + 0.65882 + + + 209 147 66 255 + 0.66275 + + + 210 147 67 255 + 0.66667 + + + 212 148 69 255 + 0.67059 + + + 214 148 70 255 + 0.67451 + + + 216 148 72 255 + 0.67843 + + + 217 149 74 255 + 0.68235 + + + 219 149 75 255 + 0.68627 + + + 221 149 77 255 + 0.6902 + + + 222 150 79 255 + 0.69412 + + + 224 150 81 255 + 0.69804 + + + 225 151 82 255 + 0.70196 + + + 227 151 84 255 + 0.70588 + + + 228 151 86 255 + 0.7098 + + + 230 152 88 255 + 0.71373 + + + 231 152 90 255 + 0.71765 + + + 233 153 92 255 + 0.72157 + + + 234 153 94 255 + 0.72549 + + + 235 154 96 255 + 0.72941 + + + 237 154 98 255 + 0.73333 + + + 238 155 100 255 + 0.73725 + + + 239 155 103 255 + 0.74118 + + + 240 156 105 255 + 0.7451 + + + 241 157 107 255 + 0.74902 + + + 242 157 109 255 + 0.75294 + + + 243 158 112 255 + 0.75686 + + + 244 159 114 255 + 0.76078 + + + 245 159 116 255 + 0.76471 + + + 246 160 119 255 + 0.76863 + + + 247 161 121 255 + 0.77255 + + + 248 161 123 255 + 0.77647 + + + 248 162 126 255 + 0.78039 + + + 249 163 128 255 + 0.78431 + + + 249 163 130 255 + 0.78824 + + + 250 164 133 255 + 0.79216 + + + 250 165 135 255 + 0.79608 + + + 251 166 137 255 + 0.8 + + + 251 166 140 255 + 0.80392 + + + 252 167 142 255 + 0.80784 + + + 252 168 144 255 + 0.81176 + + + 252 169 147 255 + 0.81569 + + + 252 169 149 255 + 0.81961 + + + 253 170 151 255 + 0.82353 + + + 253 171 154 255 + 0.82745 + + + 253 172 156 255 + 0.83137 + + + 253 172 158 255 + 0.83529 + + + 253 173 160 255 + 0.83922 + + + 253 
174 162 255 + 0.84314 + + + 253 175 165 255 + 0.84706 + + + 253 175 167 255 + 0.85098 + + + 253 176 169 255 + 0.8549 + + + 253 177 171 255 + 0.85882 + + + 253 178 173 255 + 0.86275 + + + 253 178 175 255 + 0.86667 + + + 253 179 177 255 + 0.87059 + + + 253 180 180 255 + 0.87451 + + + 253 180 182 255 + 0.87843 + + + 253 181 184 255 + 0.88235 + + + 253 182 186 255 + 0.88627 + + + 253 183 188 255 + 0.8902 + + + 253 183 190 255 + 0.89412 + + + 253 184 192 255 + 0.89804 + + + 253 185 194 255 + 0.90196 + + + 253 186 196 255 + 0.90588 + + + 253 186 199 255 + 0.9098 + + + 253 187 201 255 + 0.91373 + + + 253 188 203 255 + 0.91765 + + + 253 188 205 255 + 0.92157 + + + 252 189 207 255 + 0.92549 + + + 252 190 209 255 + 0.92941 + + + 252 191 211 255 + 0.93333 + + + 252 191 214 255 + 0.93725 + + + 252 192 216 255 + 0.94118 + + + 252 193 218 255 + 0.9451 + + + 252 194 220 255 + 0.94902 + + + 252 195 223 255 + 0.95294 + + + 252 195 225 255 + 0.95686 + + + 252 196 227 255 + 0.96078 + + + 252 197 229 255 + 0.96471 + + + 251 198 232 255 + 0.96863 + + + 251 198 234 255 + 0.97255 + + + 251 199 236 255 + 0.97647 + + + 251 200 239 255 + 0.98039 + + + 251 201 241 255 + 0.98431 + + + 251 202 243 255 + 0.98824 + + + 251 202 246 255 + 0.99216 + + + 250 203 248 255 + 0.99608 + + + 250 204 250 255 + 1 + + + \ No newline at end of file diff --git a/newt/colormaps/batlow/batlow.lut b/newt/colormaps/batlow/batlow.lut new file mode 100644 index 0000000..3c35dab --- /dev/null +++ b/newt/colormaps/batlow/batlow.lut @@ -0,0 +1,256 @@ +1 25 89 +2 27 89 +3 28 90 +4 30 90 +5 31 90 +6 33 91 +7 34 91 +7 36 91 +8 37 91 +9 39 92 +10 40 92 +10 42 92 +11 43 92 +11 45 93 +12 46 93 +12 47 93 +13 49 93 +13 50 94 +13 51 94 +14 53 94 +14 54 94 +14 55 94 +15 56 95 +15 57 95 +15 59 95 +15 60 95 +16 61 95 +16 62 95 +16 63 96 +16 64 96 +17 65 96 +17 66 96 +17 67 96 +17 68 96 +18 69 97 +18 70 97 +18 71 97 +18 72 97 +19 73 97 +19 74 97 +19 75 97 +20 76 98 +20 77 98 +20 78 98 +21 79 98 +21 79 98 +22 80 98 +22 81 98 +23 82 
98 +23 83 98 +24 84 98 +24 85 98 +25 86 98 +25 87 98 +26 87 98 +27 88 98 +27 89 98 +28 90 98 +29 91 98 +30 92 98 +30 93 98 +31 93 97 +32 94 97 +33 95 97 +34 96 97 +35 96 96 +36 97 96 +37 98 96 +38 99 95 +39 99 95 +40 100 95 +42 101 94 +43 101 94 +44 102 93 +45 103 93 +47 103 92 +48 104 92 +49 105 91 +51 105 90 +52 106 90 +53 106 89 +55 107 88 +56 108 88 +58 108 87 +59 109 86 +60 109 86 +62 110 85 +63 110 84 +65 111 83 +66 111 82 +68 112 82 +69 112 81 +71 113 80 +72 113 79 +74 114 78 +76 114 77 +77 115 77 +79 115 76 +80 116 75 +82 116 74 +83 117 73 +85 117 72 +87 118 71 +88 118 70 +90 119 69 +91 119 69 +93 120 68 +95 120 67 +96 121 66 +98 121 65 +99 122 64 +101 122 63 +103 123 62 +104 123 62 +106 123 61 +108 124 60 +109 124 59 +111 125 58 +113 125 57 +115 126 56 +116 126 56 +118 127 55 +120 127 54 +121 128 53 +123 128 52 +125 129 52 +127 129 51 +129 130 50 +130 130 49 +132 131 49 +134 131 48 +136 132 47 +138 132 47 +140 133 46 +142 133 46 +143 134 45 +145 134 45 +147 135 44 +149 135 44 +151 136 44 +153 136 44 +155 137 43 +157 137 43 +159 137 43 +161 138 43 +163 138 44 +165 139 44 +167 139 44 +169 140 44 +171 140 45 +173 140 45 +175 141 46 +177 141 47 +179 142 47 +181 142 48 +183 142 49 +185 143 50 +187 143 51 +189 143 52 +190 144 53 +192 144 54 +194 144 55 +196 145 56 +198 145 58 +200 145 59 +202 146 60 +203 146 62 +205 146 63 +207 147 64 +209 147 66 +210 147 67 +212 148 69 +214 148 70 +216 148 72 +217 149 74 +219 149 75 +221 149 77 +222 150 79 +224 150 81 +225 151 82 +227 151 84 +228 151 86 +230 152 88 +231 152 90 +233 153 92 +234 153 94 +235 154 96 +237 154 98 +238 155 100 +239 155 103 +240 156 105 +241 157 107 +242 157 109 +243 158 112 +244 159 114 +245 159 116 +246 160 119 +247 161 121 +248 161 123 +248 162 126 +249 163 128 +249 163 130 +250 164 133 +250 165 135 +251 166 137 +251 166 140 +252 167 142 +252 168 144 +252 169 147 +252 169 149 +253 170 151 +253 171 154 +253 172 156 +253 172 158 +253 173 160 +253 174 162 +253 175 165 +253 175 167 +253 176 169 +253 177 
171 +253 178 173 +253 178 175 +253 179 177 +253 180 180 +253 180 182 +253 181 184 +253 182 186 +253 183 188 +253 183 190 +253 184 192 +253 185 194 +253 186 196 +253 186 199 +253 187 201 +253 188 203 +253 188 205 +252 189 207 +252 190 209 +252 191 211 +252 191 214 +252 192 216 +252 193 218 +252 194 220 +252 195 223 +252 195 225 +252 196 227 +252 197 229 +251 198 232 +251 198 234 +251 199 236 +251 200 239 +251 201 241 +251 202 243 +251 202 246 +250 203 248 +250 204 250 diff --git a/newt/colormaps/batlow/batlow.mat b/newt/colormaps/batlow/batlow.mat new file mode 100644 index 0000000000000000000000000000000000000000..06ed6394d847d16c5069a0d59603bf7443089a08 GIT binary patch literal 6002 zcma*bRag@Ypn&0DB}I_#?v(BhNk@0%KtdX%dvuo|Ii@1r-O}APKyvix8ZZXu+?@OK zUVYC8kk|htuRt&SfsY;_ug~+v(b+A9{K*dVWy}egO$#L3#l`esTK$umAqfD>cCT|LtSa|NisK=P~k?TAW3; z)Uy;bn?^ds#=bpQQ0~Y~RaG=$NsCsBQfkinUOB;7Uyq<&rdisvBwOypZP49BqJl%g zp&K>V`3I={11YeVHXJ5`grPQzn28-%><{iH&NU%BfSgNzWrE6X+Xy2ycL0)Yp z;C}sDPE~-s@BnMllEe53DL?wMiW!+2a)nJ^{F!8Sy(3K!w_6`n%jEAkvqC8QaQIn* z9nel~C`G}(6YCQCAoFI8%q8C3*Zy+}T(OPIN&wQY@9iv}kM4C#M}1sng8$Tw+u<$y z`8+*mqii*WFL+$oOYw{-T?}sM*8G6x8}an~yH!iI;cNE23JMKg_d_E_)DEV!Jn#`V zzQSiNW>qi2L*;Ax-Y!zh7G^CRJk8Za9kglUXNv$)UpSApAQ$uB#4ODv4m5#1#6CBf zB&^#<((>s*4euxnqzunx)c1dS^-ZOE^OD_-&CPp&+Efe2LsS;0`=Q?oo6Ot%x0)P} z@Y9g5H-dLFGwQ5T`AMk290e&1%2A6O_d&n2ZaURAO~Mv~Hg)&oaW1-8<_#t~@xL14 zV6r}Q4)pX+G-VDje)3F%?N27m=ARoqn>}%jl^?r4F$aGPu02z73c^-53-zFF)r{Tma67Ad8cIgQpXOOBkY32M$y*R}+sPd7^UDyop#Y`1_70*sgG{Ib4T(-v$y;U_-Sh{=|R+v{ZUc?I(f=;wcA9Q!I? 
zOU|HgUfrK(U%#)`cMAn%ms5{a3`g7$5!`Cv&X^wXbp+Wlwd2MMhZ=}PT*jtszGxDa zMl(#FH;XEE`kht`LcaVcUc;drY%DVQwEHK|gllOUI+#-J1p0V}o;NWiq7Pt&KJhsX zf0)b;9nf3rMFQDO6l&kFZPLu@cpd(ANq&eoKS0DY&1@c!u?w zv)%1J4mmf_hv8cj8tJNsU6>)U4OTu(&}81+hAyF}%~4%b=vjzyvtCPXM#d6J34@Vu z<9^`S(H{I$>Wj`%t1-1AbVlO6_0-Cs_qk4OpEnVp^kIjWW~OdG6gutls`SP;*mw%a z6F)hz-UuA9t2VHw^LIg$k|vX|bCcG$ei`WpRNTr#yE%b$c=k_&~N{Y(HEV%;krE>E?QgO@MBbejH4vW`8zVN6)aZ4=?Y7LnR zo3s4+GlJ?(;7vy0!xece8~?@6!02!e0{KO!CL8?xCqK(pJlMz_tk0#b-#w6`H0{!h zoDN)-ny*9i3WcMUSTv4hE=avzB9laaVcm8{vxHSzrMa2zlNb>&v%>z@hB)r6Tm4dT zvO>{^5#?{T#K@oOe7yZiKAEQs(qCCM6GV#L587_)9j zS2!|XQY32KX+8Y`&sAFhKZI0_ zI1uvK!A0XXz)D^|XE_~?8O8ydAC?!T;~oYkAE+Km@M)_|AP||T>X7U3CK0_>FTGFv zOxACQa;qXX9c@wv7QK=|1_&y|FxX zveuNt3skx$W%GV#DqTf7l1a{+DRnNO`PNDufoD4p${O$P34Fs^z8224G2_Ch;_&B5 z1*CB;S-C94Co@W&_NrS%gA^}2R|8M1wg_nUJ>;4eM~ecLL6E-%s75h;}C&4E( z@rRq)gArEU%AXV^a4tJH*5216awyy-#YA$@*We74AZ%`T?R;H-*vj4DDV^a^y+52d zN&FYo-@h>5HDE9zc8SYo7>m?0j(r!e7H&j#8mjS(-f`BvmoD+lT|4(Cd%PYF=W7er zs35};JvG0*^Q+Y!Lv-|w6`+^tG)=2m7~?NbT-%7(Ukm1>3<;BV(;lX$r|~MyOlTfO z)-$5H6h2RWeITJ*lPJ6ZWQS!hKlBZN1b%S8f2>6~idV-=34Q0UmNTljW8#3Qi^=lm zUO1OVwq-3)hV*b!SL-}KE7wpK13$I}W|wMImLocG9jLD^7J{gk zd3;^!{c^&d!wN{xo6YCi@|%XBQ%s-8NSQd>E~kmJ=S=Buq4w^g{jLbJ`R#qQH>U}%8XJmUNL^fQK-H7dw z*UmQ+n*CQsmc}Y1gmr78ttXW94>n)v!n~T%NkqhQAn-evg;=BE{X#MHgSlkOBQ9Mr z&RG}wA~(MenBuKjs-%L_=cX3GBN5}yl#8|~Qhi`|uy}<@P^lBr(iG*ww0&@vwwkZO ze1c-Hth|qo6gzEgnsZ_PjcZFMF{&6nNV4Yz%(a!$k6WeRRl=Zq0QJZD+Oj+B5{^rb z^3D1l#%)97c-jMg8Wu!wZ0MySojJ7IbSQ5H(qHFgt%vGYaENhx$(cW5qN9DQ$n2u( zfOsF@SdXN+{=h|wb9tkVd_KFI?Cx&s_s^5OsZipXesAuDW+|8 z-iXEUMsfbveM-EiY&~Z7)8kLdX^ae=)t1=uZ*htxJxFi(DMb#c= zI9FNU{TdJ*zhj{zSxc!}EoIXhgoJZxjXdgC?grS;1?0T_h??9ipw(MJ1nv#dTh>Mz_^e(o?mq8mDe}eJQX$*#gcDv$=y*0 z@dZ7sRRYiHt9h0={amE5;^o4!!yo7e7JT+D)^Pijho+kL&gd)qP8+1bxQtp;aiypL z{EQrqDH;xt-#>aa_X#?s>6>Mu{BNL<5)yqG2Hd_Ra?eJ4KYTQ=%k}sM&=1KPhQRok z=(`}+K|d1Q_vg|{23Zh>Cy>!dkA=`XJb6sYZIdZ zIxVY6i_FdbAY$z=QBn!|my5!4@(z+@LshEB;C0umFYOF@A6n_J1gu9Uo2C5D8nm>A 
z7ocAqE(lBbbV-xhohrmEU66tm-b?Iba_;j!&O(mE;q^A&`eoHM#BmT1_~4GU;JtHC zd`r2N?7YqfQ;0xOVls@4msd$-*5kzB$8W5tWO3#GUVjXlbPPstRp?Bw*VCs**v@EG?OUJsM|I|gmCtUGZhToi&POWe~NYm%phN9Dsic>|#ouA6;) z5!d;)@2Ape;cFI3$XJF2DH^}R904@2BCRKHLJWSbLVwVKHSw4a4Gh@iQ_`BZRYqQp& z7zp4WP1>M%#4+^Q1Tad_c^}kP5wwiO?8`;qLzT3Zb?ZrB1t*~1s@kJprwBLF%*r;$ zH{(6SDzVh0?2SfRnd|i=Z;g#b1K#yv&5(*VeEp2Jrs!2*(W(r&c5+iK+hxZf^ke;Q zpbWWbKlf;yWp`fK{nl(jJ($b_$^)GJDrL(dm&T&~=;n9uZ!uFRR_XB%9T$}yB8jYp zd-Rvzsv1eqB;}yFP|LCq9g)r~BQ|N?kJKRDG_kVL$ZsCQ+pT(^8o?}0tt)~$m&)DK zx7b9pY>1QV_+U3jHW~DOz-47#p z)OvLNpzQBAO>9H%K%breO4iBe9IU=mqztE)iGQ=>40vbJQ*-9p@u||(genxE*NRyQK$)8k}vcj&CkO2MIvK&y-&N$p|W zS<=;rHnP^(Wb8-@c1f0%lw*Odab%?Q!fesj*H%z2S&25MdFZFq#Lns7_hdXuy=}w1 zn+2wd@`K=r0N4+hpi3i1{)QGPMqGjc59I+2s z?q+}UUu#SC?i#rCGjk+NL27Ua*&_Aw$RJ7^AvkXkj(0%9X}zU@K+UwyCbc01LOR(5{lD zz>&b@syrE`7jcANQDxMjCjKqXU)kO?`=7e(Ff&aXBcWhRLhlMy^ujjg88)kYSz0*- z7(!(O?ySmgmF^S((Nt))1Lg?JVsj+M_@7BDJ!y)Vm1JrTfFp1FUcZlp%1V$Zin8u> zHMj>gqY(tsyl$bj^OnD`vK~FqvKR?!m0J1x3+G9r8zP8d!ETNPZMUjSK%HHs>>sg& zQLyZG38aN>be4{vB{wrv&XuMHBbCow`;}BrjIuKiVO&#ao{Yr#+O`g zg>SZsoSf8$bN=0pOa1}AkbqXJGDRu%ni zB0b>5zVp{ck8TQk**$3*$*pmn^}Mr*Ddz2~eo=rZeCE-x+~^|&uY&sJ*iOKSKb!xz zWsZ3QF4YamC%2`YtawFiZzr2o*)DmjPEsvNZsdK)LebFE2n@p$w~_L|K4f`84*0LH z{QN7fVO%7yFAz9H6sq!}xl1Wr^o*?xBPnX!9JJEu++lIRLD&S{hW%l4HBM+aa2O0O zY^s%|ulS>9f`dKF_@~vKCPd8sdf9Y&8*z``f0ZubU|)a`%qmks*=O7o6$NyPqlVO@ zJ6<8_2HGC{Md+(+wX35W2sY!cD{fv>US1~t?X`mEUepwyKwu@T#xw7)%jq14zR_h< zfa1QCwmW|}!gA~l32E_4xog_FPohPSsxutXQmQyfaqT}JOjV*N_KC#iq$Uhn!UIi^ zoYmxh5ycBd220xC$sQU^%u>bU)Aw{CM9eOY6pN|Sjy)x+;Obp{v z3*?kO#&+OmmKujHqaPu0sqR8f`o68&o*VizIx#;##U(QJG0hwnv=C!2xUJv_HE*Qw z7A3D%GXWb%eCP$E6Hy7#+30p0em&1WiUTRp?Q29-=nUaQ>k4+R8&|IRmh+FmW8?QD zFH3^wH;AHx@C3hsB+FK(@4Ay>$eDO+a8+6`R1@yDsC^tWG88Y{zK3e9aa+8d&x|LG zrij@WA{ocJtyYwIquS!LDcur;FFDSUK%5|tWnu}Q2GA;Xts2VK(1FNTKAR3h6#MlC z?J5?Y9Qodlm76Yq1f@OS%536#E5)^0z3tMB`J3Zww(EH)c8+i>*mjdJXA+2r4Nx38 zEF>wwA@9>g%?0t + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/newt/colormaps/batlow/batlow.txt b/newt/colormaps/batlow/batlow.txt new file mode 100644 index 0000000..c36f357 --- /dev/null +++ b/newt/colormaps/batlow/batlow.txt @@ -0,0 +1,256 @@ +0.005193 0.098238 0.349842 +0.009065 0.104487 0.350933 +0.012963 0.110779 0.351992 +0.016530 0.116913 0.353070 +0.019936 0.122985 0.354120 +0.023189 0.129035 0.355182 +0.026291 0.135044 0.356210 +0.029245 0.140964 0.357239 +0.032053 0.146774 0.358239 +0.034853 0.152558 0.359233 +0.037449 0.158313 0.360216 +0.039845 0.163978 0.361187 +0.042104 0.169557 0.362151 +0.044069 0.175053 0.363084 +0.045905 0.180460 0.364007 +0.047665 0.185844 0.364915 +0.049378 0.191076 0.365810 +0.050795 0.196274 0.366684 +0.052164 0.201323 0.367524 +0.053471 0.206357 0.368370 +0.054721 0.211234 0.369184 +0.055928 0.216046 0.369974 +0.057033 0.220754 0.370750 +0.058032 0.225340 0.371509 +0.059164 0.229842 0.372252 +0.060167 0.234299 0.372978 +0.061052 0.238625 0.373691 +0.062060 0.242888 0.374386 +0.063071 0.247085 0.375050 +0.063982 0.251213 
0.375709 +0.064936 0.255264 0.376362 +0.065903 0.259257 0.376987 +0.066899 0.263188 0.377594 +0.067921 0.267056 0.378191 +0.069002 0.270922 0.378774 +0.070001 0.274713 0.379342 +0.071115 0.278497 0.379895 +0.072192 0.282249 0.380434 +0.073440 0.285942 0.380957 +0.074595 0.289653 0.381452 +0.075833 0.293321 0.381922 +0.077136 0.296996 0.382376 +0.078517 0.300622 0.382814 +0.079984 0.304252 0.383224 +0.081553 0.307858 0.383598 +0.083082 0.311461 0.383936 +0.084778 0.315043 0.384240 +0.086503 0.318615 0.384506 +0.088353 0.322167 0.384731 +0.090281 0.325685 0.384910 +0.092304 0.329220 0.385040 +0.094462 0.332712 0.385116 +0.096618 0.336161 0.385134 +0.099015 0.339621 0.385090 +0.101481 0.343036 0.384981 +0.104078 0.346410 0.384801 +0.106842 0.349774 0.384548 +0.109695 0.353098 0.384217 +0.112655 0.356391 0.383807 +0.115748 0.359638 0.383310 +0.118992 0.362849 0.382713 +0.122320 0.366030 0.382026 +0.125889 0.369160 0.381259 +0.129519 0.372238 0.380378 +0.133298 0.375282 0.379395 +0.137212 0.378282 0.378315 +0.141260 0.381240 0.377135 +0.145432 0.384130 0.375840 +0.149706 0.386975 0.374449 +0.154073 0.389777 0.372934 +0.158620 0.392531 0.371320 +0.163246 0.395237 0.369609 +0.167952 0.397889 0.367784 +0.172788 0.400496 0.365867 +0.177752 0.403041 0.363833 +0.182732 0.405551 0.361714 +0.187886 0.408003 0.359484 +0.193050 0.410427 0.357177 +0.198310 0.412798 0.354767 +0.203676 0.415116 0.352253 +0.209075 0.417412 0.349677 +0.214555 0.419661 0.347019 +0.220112 0.421864 0.344261 +0.225707 0.424049 0.341459 +0.231362 0.426197 0.338572 +0.237075 0.428325 0.335634 +0.242795 0.430418 0.332635 +0.248617 0.432493 0.329571 +0.254452 0.434529 0.326434 +0.260320 0.436556 0.323285 +0.266241 0.438555 0.320085 +0.272168 0.440541 0.316831 +0.278171 0.442524 0.313552 +0.284175 0.444484 0.310243 +0.290214 0.446420 0.306889 +0.296294 0.448357 0.303509 +0.302379 0.450282 0.300122 +0.308517 0.452205 0.296721 +0.314648 0.454107 0.293279 +0.320834 0.456006 0.289841 +0.327007 0.457900 0.286377 
+0.333235 0.459794 0.282937 +0.339469 0.461685 0.279468 +0.345703 0.463563 0.275998 +0.351976 0.465440 0.272492 +0.358277 0.467331 0.269037 +0.364589 0.469213 0.265543 +0.370922 0.471085 0.262064 +0.377291 0.472952 0.258588 +0.383675 0.474842 0.255131 +0.390070 0.476711 0.251665 +0.396505 0.478587 0.248212 +0.402968 0.480466 0.244731 +0.409455 0.482351 0.241314 +0.415967 0.484225 0.237895 +0.422507 0.486113 0.234493 +0.429094 0.488011 0.231096 +0.435714 0.489890 0.227728 +0.442365 0.491795 0.224354 +0.449052 0.493684 0.221074 +0.455774 0.495585 0.217774 +0.462539 0.497497 0.214518 +0.469368 0.499393 0.211318 +0.476221 0.501314 0.208148 +0.483123 0.503216 0.205037 +0.490081 0.505137 0.201976 +0.497089 0.507058 0.198994 +0.504153 0.508984 0.196118 +0.511253 0.510898 0.193296 +0.518425 0.512822 0.190566 +0.525637 0.514746 0.187990 +0.532907 0.516662 0.185497 +0.540225 0.518584 0.183099 +0.547599 0.520486 0.180884 +0.555024 0.522391 0.178854 +0.562506 0.524293 0.176964 +0.570016 0.526186 0.175273 +0.577582 0.528058 0.173775 +0.585199 0.529927 0.172493 +0.592846 0.531777 0.171449 +0.600520 0.533605 0.170648 +0.608240 0.535423 0.170104 +0.615972 0.537231 0.169826 +0.623739 0.539002 0.169814 +0.631513 0.540752 0.170075 +0.639301 0.542484 0.170622 +0.647098 0.544183 0.171465 +0.654889 0.545863 0.172603 +0.662691 0.547503 0.174044 +0.670477 0.549127 0.175747 +0.678244 0.550712 0.177803 +0.685995 0.552274 0.180056 +0.693720 0.553797 0.182610 +0.701421 0.555294 0.185478 +0.709098 0.556772 0.188546 +0.716731 0.558205 0.191851 +0.724322 0.559628 0.195408 +0.731878 0.561011 0.199174 +0.739393 0.562386 0.203179 +0.746850 0.563725 0.207375 +0.754268 0.565033 0.211761 +0.761629 0.566344 0.216322 +0.768942 0.567630 0.221045 +0.776208 0.568899 0.225930 +0.783416 0.570162 0.230962 +0.790568 0.571421 0.236160 +0.797665 0.572682 0.241490 +0.804709 0.573928 0.246955 +0.811692 0.575187 0.252572 +0.818610 0.576462 0.258303 +0.825472 0.577725 0.264197 +0.832272 0.579026 0.270211 +0.838999 
0.580339 0.276353 +0.845657 0.581672 0.282631 +0.852247 0.583037 0.289036 +0.858747 0.584440 0.295572 +0.865168 0.585882 0.302255 +0.871505 0.587352 0.309112 +0.877741 0.588873 0.316081 +0.883878 0.590450 0.323195 +0.889900 0.592087 0.330454 +0.895809 0.593765 0.337865 +0.901590 0.595507 0.345429 +0.907242 0.597319 0.353142 +0.912746 0.599191 0.360986 +0.918103 0.601126 0.368999 +0.923300 0.603137 0.377139 +0.928323 0.605212 0.385404 +0.933176 0.607369 0.393817 +0.937850 0.609582 0.402345 +0.942332 0.611867 0.411006 +0.946612 0.614218 0.419767 +0.950697 0.616649 0.428624 +0.954574 0.619137 0.437582 +0.958244 0.621671 0.446604 +0.961696 0.624282 0.455702 +0.964943 0.626934 0.464860 +0.967983 0.629639 0.474057 +0.970804 0.632394 0.483290 +0.973424 0.635183 0.492547 +0.975835 0.638012 0.501826 +0.978052 0.640868 0.511090 +0.980079 0.643752 0.520350 +0.981918 0.646664 0.529602 +0.983574 0.649590 0.538819 +0.985066 0.652522 0.547998 +0.986392 0.655470 0.557142 +0.987567 0.658422 0.566226 +0.988596 0.661378 0.575265 +0.989496 0.664329 0.584246 +0.990268 0.667280 0.593174 +0.990926 0.670230 0.602031 +0.991479 0.673165 0.610835 +0.991935 0.676091 0.619575 +0.992305 0.679007 0.628251 +0.992595 0.681914 0.636869 +0.992813 0.684815 0.645423 +0.992967 0.687705 0.653934 +0.993064 0.690579 0.662398 +0.993111 0.693451 0.670810 +0.993112 0.696314 0.679177 +0.993074 0.699161 0.687519 +0.993002 0.702006 0.695831 +0.992900 0.704852 0.704114 +0.992771 0.707689 0.712380 +0.992619 0.710530 0.720639 +0.992447 0.713366 0.728892 +0.992258 0.716210 0.737146 +0.992054 0.719049 0.745403 +0.991837 0.721893 0.753673 +0.991607 0.724754 0.761959 +0.991367 0.727614 0.770270 +0.991116 0.730489 0.778606 +0.990855 0.733373 0.786976 +0.990586 0.736265 0.795371 +0.990307 0.739184 0.803810 +0.990018 0.742102 0.812285 +0.989720 0.745039 0.820804 +0.989411 0.747997 0.829372 +0.989089 0.750968 0.837979 +0.988754 0.753949 0.846627 +0.988406 0.756949 0.855332 +0.988046 0.759964 0.864078 +0.987672 0.762996 
0.872864 +0.987280 0.766047 0.881699 +0.986868 0.769105 0.890573 +0.986435 0.772184 0.899493 +0.985980 0.775272 0.908448 +0.985503 0.778378 0.917444 +0.985002 0.781495 0.926468 +0.984473 0.784624 0.935531 +0.983913 0.787757 0.944626 +0.983322 0.790905 0.953748 +0.982703 0.794068 0.962895 +0.982048 0.797228 0.972070 +0.981354 0.800406 0.981267 diff --git a/newt/colormaps/batlow/batlow.xcmap b/newt/colormaps/batlow/batlow.xcmap new file mode 100644 index 0000000..ebd4e11 --- /dev/null +++ b/newt/colormaps/batlow/batlow.xcmap @@ -0,0 +1,268 @@ + + + + + + scientific-batlow + linear + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/newt/colormaps/batlow/batlow_PARAVIEW.xml b/newt/colormaps/batlow/batlow_PARAVIEW.xml new file mode 100644 index 0000000..78aceee --- /dev/null +++ b/newt/colormaps/batlow/batlow_PARAVIEW.xml @@ -0,0 +1,260 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/newt/colormaps/batlow/batlow_QGIS.xml b/newt/colormaps/batlow/batlow_QGIS.xml new file mode 100644 index 
0000000..8718efa --- /dev/null +++ b/newt/colormaps/batlow/batlow_QGIS.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin10.gpl b/newt/colormaps/berlin/DiscretePalettes/berlin10.gpl new file mode 100644 index 0000000..8ad0479 --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin10.gpl @@ -0,0 +1,14 @@ +GIMP Palette +Name: berlin 10 Swatches +Columns: 1 +# +158 176 255 berlin-1 #9EB0FF + 91 164 219 berlin-29 #5BA4DB + 45 117 151 berlin-58 #2D7597 + 26 66 86 berlin-86 #1A4256 + 17 25 30 berlin-114 #11191E + 40 13 1 berlin-143 #280D01 + 80 24 3 berlin-171 #501803 +138 63 42 berlin-199 #8A3F2A +196 117 106 berlin-228 #C4756A +255 173 173 berlin-256 #FFADAD diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin10.mat b/newt/colormaps/berlin/DiscretePalettes/berlin10.mat new file mode 100644 index 0000000000000000000000000000000000000000..0594b52b7b172bfe48217da287f305c9241f384f GIT binary patch literal 437 zcmeZu4DoSvQZUssQ1EpO(M`+DN!3vZ$Vn_o%P-2c0*X0%nwjV*I2WZRmZYXAl$oN=WvuUhl^f2jKUoAx1R zzgF-3IivQ(W2W+%&#m98Pcb|Db5nYAZRgd4Uy7b9f6J_Yti0HC{SA4MKQjX*|6KSN z`sZWD-bnp7^(^J3^}oO0`)7CBYp?vG^KYU{7Bho6Tm1t7-ju(U literal 0 HcmV?d00001 diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin10.spk b/newt/colormaps/berlin/DiscretePalettes/berlin10.spk new file mode 100644 index 0000000..ec1989c --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin10.spk @@ -0,0 +1,10 @@ + 0.00 62.11 69.02 99.95 + 10.98 35.61 64.16 85.88 + 22.35 17.74 45.71 59.10 + 33.33 10.12 26.01 33.60 + 44.31 6.54 9.65 11.72 + 55.69 15.83 5.21 0.29 + 66.67 31.50 9.25 1.33 + 77.65 54.14 24.58 16.42 + 89.02 76.88 45.91 41.58 +100.00 99.99 68.01 68.00 diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin10.txt b/newt/colormaps/berlin/DiscretePalettes/berlin10.txt new file mode 100644 index 0000000..0cf4cd9 --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin10.txt @@ -0,0 +1,12 @@ +Scientific Colour Map 
Categorical Palette +berlin 10 Swatches +158 176 255 berlin-1 #9EB0FF + 91 164 219 berlin-29 #5BA4DB + 45 117 151 berlin-58 #2D7597 + 26 66 86 berlin-86 #1A4256 + 17 25 30 berlin-114 #11191E + 40 13 1 berlin-143 #280D01 + 80 24 3 berlin-171 #501803 +138 63 42 berlin-199 #8A3F2A +196 117 106 berlin-228 #C4756A +255 173 173 berlin-256 #FFADAD diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin100.gpl b/newt/colormaps/berlin/DiscretePalettes/berlin100.gpl new file mode 100644 index 0000000..ac83ba9 --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin100.gpl @@ -0,0 +1,104 @@ +GIMP Palette +Name: berlin 100 Swatches +Columns: 1 +# +158 176 255 berlin-1 #9EB0FF +152 175 252 berlin-4 #98AFFC +147 175 250 berlin-6 #93AFFA +140 174 246 berlin-9 #8CAEF6 +135 173 244 berlin-11 #87ADF4 +128 172 241 berlin-14 #80ACF1 +123 172 238 berlin-16 #7BACEE +116 170 235 berlin-19 #74AAEB +108 169 230 berlin-22 #6CA9E6 +103 168 227 berlin-24 #67A8E3 + 96 165 223 berlin-27 #60A5DF + 91 164 219 berlin-29 #5BA4DB + 84 160 213 berlin-32 #54A0D5 + 79 158 209 berlin-34 #4F9ED1 + 72 154 202 berlin-37 #489ACA + 67 149 195 berlin-40 #4395C3 + 63 146 190 berlin-42 #3F92BE + 59 141 183 berlin-45 #3B8DB7 + 56 137 178 berlin-47 #3889B2 + 53 132 170 berlin-50 #3584AA + 50 126 163 berlin-53 #327EA3 + 48 122 158 berlin-55 #307A9E + 45 117 151 berlin-58 #2D7597 + 44 113 146 berlin-60 #2C7192 + 41 107 139 berlin-63 #296B8B + 40 104 134 berlin-65 #286886 + 38 98 127 berlin-68 #26627F + 36 93 120 berlin-71 #245D78 + 34 89 115 berlin-73 #225973 + 32 84 108 berlin-76 #20546C + 31 80 104 berlin-78 #1F5068 + 29 75 97 berlin-81 #1D4B61 + 28 71 92 berlin-83 #1C475C + 26 66 86 berlin-86 #1A4256 + 24 61 79 berlin-89 #183D4F + 23 58 75 berlin-91 #173A4B + 21 53 68 berlin-94 #153544 + 20 50 64 berlin-96 #143240 + 19 45 58 berlin-99 #132D3A + 18 42 54 berlin-101 #122A36 + 17 38 48 berlin-104 #112630 + 17 33 42 berlin-107 #11212A + 16 31 38 berlin-109 #101F26 + 17 27 33 berlin-112 #111B21 + 17 25 30 
berlin-114 #11191E + 17 21 25 berlin-117 #111519 + 17 19 23 berlin-119 #111317 + 19 17 18 berlin-122 #131112 + 21 14 14 berlin-125 #150E0E + 23 13 11 berlin-127 #170D0B + 26 12 8 berlin-130 #1A0C08 + 28 11 6 berlin-132 #1C0B06 + 32 11 4 berlin-135 #200B04 + 35 12 2 berlin-138 #230C02 + 37 12 1 berlin-140 #250C01 + 40 13 1 berlin-143 #280D01 + 43 14 1 berlin-145 #2B0E01 + 47 14 0 berlin-148 #2F0E00 + 49 15 0 berlin-150 #310F00 + 53 16 0 berlin-153 #351000 + 57 17 0 berlin-156 #391100 + 60 17 1 berlin-158 #3C1101 + 65 18 1 berlin-161 #411201 + 68 19 1 berlin-163 #441301 + 72 21 2 berlin-166 #481502 + 75 22 2 berlin-168 #4B1602 + 80 24 3 berlin-171 #501803 + 86 26 5 berlin-174 #561A05 + 89 28 7 berlin-176 #591C07 + 95 31 10 berlin-179 #5F1F0A + 99 33 12 berlin-181 #63210C +106 37 16 berlin-184 #6A2510 +110 40 19 berlin-186 #6E2813 +117 45 23 berlin-189 #752D17 +123 50 28 berlin-192 #7B321C +128 54 32 berlin-194 #803620 +134 59 38 berlin-197 #863B26 +138 63 42 berlin-199 #8A3F2A +144 68 48 berlin-202 #904430 +148 72 52 berlin-204 #944834 +154 77 59 berlin-207 #9A4D3B +160 83 65 berlin-210 #A05341 +164 87 70 berlin-212 #A45746 +170 92 76 berlin-215 #AA5C4C +174 96 81 berlin-217 #AE6051 +180 102 88 berlin-220 #B46658 +186 107 95 berlin-223 #BA6B5F +190 111 99 berlin-225 #BE6F63 +196 117 106 berlin-228 #C4756A +200 121 111 berlin-230 #C8796F +206 127 118 berlin-233 #CE7F76 +210 131 122 berlin-235 #D2837A +217 137 130 berlin-238 #D98982 +223 143 137 berlin-241 #DF8F89 +227 147 142 berlin-243 #E3938E +234 153 149 berlin-246 #EA9995 +238 157 154 berlin-248 #EE9D9A +244 163 161 berlin-251 #F4A3A1 +249 167 166 berlin-253 #F9A7A6 +255 173 173 berlin-256 #FFADAD diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin100.mat b/newt/colormaps/berlin/DiscretePalettes/berlin100.mat new file mode 100644 index 0000000000000000000000000000000000000000..f587c559bd85ff24a3d2e86ccead8403b4dfc374 GIT binary patch literal 2512 
zcmV;>2`~0dK~zjZLLfCRFd$7qR4ry{Y-KDUP;6mzW^ZzBIv`C!LrFF?EFeR2Wnpw> zWFT*DIv`YNbs$e;bRaS`ATc#MFf}?fIUq7HF*zV0ARr(hARr(hARr(hARr(hARr(h zARr(hARr(hARr(hARr(hARr(B00000000000ZB~{0000;2><|ioGn&)G!%*#p2@=5*{Vb_Ou{Lsm3JRgt3f$8QT~JGh=4# zjEKBQKkxkBJ?FdMx%Yg3+;h+Q?zyW71i~{Z0zq<-mlOWa|HRUNYw{u_7C~IZqKx3- z`P+G)i+Z}c|LXqfxi6;ZLdlZ@Tmfjkvw9Q>0lu2Vh1*=}!-DNtf=m=2#SNrp=@}kq zf@?kIt9eiT|sTJ2ZJ-YN#rv)mL{JWL~OjvGL@Hw5^gg^6eA@E!S`s`d<BOfpaZd&oEn4yQ<;ZPda5aRRzArg(E}nE1~3|r}1u>fojjqtoqstq>VczO021X z$ToRqVZIDE4&OSuYN8azXB;a}?kv87yTl?2qG~#PIxjW()CS+t{X?fF3+)$A$_@vT;)FH+Fqk}Ct`_1iy%hooK-svm1 z-e?EAJ%*%pfrY;kzF!yjcfd7&k~})j#s{Yt#{0s%@JFIS!84i=e@pdRWvV!dCT9Cx>JD;j=kz)ce=~?)_oaYs?zJuv?Xj zPSPM!YZP(|RfoWpIYcoL4nbp(cKDdjFlxQCZtT$?L0D49y@2@<*a^gmvv)@k{+8;z z(RK`uzhpJmvByw-eQQ$Gi*eW}U(uA;6=T*uJYr@+4DJt^IMpzY|_Z<5*q=8Jv@6^S|37X*Aocz0?MZ5BFFt0WPq{+JD|X!D>%bii$`v)L;r7B>3C@75%?WA|1^-kuF@ zFyA$K`A9zV;=F){7l z-4}3`ycj*>_X6jpB3tOG86c%yQcCK72{#YFowbx#xboH6_Bi7;-kR4-TvvDlQX=o` zgXuTO{O!tr)vvw7RF}Q}({u`6Yq`4U9L>g(;(@BS7pSOS*qEAY{vLtTC8N=l91QqJ zRb8q209EJn=4&Z=_?zEquSP6{;y&kn%qAKZ9%|aneJO^*sq?xmR63^ngR|Bbl;KcV zx{IYz1!fKuWz8j5;?*x);{vT3WN!KUbAY%O`KL2&4bvK6Y57b~Nzw*@EBp>$?+uTVYZ0&Sti;4G!Exz8=Nxh+|BB9@XqXT2HoWuOb^eb4N?rp`9>e z^MeMbyAbJhj-)xp!5gKf6V@I**elUQrnvUPV$8#p$>M@yGv=_vmj~HIqr#V4`ACxT zz4`kv9|H>OI{VJ`A)d3;Mrwxutta15+1UcwEN3KU{lJ$*iHPKX4Lijx}UBIIbh zlc(pzDDMtY-xDW>i_E6V>Z4*f-Ava`8X8Br+m-g@)Nwd_2J@bf#$jq0>9>4v312?D)O62QiW~KA|QWz#l86Pd%FH$1Il+{rz}9c5H6M0$Bv%(%}(le<2u= zG7J(+fGAm$n989(u;(v?Mjq~ib?)4tz9%2^Mm4ri>UgMMzG;(3BNx6Sy5<4Wy=a%x zycNgoM#yzVX|JC-I4R$0X1VAi!<02sSFPDdyT1@GyS)P^2CdEh6Vi^9-KT%`<+S1% z$3dLWXn{RTp>}>-Gj7MdPu!u(#INx!@lh%busNm)fr%KD0gYDdjjeHzMSacIB;L7%%9s62{q!WTZ-F5P#Qc;bZYj8x{}h(;$$y4 zM2qTg%DZEx?vJR<&&Tnl(8=-jx?|W?@T-AU=5a7xY16wpO}D<$4j5s)A5M6$?(JR`<^#f6X1zH!FWYYiRescLP? 
z-^v#6&i9n~r3@H;XK|f17XN{BKV<0dtwpF_o#EW=21K3iEF^m}ksvl^bcHlSXYWPgYQijTVxQW|Kry_y3j&Ah^8 zkv(|)XO`M}S}!&;sO#;`dC>cE?9BTue8g3D6L*&LVG*8g;h)uqfW4Khek%c1-iX05&j299>g$7THk&E literal 0 HcmV?d00001 diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin100.spk b/newt/colormaps/berlin/DiscretePalettes/berlin100.spk new file mode 100644 index 0000000..d874b5b --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin100.spk @@ -0,0 +1,100 @@ + 0.00 62.11 69.02 99.95 + 1.18 59.42 68.73 98.71 + 1.96 57.61 68.52 97.87 + 3.14 54.86 68.20 96.60 + 3.92 53.00 67.97 95.74 + 5.10 50.19 67.59 94.39 + 5.88 48.28 67.31 93.46 + 7.06 45.39 66.82 91.97 + 8.24 42.46 66.21 90.36 + 9.02 40.50 65.73 89.19 + 10.20 37.55 64.85 87.28 + 10.98 35.61 64.16 85.88 + 12.16 32.76 62.92 83.60 + 12.94 30.95 61.97 81.96 + 14.12 28.41 60.35 79.35 + 15.29 26.13 58.53 76.58 + 16.08 24.78 57.23 74.67 + 17.25 22.98 55.18 71.76 + 18.04 21.95 53.76 69.80 + 19.22 20.59 51.59 66.86 + 20.39 19.42 49.39 63.93 + 21.18 18.71 47.92 61.99 + 22.35 17.74 45.71 59.10 + 23.14 17.12 44.25 57.20 + 24.31 16.24 42.06 54.36 + 25.10 15.67 40.61 52.49 + 26.27 14.83 38.46 49.70 + 27.45 14.01 36.33 46.94 + 28.24 13.48 34.92 45.11 + 29.41 12.67 32.82 42.40 + 30.20 12.15 31.43 40.61 + 31.37 11.37 29.38 37.95 + 32.16 10.86 28.02 36.20 + 33.33 10.12 26.01 33.60 + 34.51 9.41 24.03 31.03 + 35.29 8.95 22.72 29.33 + 36.47 8.31 20.80 26.82 + 37.25 7.91 19.54 25.18 + 38.43 7.40 17.68 22.74 + 39.22 7.10 16.48 21.15 + 40.39 6.76 14.73 18.81 + 41.57 6.53 13.07 16.55 + 42.35 6.46 12.01 15.09 + 43.53 6.47 10.54 13.02 + 44.31 6.54 9.65 11.72 + 45.49 6.64 8.41 9.93 + 46.27 6.82 7.61 8.85 + 47.45 7.40 6.51 7.21 + 48.63 8.30 5.57 5.57 + 49.41 9.03 5.07 4.50 + 50.59 10.25 4.56 3.01 + 51.37 11.10 4.40 2.24 + 52.55 12.38 4.39 1.38 + 53.73 13.65 4.62 0.79 + 54.51 14.49 4.86 0.53 + 55.69 15.83 5.21 0.29 + 56.47 16.78 5.42 0.20 + 57.65 18.29 5.68 0.13 + 58.43 19.32 5.85 0.12 + 59.61 20.91 6.15 0.13 + 60.78 22.53 6.50 0.17 + 61.57 23.64 6.76 0.21 
+ 62.75 25.34 7.20 0.30 + 63.53 26.50 7.52 0.40 + 64.71 28.30 8.08 0.62 + 65.49 29.55 8.51 0.83 + 66.67 31.50 9.25 1.33 + 67.84 33.58 10.16 2.04 + 68.63 35.04 10.86 2.68 + 69.80 37.35 12.10 3.91 + 70.59 38.96 13.05 4.85 + 71.76 41.47 14.63 6.27 + 72.55 43.18 15.79 7.30 + 73.73 45.77 17.67 9.09 + 74.90 48.34 19.67 11.10 + 75.69 50.03 21.04 12.55 + 76.86 52.52 23.15 14.83 + 77.65 54.14 24.58 16.42 + 78.82 56.54 26.73 18.86 + 79.61 58.11 28.17 20.52 + 80.78 60.45 30.33 23.05 + 81.96 62.77 32.51 25.62 + 82.75 64.32 33.97 27.35 + 83.92 66.64 36.18 29.96 + 84.71 68.20 37.65 31.72 + 85.88 70.54 39.88 34.38 + 87.06 72.91 42.13 37.06 + 87.84 74.49 43.63 38.86 + 89.02 76.88 45.91 41.58 + 89.80 78.49 47.44 43.41 + 90.98 80.91 49.75 46.17 + 91.76 82.54 51.30 48.02 + 92.94 84.99 53.63 50.81 + 94.12 87.46 55.99 53.62 + 94.90 89.11 57.57 55.51 + 96.08 91.60 59.95 58.36 + 96.86 93.27 61.55 60.26 + 98.04 95.78 63.96 63.15 + 98.82 97.46 65.57 65.08 +100.00 99.99 68.01 68.00 diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin100.txt b/newt/colormaps/berlin/DiscretePalettes/berlin100.txt new file mode 100644 index 0000000..44e906b --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin100.txt @@ -0,0 +1,102 @@ +Scientific Colour Map Categorical Palette +berlin 100 Swatches +158 176 255 berlin-1 #9EB0FF +152 175 252 berlin-4 #98AFFC +147 175 250 berlin-6 #93AFFA +140 174 246 berlin-9 #8CAEF6 +135 173 244 berlin-11 #87ADF4 +128 172 241 berlin-14 #80ACF1 +123 172 238 berlin-16 #7BACEE +116 170 235 berlin-19 #74AAEB +108 169 230 berlin-22 #6CA9E6 +103 168 227 berlin-24 #67A8E3 + 96 165 223 berlin-27 #60A5DF + 91 164 219 berlin-29 #5BA4DB + 84 160 213 berlin-32 #54A0D5 + 79 158 209 berlin-34 #4F9ED1 + 72 154 202 berlin-37 #489ACA + 67 149 195 berlin-40 #4395C3 + 63 146 190 berlin-42 #3F92BE + 59 141 183 berlin-45 #3B8DB7 + 56 137 178 berlin-47 #3889B2 + 53 132 170 berlin-50 #3584AA + 50 126 163 berlin-53 #327EA3 + 48 122 158 berlin-55 #307A9E + 45 117 151 berlin-58 #2D7597 
+ 44 113 146 berlin-60 #2C7192 + 41 107 139 berlin-63 #296B8B + 40 104 134 berlin-65 #286886 + 38 98 127 berlin-68 #26627F + 36 93 120 berlin-71 #245D78 + 34 89 115 berlin-73 #225973 + 32 84 108 berlin-76 #20546C + 31 80 104 berlin-78 #1F5068 + 29 75 97 berlin-81 #1D4B61 + 28 71 92 berlin-83 #1C475C + 26 66 86 berlin-86 #1A4256 + 24 61 79 berlin-89 #183D4F + 23 58 75 berlin-91 #173A4B + 21 53 68 berlin-94 #153544 + 20 50 64 berlin-96 #143240 + 19 45 58 berlin-99 #132D3A + 18 42 54 berlin-101 #122A36 + 17 38 48 berlin-104 #112630 + 17 33 42 berlin-107 #11212A + 16 31 38 berlin-109 #101F26 + 17 27 33 berlin-112 #111B21 + 17 25 30 berlin-114 #11191E + 17 21 25 berlin-117 #111519 + 17 19 23 berlin-119 #111317 + 19 17 18 berlin-122 #131112 + 21 14 14 berlin-125 #150E0E + 23 13 11 berlin-127 #170D0B + 26 12 8 berlin-130 #1A0C08 + 28 11 6 berlin-132 #1C0B06 + 32 11 4 berlin-135 #200B04 + 35 12 2 berlin-138 #230C02 + 37 12 1 berlin-140 #250C01 + 40 13 1 berlin-143 #280D01 + 43 14 1 berlin-145 #2B0E01 + 47 14 0 berlin-148 #2F0E00 + 49 15 0 berlin-150 #310F00 + 53 16 0 berlin-153 #351000 + 57 17 0 berlin-156 #391100 + 60 17 1 berlin-158 #3C1101 + 65 18 1 berlin-161 #411201 + 68 19 1 berlin-163 #441301 + 72 21 2 berlin-166 #481502 + 75 22 2 berlin-168 #4B1602 + 80 24 3 berlin-171 #501803 + 86 26 5 berlin-174 #561A05 + 89 28 7 berlin-176 #591C07 + 95 31 10 berlin-179 #5F1F0A + 99 33 12 berlin-181 #63210C +106 37 16 berlin-184 #6A2510 +110 40 19 berlin-186 #6E2813 +117 45 23 berlin-189 #752D17 +123 50 28 berlin-192 #7B321C +128 54 32 berlin-194 #803620 +134 59 38 berlin-197 #863B26 +138 63 42 berlin-199 #8A3F2A +144 68 48 berlin-202 #904430 +148 72 52 berlin-204 #944834 +154 77 59 berlin-207 #9A4D3B +160 83 65 berlin-210 #A05341 +164 87 70 berlin-212 #A45746 +170 92 76 berlin-215 #AA5C4C +174 96 81 berlin-217 #AE6051 +180 102 88 berlin-220 #B46658 +186 107 95 berlin-223 #BA6B5F +190 111 99 berlin-225 #BE6F63 +196 117 106 berlin-228 #C4756A +200 121 111 berlin-230 #C8796F +206 
127 118 berlin-233 #CE7F76 +210 131 122 berlin-235 #D2837A +217 137 130 berlin-238 #D98982 +223 143 137 berlin-241 #DF8F89 +227 147 142 berlin-243 #E3938E +234 153 149 berlin-246 #EA9995 +238 157 154 berlin-248 #EE9D9A +244 163 161 berlin-251 #F4A3A1 +249 167 166 berlin-253 #F9A7A6 +255 173 173 berlin-256 #FFADAD diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin25.gpl b/newt/colormaps/berlin/DiscretePalettes/berlin25.gpl new file mode 100644 index 0000000..7f51040 --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin25.gpl @@ -0,0 +1,29 @@ +GIMP Palette +Name: berlin 25 Swatches +Columns: 1 +# +158 176 255 berlin-1 #9EB0FF +133 173 243 berlin-12 #85ADF3 +108 169 230 berlin-22 #6CA9E6 + 81 159 211 berlin-33 #519FD3 + 60 142 185 berlin-44 #3C8EB9 + 49 124 161 berlin-54 #317CA1 + 40 104 134 berlin-65 #286886 + 33 85 110 berlin-75 #21556E + 26 66 86 berlin-86 #1A4256 + 20 48 62 berlin-97 #14303E + 17 33 42 berlin-107 #11212A + 17 20 24 berlin-118 #111418 + 25 12 9 berlin-129 #190C09 + 36 12 2 berlin-139 #240C02 + 49 15 0 berlin-150 #310F00 + 63 18 1 berlin-160 #3F1201 + 80 24 3 berlin-171 #501803 +101 35 14 berlin-182 #65230E +123 50 28 berlin-192 #7B321C +146 70 50 berlin-203 #924632 +168 90 74 berlin-214 #A85A4A +188 109 97 berlin-224 #BC6D61 +210 131 122 berlin-235 #D2837A +231 151 146 berlin-245 #E79792 +255 173 173 berlin-256 #FFADAD diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin25.mat b/newt/colormaps/berlin/DiscretePalettes/berlin25.mat new file mode 100644 index 0000000000000000000000000000000000000000..0553e41ee93f8257f5eee0d100e2bc9a2a985567 GIT binary patch literal 802 zcmeZu4DoSvQZUssQ1EpO(M`+DN!3vZ$Vn_o%P-2c0*X0%nwjV*I2WZRmZYXA; z-`zMD>yh?uN3mac%)}tMQr-J+H^2Y>`S^DQM}g4a9EXlw`rY~eul^I;PRf(?YhM$KjZ%p39!Ivvm|D!wU`V~(D&4sH}RtZ_E z_C}TW$>^?LW2Mb5dCKPaujbWk2d)(B%b0p4d<);Fq`j-4YhKU;1yRSE6&EYpuQATO z-_rM7xv@a=wC*&CredAN&X={1Z`}T>Y*K~HOY4&#JMT^INIKUVt2e{ZxU|^n<7&2> z@87T_>d5|iTl@c=k)3tv&C26o+h^5=|yOmyK(KoqenVU 
z{z(${5z+T^Tq4-^ir1)rSI67QT{n&=fBvyXe9iju^GbD=S|)o+xY94FtIW@Md_yB; z{=3D!3JW6cJE-q(Ts|u)SuV%zU6x?-uAgRgUXg`w*ZCB9?hnbFnwjBy{<@cb=mOtc zk@|c&?Vmk-Pc^==E7?Bj=Hf=KlAvqLzBGL|usAo>%(cE{!MB~iC&uw+O;MdveZ5rU QiELN}pX3MeP4^7c0e8+{i2wiq literal 0 HcmV?d00001 diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin25.spk b/newt/colormaps/berlin/DiscretePalettes/berlin25.spk new file mode 100644 index 0000000..2366775 --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin25.spk @@ -0,0 +1,25 @@ + 0.00 62.11 69.02 99.95 + 4.31 52.07 67.85 95.29 + 8.24 42.46 66.21 90.36 + 12.55 31.85 62.46 82.79 + 16.86 23.55 55.87 72.74 + 20.78 19.06 48.65 62.96 + 25.10 15.67 40.61 52.49 + 29.02 12.94 33.52 43.30 + 33.33 10.12 26.01 33.60 + 37.65 7.73 18.91 24.36 + 41.57 6.53 13.07 16.55 + 45.88 6.71 8.01 9.38 + 50.20 9.83 4.70 3.47 + 54.12 14.06 4.74 0.65 + 58.43 19.32 5.85 0.12 + 62.35 24.77 7.05 0.26 + 66.67 31.50 9.25 1.33 + 70.98 39.78 13.55 5.31 + 74.90 48.34 19.67 11.10 + 79.22 57.33 27.45 19.69 + 83.53 65.87 35.44 29.09 + 87.45 73.70 42.88 37.96 + 91.76 82.54 51.30 48.02 + 95.69 90.77 59.15 57.40 +100.00 99.99 68.01 68.00 diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin25.txt b/newt/colormaps/berlin/DiscretePalettes/berlin25.txt new file mode 100644 index 0000000..8240bcc --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin25.txt @@ -0,0 +1,27 @@ +Scientific Colour Map Categorical Palette +berlin 25 Swatches +158 176 255 berlin-1 #9EB0FF +133 173 243 berlin-12 #85ADF3 +108 169 230 berlin-22 #6CA9E6 + 81 159 211 berlin-33 #519FD3 + 60 142 185 berlin-44 #3C8EB9 + 49 124 161 berlin-54 #317CA1 + 40 104 134 berlin-65 #286886 + 33 85 110 berlin-75 #21556E + 26 66 86 berlin-86 #1A4256 + 20 48 62 berlin-97 #14303E + 17 33 42 berlin-107 #11212A + 17 20 24 berlin-118 #111418 + 25 12 9 berlin-129 #190C09 + 36 12 2 berlin-139 #240C02 + 49 15 0 berlin-150 #310F00 + 63 18 1 berlin-160 #3F1201 + 80 24 3 berlin-171 #501803 +101 35 14 berlin-182 #65230E +123 50 28 
berlin-192 #7B321C +146 70 50 berlin-203 #924632 +168 90 74 berlin-214 #A85A4A +188 109 97 berlin-224 #BC6D61 +210 131 122 berlin-235 #D2837A +231 151 146 berlin-245 #E79792 +255 173 173 berlin-256 #FFADAD diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin50.gpl b/newt/colormaps/berlin/DiscretePalettes/berlin50.gpl new file mode 100644 index 0000000..4bc7bf7 --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin50.gpl @@ -0,0 +1,54 @@ +GIMP Palette +Name: berlin 50 Swatches +Columns: 1 +# +158 176 255 berlin-1 #9EB0FF +147 175 250 berlin-6 #93AFFA +135 173 244 berlin-11 #87ADF4 +121 171 237 berlin-17 #79ABED +108 169 230 berlin-22 #6CA9E6 + 96 165 223 berlin-27 #60A5DF + 84 160 213 berlin-32 #54A0D5 + 72 154 202 berlin-37 #489ACA + 62 144 188 berlin-43 #3E90BC + 55 135 175 berlin-48 #3787AF + 50 126 163 berlin-53 #327EA3 + 45 117 151 berlin-58 #2D7597 + 41 107 139 berlin-63 #296B8B + 37 96 124 berlin-69 #25607C + 34 87 113 berlin-74 #225771 + 30 78 101 berlin-79 #1E4E65 + 27 70 90 berlin-84 #1B465A + 24 61 79 berlin-89 #183D4F + 21 51 66 berlin-95 #153342 + 18 44 56 berlin-100 #122C38 + 17 36 46 berlin-105 #11242E + 16 29 37 berlin-110 #101D25 + 17 24 28 berlin-115 #11181C + 18 18 20 berlin-121 #121214 + 22 14 13 berlin-126 #160E0D + 27 11 7 berlin-131 #1B0B07 + 33 11 3 berlin-136 #210B03 + 39 13 1 berlin-142 #270D01 + 45 14 0 berlin-147 #2D0E00 + 52 15 0 berlin-152 #340F00 + 59 17 0 berlin-157 #3B1100 + 66 19 1 berlin-162 #421301 + 75 22 2 berlin-168 #4B1602 + 84 25 5 berlin-173 #541905 + 93 30 9 berlin-178 #5D1E09 +104 36 15 berlin-183 #68240F +115 43 22 berlin-188 #732B16 +128 54 32 berlin-194 #803620 +138 63 42 berlin-199 #8A3F2A +148 72 52 berlin-204 #944834 +158 81 63 berlin-209 #9E513F +168 90 74 berlin-214 #A85A4A +180 102 88 berlin-220 #B46658 +190 111 99 berlin-225 #BE6F63 +200 121 111 berlin-230 #C8796F +210 131 122 berlin-235 #D2837A +221 141 134 berlin-240 #DD8D86 +234 153 149 berlin-246 #EA9995 +244 163 161 berlin-251 #F4A3A1 +255 
173 173 berlin-256 #FFADAD diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin50.mat b/newt/colormaps/berlin/DiscretePalettes/berlin50.mat new file mode 100644 index 0000000000000000000000000000000000000000..360592b694581b5b3ee7439eeed53a7de665e648 GIT binary patch literal 1378 zcmV-o1)cg$K~zjZLLfCRFd$7qR4ry{Y-KDUP;6mzW^ZzBIv`C!LrFF?EFeR2Wnpw> zWFT*DIv`YNbs$e;bRaS`ATc#MFf}?fIUq7HF*zV0ARr(hARr(hARr(hARr(hARr(h zARr(hARr(hARr(hARr(hARr(B00000000000ZB~{0002m1ONbdoGnvpG!zIJooEzE z618G$kIkbrNGg+4S36pd62+=Sq46e($+WA~wy7k0qErr*M@XfP z7-Nh{O^9l>vuFFxx#!;d<2&a&=YCg3HClJ(>ypZre|2M>M-dYJ&CFXte0f8qX zLXT~-Qdeq*a_`RyHDAfV5I&L9liZ>P@v_;bU1M5^(zhDD$MT>^g+#c%x|3{c|i?P3}jP}I>sO1+p^m_Sc+GA&1x$W^y_UnL054<@Su zSy-uBq?jzK#yS@g#hP6U+x!GdkX|FA?YH%nBsas@cR=Bl-i93McaGzu9SAnmUZSbd zg-wZPzR-jI#-tnKe*fAHzdKBNYgI2swCI#P6(L?PoeeqVA%efk@oRCn2H_R`aFWj( z!iL4>uUN~(Si9!E?^x;xE?#x&x8+HY)3Bnv;(IBKM=M=DL&tFRsfY9UGZ~VKnchFR z%TXV{HX|kfEoMBDu1wEOU|@cs#f;tvrVdDU@0W`q6_o8+z!Jl~%lwAc4@xyReuSAP(5L!phXvqA(|w^8*o2QYO(Eh1<~ z9|~@_G;fpjppk45=V``AXz{OC8^vAtsU`HU$j6;n8ZZ_;bG97|Uf8Qovs%!?A=5Nl z8<1$QG1jG*gXnz2lP*SVIQ{GrX-BKVn?nE5_}r&xAK9w2Kdl6`;;5QKMnzbsUUTo- zP(EY_d&HFLe84ki<95S`V76T+1Qb64TOk+C&K83!kJ-$hVIV5yu0*4U2?;HwE~f4Y z&YT;Q(5#-L_7}e<$G$3j%oMIqC$h2ZxvGN?mxDsH*~ya=jc{U1P*B$)zV1q;BOD9aE|DP#+J;2HJLnQ+xtg{2|97N z?E2dB+%{O5)|>h*Z-SZ6T=^3HI_T=>%}s}~vHE04gKGE-tj^;sk#v_r`-&Aghf|0V z-ug^wP6kR8HI{jm7ZI;nMm_u~LfJ2ODoy7AE@f1>Q0{wzm^Qn;_|Rd@85qghe~iGQ zB4Ay2T|K@AjlWV@o2R{Wk1Z{ kyS!uxgrFqdr_de^V)6OZO`mOsaf-IG!GtZwzceslZfxz1LjV8( literal 0 HcmV?d00001 diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin50.spk b/newt/colormaps/berlin/DiscretePalettes/berlin50.spk new file mode 100644 index 0000000..4352560 --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin50.spk @@ -0,0 +1,50 @@ + 0.00 62.11 69.02 99.95 + 1.96 57.61 68.52 97.87 + 3.92 53.00 67.97 95.74 + 6.27 47.32 67.15 92.98 + 8.24 42.46 66.21 90.36 + 10.20 37.55 64.85 87.28 + 12.16 32.76 62.92 83.60 + 14.12 28.41 60.35 79.35 + 16.47 24.15 
56.56 73.71 + 18.43 21.47 53.04 68.82 + 20.39 19.42 49.39 63.93 + 22.35 17.74 45.71 59.10 + 24.31 16.24 42.06 54.36 + 26.67 14.56 37.75 48.78 + 28.63 13.21 34.22 44.21 + 30.59 11.89 30.75 39.72 + 32.55 10.62 27.35 35.33 + 34.51 9.41 24.03 31.03 + 36.86 8.11 20.16 26.00 + 38.82 7.24 17.08 21.94 + 40.78 6.67 14.17 18.05 + 42.75 6.45 11.51 14.39 + 44.71 6.56 9.23 11.10 + 47.06 7.16 6.87 7.75 + 49.02 8.65 5.30 5.03 + 50.98 10.67 4.47 2.60 + 52.94 12.81 4.45 1.16 + 55.29 15.37 5.10 0.35 + 57.25 17.78 5.60 0.15 + 59.22 20.38 6.05 0.13 + 61.18 23.09 6.63 0.19 + 63.14 25.92 7.36 0.34 + 65.49 29.55 8.51 0.83 + 67.45 32.87 9.84 1.78 + 69.41 36.56 11.67 3.46 + 71.37 40.62 14.08 5.78 + 73.33 44.91 17.03 8.46 + 75.69 50.03 21.04 12.55 + 77.65 54.14 24.58 16.42 + 79.61 58.11 28.17 20.52 + 81.57 62.00 31.79 24.76 + 83.53 65.87 35.44 29.09 + 85.88 70.54 39.88 34.38 + 87.84 74.49 43.63 38.86 + 89.80 78.49 47.44 43.41 + 91.76 82.54 51.30 48.02 + 93.73 86.63 55.20 52.68 + 96.08 91.60 59.95 58.36 + 98.04 95.78 63.96 63.15 +100.00 99.99 68.01 68.00 diff --git a/newt/colormaps/berlin/DiscretePalettes/berlin50.txt b/newt/colormaps/berlin/DiscretePalettes/berlin50.txt new file mode 100644 index 0000000..8bd6957 --- /dev/null +++ b/newt/colormaps/berlin/DiscretePalettes/berlin50.txt @@ -0,0 +1,52 @@ +Scientific Colour Map Categorical Palette +berlin 50 Swatches +158 176 255 berlin-1 #9EB0FF +147 175 250 berlin-6 #93AFFA +135 173 244 berlin-11 #87ADF4 +121 171 237 berlin-17 #79ABED +108 169 230 berlin-22 #6CA9E6 + 96 165 223 berlin-27 #60A5DF + 84 160 213 berlin-32 #54A0D5 + 72 154 202 berlin-37 #489ACA + 62 144 188 berlin-43 #3E90BC + 55 135 175 berlin-48 #3787AF + 50 126 163 berlin-53 #327EA3 + 45 117 151 berlin-58 #2D7597 + 41 107 139 berlin-63 #296B8B + 37 96 124 berlin-69 #25607C + 34 87 113 berlin-74 #225771 + 30 78 101 berlin-79 #1E4E65 + 27 70 90 berlin-84 #1B465A + 24 61 79 berlin-89 #183D4F + 21 51 66 berlin-95 #153342 + 18 44 56 berlin-100 #122C38 + 17 36 46 berlin-105 #11242E + 
16 29 37 berlin-110 #101D25 + 17 24 28 berlin-115 #11181C + 18 18 20 berlin-121 #121214 + 22 14 13 berlin-126 #160E0D + 27 11 7 berlin-131 #1B0B07 + 33 11 3 berlin-136 #210B03 + 39 13 1 berlin-142 #270D01 + 45 14 0 berlin-147 #2D0E00 + 52 15 0 berlin-152 #340F00 + 59 17 0 berlin-157 #3B1100 + 66 19 1 berlin-162 #421301 + 75 22 2 berlin-168 #4B1602 + 84 25 5 berlin-173 #541905 + 93 30 9 berlin-178 #5D1E09 +104 36 15 berlin-183 #68240F +115 43 22 berlin-188 #732B16 +128 54 32 berlin-194 #803620 +138 63 42 berlin-199 #8A3F2A +148 72 52 berlin-204 #944834 +158 81 63 berlin-209 #9E513F +168 90 74 berlin-214 #A85A4A +180 102 88 berlin-220 #B46658 +190 111 99 berlin-225 #BE6F63 +200 121 111 berlin-230 #C8796F +210 131 122 berlin-235 #D2837A +221 141 134 berlin-240 #DD8D86 +234 153 149 berlin-246 #EA9995 +244 163 161 berlin-251 #F4A3A1 +255 173 173 berlin-256 #FFADAD diff --git a/newt/colormaps/berlin/berlin.alut b/newt/colormaps/berlin/berlin.alut new file mode 100644 index 0000000..bab1eea --- /dev/null +++ b/newt/colormaps/berlin/berlin.alut @@ -0,0 +1,256 @@ +158,176,255,255 +156,176,254,255 +154,176,253,255 +152,175,252,255 +149,175,251,255 +147,175,250,255 +145,174,249,255 +142,174,247,255 +140,174,246,255 +138,174,245,255 +135,173,244,255 +133,173,243,255 +130,173,242,255 +128,172,241,255 +126,172,240,255 +123,172,238,255 +121,171,237,255 +118,171,236,255 +116,170,235,255 +113,170,233,255 +111,169,232,255 +108,169,230,255 +106,168,229,255 +103,168,227,255 +101,167,226,255 +98,166,224,255 +96,165,223,255 +93,165,221,255 +91,164,219,255 +88,163,217,255 +86,162,215,255 +84,160,213,255 +81,159,211,255 +79,158,209,255 +77,157,207,255 +75,155,205,255 +72,154,202,255 +70,152,200,255 +68,151,198,255 +67,149,195,255 +65,148,193,255 +63,146,190,255 +62,144,188,255 +60,142,185,255 +59,141,183,255 +57,139,180,255 +56,137,178,255 +55,135,175,255 +54,133,173,255 +53,132,170,255 +51,130,168,255 +50,128,166,255 +50,126,163,255 +49,124,161,255 +48,122,158,255 +47,120,156,255 
+46,118,153,255 +45,117,151,255 +44,115,148,255 +44,113,146,255 +43,111,143,255 +42,109,141,255 +41,107,139,255 +41,105,136,255 +40,104,134,255 +39,102,131,255 +39,100,129,255 +38,98,127,255 +37,96,124,255 +36,94,122,255 +36,93,120,255 +35,91,117,255 +34,89,115,255 +34,87,113,255 +33,85,110,255 +32,84,108,255 +32,82,106,255 +31,80,104,255 +30,78,101,255 +30,77,99,255 +29,75,97,255 +28,73,95,255 +28,71,92,255 +27,70,90,255 +26,68,88,255 +26,66,86,255 +25,65,83,255 +25,63,81,255 +24,61,79,255 +23,60,77,255 +23,58,75,255 +22,56,73,255 +22,55,71,255 +21,53,68,255 +21,51,66,255 +20,50,64,255 +20,48,62,255 +19,47,60,255 +19,45,58,255 +18,44,56,255 +18,42,54,255 +18,41,52,255 +17,39,50,255 +17,38,48,255 +17,36,46,255 +17,35,44,255 +17,33,42,255 +17,32,40,255 +16,31,38,255 +16,29,37,255 +16,28,35,255 +17,27,33,255 +17,26,32,255 +17,25,30,255 +17,24,28,255 +17,22,27,255 +17,21,25,255 +17,20,24,255 +17,19,23,255 +18,18,21,255 +18,18,20,255 +19,17,18,255 +20,16,17,255 +20,15,16,255 +21,14,14,255 +22,14,13,255 +23,13,11,255 +24,12,10,255 +25,12,9,255 +26,12,8,255 +27,11,7,255 +28,11,6,255 +29,11,5,255 +30,11,4,255 +32,11,4,255 +33,11,3,255 +34,12,2,255 +35,12,2,255 +36,12,2,255 +37,12,1,255 +38,13,1,255 +39,13,1,255 +40,13,1,255 +42,14,1,255 +43,14,1,255 +44,14,0,255 +45,14,0,255 +47,14,0,255 +48,15,0,255 +49,15,0,255 +51,15,0,255 +52,15,0,255 +53,16,0,255 +55,16,0,255 +56,16,0,255 +57,17,0,255 +59,17,0,255 +60,17,1,255 +62,18,1,255 +63,18,1,255 +65,18,1,255 +66,19,1,255 +68,19,1,255 +69,20,1,255 +71,20,1,255 +72,21,2,255 +74,21,2,255 +75,22,2,255 +77,22,2,255 +79,23,3,255 +80,24,3,255 +82,24,4,255 +84,25,5,255 +86,26,5,255 +87,27,6,255 +89,28,7,255 +91,29,8,255 +93,30,9,255 +95,31,10,255 +97,32,11,255 +99,33,12,255 +101,35,14,255 +104,36,15,255 +106,37,16,255 +108,39,17,255 +110,40,19,255 +112,42,20,255 +115,43,22,255 +117,45,23,255 +119,47,25,255 +121,48,27,255 +123,50,28,255 +125,52,30,255 +128,54,32,255 +130,55,34,255 +132,57,36,255 +134,59,38,255 +136,61,40,255 
+138,63,42,255 +140,64,44,255 +142,66,46,255 +144,68,48,255 +146,70,50,255 +148,72,52,255 +150,74,54,255 +152,76,57,255 +154,77,59,255 +156,79,61,255 +158,81,63,255 +160,83,65,255 +162,85,68,255 +164,87,70,255 +166,89,72,255 +168,90,74,255 +170,92,76,255 +172,94,79,255 +174,96,81,255 +176,98,83,255 +178,100,85,255 +180,102,88,255 +182,104,90,255 +184,106,92,255 +186,107,95,255 +188,109,97,255 +190,111,99,255 +192,113,101,255 +194,115,104,255 +196,117,106,255 +198,119,108,255 +200,121,111,255 +202,123,113,255 +204,125,115,255 +206,127,118,255 +208,129,120,255 +210,131,122,255 +213,133,125,255 +215,135,127,255 +217,137,130,255 +219,139,132,255 +221,141,134,255 +223,143,137,255 +225,145,139,255 +227,147,142,255 +229,149,144,255 +231,151,146,255 +234,153,149,255 +236,155,151,255 +238,157,154,255 +240,159,156,255 +242,161,159,255 +244,163,161,255 +246,165,163,255 +249,167,166,255 +251,169,168,255 +253,171,171,255 +255,173,173,255 diff --git a/newt/colormaps/berlin/berlin.clm b/newt/colormaps/berlin/berlin.clm new file mode 100644 index 0000000..7f88325 --- /dev/null +++ b/newt/colormaps/berlin/berlin.clm @@ -0,0 +1,201 @@ +158 176 255 +155 176 254 +153 175 252 +150 175 251 +147 175 250 +144 174 248 +141 174 246 +138 174 245 +135 173 244 +132 173 243 +129 172 241 +126 172 240 +122 172 238 +119 171 236 +116 170 235 +113 170 233 +110 169 231 +107 168 229 +103 168 227 +100 167 226 +97 166 224 +94 165 221 +91 164 219 +87 163 216 +85 161 214 +81 159 211 +79 158 209 +76 156 206 +73 154 203 +70 152 200 +68 151 197 +66 148 194 +63 146 191 +62 144 188 +60 142 184 +58 140 181 +56 137 178 +55 135 175 +54 133 172 +52 131 169 +50 128 166 +50 125 162 +48 123 159 +47 120 156 +46 118 153 +45 116 150 +44 114 147 +43 111 143 +42 109 141 +41 106 138 +40 104 135 +39 102 131 +39 99 128 +37 97 125 +36 94 122 +36 93 120 +35 90 116 +34 88 114 +33 85 110 +32 84 108 +32 81 105 +30 78 102 +30 77 99 +29 74 96 +28 72 93 +27 70 90 +26 68 88 +26 66 85 +25 64 82 +24 61 79 +23 60 77 +22 57 74 +22 55 71 
+21 53 68 +21 51 65 +20 49 63 +19 47 60 +19 45 58 +18 43 55 +18 41 53 +17 39 50 +17 37 47 +17 35 45 +17 33 42 +17 32 40 +16 30 38 +16 28 36 +17 27 33 +17 26 32 +17 25 29 +17 23 27 +17 21 25 +17 20 24 +18 18 22 +18 18 20 +19 17 18 +20 16 17 +21 14 15 +22 14 13 +23 13 11 +25 12 10 +26 12 8 +27 11 7 +28 11 6 +30 11 4 +32 11 4 +33 11 3 +34 12 2 +36 12 2 +37 12 1 +38 13 1 +40 13 1 +42 14 1 +43 14 1 +44 14 0 +46 14 0 +48 15 0 +49 15 0 +51 15 0 +53 16 0 +55 16 0 +56 16 0 +58 17 0 +60 17 1 +62 18 1 +64 18 1 +66 19 1 +68 19 1 +69 20 1 +71 20 1 +74 21 2 +75 22 2 +78 22 2 +80 24 3 +82 24 4 +84 25 5 +86 26 5 +88 28 7 +91 29 8 +93 30 9 +96 32 11 +99 33 12 +101 35 14 +105 36 15 +107 38 17 +110 40 19 +112 42 20 +116 44 22 +118 46 24 +121 48 27 +124 51 29 +127 53 31 +130 55 34 +132 57 36 +135 60 39 +137 62 41 +140 64 44 +142 66 46 +145 69 49 +147 71 51 +150 74 54 +153 76 58 +155 78 60 +158 81 63 +160 83 65 +163 86 69 +165 88 71 +168 90 74 +170 92 77 +173 95 80 +176 98 83 +178 100 85 +181 103 89 +183 105 91 +186 107 95 +188 109 97 +191 112 100 +193 114 103 +196 117 106 +198 119 109 +201 122 112 +204 125 115 +206 127 118 +209 130 121 +212 132 124 +215 135 127 +217 137 130 +220 140 133 +222 142 136 +225 145 139 +228 148 143 +230 150 145 +233 153 148 +236 155 151 +239 158 155 +241 160 158 +244 163 161 +247 165 164 +250 168 167 +252 170 170 +255 173 173 diff --git a/newt/colormaps/berlin/berlin.clr b/newt/colormaps/berlin/berlin.clr new file mode 100644 index 0000000..0dc0dbd --- /dev/null +++ b/newt/colormaps/berlin/berlin.clr @@ -0,0 +1,102 @@ +ColorMap 2 1 +0 158 176 255 255 +1 153 175 252 255 +2 147 175 250 255 +3 141 174 246 255 +4 135 173 244 255 +5 129 172 241 255 +6 122 172 238 255 +7 116 170 235 255 +8 110 169 231 255 +9 103 168 227 255 +10 97 166 224 255 +11 91 164 219 255 +12 85 161 214 255 +13 79 158 209 255 +14 73 154 203 255 +15 68 151 197 255 +16 63 146 191 255 +17 60 142 184 255 +18 56 137 178 255 +19 54 133 172 255 +20 50 128 166 255 +21 48 123 159 255 +22 46 118 153 
255 +23 44 114 147 255 +24 42 109 141 255 +25 40 104 135 255 +26 39 99 128 255 +27 36 94 122 255 +28 35 90 116 255 +29 33 85 110 255 +30 32 81 105 255 +31 30 77 99 255 +32 28 72 93 255 +33 26 68 88 255 +34 25 64 82 255 +35 23 60 77 255 +36 22 55 71 255 +37 21 51 65 255 +38 19 47 60 255 +39 18 43 55 255 +40 17 39 50 255 +41 17 35 45 255 +42 17 32 40 255 +43 16 28 36 255 +44 17 26 32 255 +45 17 23 27 255 +46 17 20 24 255 +47 18 18 20 255 +48 20 16 17 255 +49 22 14 13 255 +50 25 12 10 255 +51 27 11 7 255 +52 30 11 4 255 +53 33 11 3 255 +54 36 12 2 255 +55 38 13 1 255 +56 42 14 1 255 +57 44 14 0 255 +58 48 15 0 255 +59 51 15 0 255 +60 55 16 0 255 +61 58 17 0 255 +62 62 18 1 255 +63 66 19 1 255 +64 69 20 1 255 +65 74 21 2 255 +66 78 22 2 255 +67 82 24 4 255 +68 86 26 5 255 +69 91 29 8 255 +70 96 32 11 255 +71 101 35 14 255 +72 107 38 17 255 +73 112 42 20 255 +74 118 46 24 255 +75 124 51 29 255 +76 130 55 34 255 +77 135 60 39 255 +78 140 64 44 255 +79 145 69 49 255 +80 150 74 54 255 +81 155 78 60 255 +82 160 83 65 255 +83 165 88 71 255 +84 170 92 77 255 +85 176 98 83 255 +86 181 103 89 255 +87 186 107 95 255 +88 191 112 100 255 +89 196 117 106 255 +90 201 122 112 255 +91 206 127 118 255 +92 212 132 124 255 +93 217 137 130 255 +94 222 142 136 255 +95 228 148 143 255 +96 233 153 148 255 +97 239 158 155 255 +98 244 163 161 255 +99 250 168 167 255 +100 255 173 173 255 diff --git a/newt/colormaps/berlin/berlin.cpt b/newt/colormaps/berlin/berlin.cpt new file mode 100644 index 0000000..fbdb8a4 --- /dev/null +++ b/newt/colormaps/berlin/berlin.cpt @@ -0,0 +1,261 @@ +# +# berlin +# www.fabiocrameri.ch/colourmaps +0.000000 158 176 255 0.003922 156 176 254 +0.003922 156 176 254 0.007843 154 176 253 +0.007843 154 176 253 0.011765 152 175 252 +0.011765 152 175 252 0.015686 149 175 251 +0.015686 149 175 251 0.019608 147 175 250 +0.019608 147 175 250 0.023529 145 174 249 +0.023529 145 174 249 0.027451 142 174 247 +0.027451 142 174 247 0.031373 140 174 246 +0.031373 140 174 246 0.035294 
138 174 245 +0.035294 138 174 245 0.039216 135 173 244 +0.039216 135 173 244 0.043137 133 173 243 +0.043137 133 173 243 0.047059 130 173 242 +0.047059 130 173 242 0.050980 128 172 241 +0.050980 128 172 241 0.054902 126 172 240 +0.054902 126 172 240 0.058824 123 172 238 +0.058824 123 172 238 0.062745 121 171 237 +0.062745 121 171 237 0.066667 118 171 236 +0.066667 118 171 236 0.070588 116 170 235 +0.070588 116 170 235 0.074510 113 170 233 +0.074510 113 170 233 0.078431 111 169 232 +0.078431 111 169 232 0.082353 108 169 230 +0.082353 108 169 230 0.086275 106 168 229 +0.086275 106 168 229 0.090196 103 168 227 +0.090196 103 168 227 0.094118 101 167 226 +0.094118 101 167 226 0.098039 98 166 224 +0.098039 98 166 224 0.101961 96 165 223 +0.101961 96 165 223 0.105882 93 165 221 +0.105882 93 165 221 0.109804 91 164 219 +0.109804 91 164 219 0.113725 88 163 217 +0.113725 88 163 217 0.117647 86 162 215 +0.117647 86 162 215 0.121569 84 160 213 +0.121569 84 160 213 0.125490 81 159 211 +0.125490 81 159 211 0.129412 79 158 209 +0.129412 79 158 209 0.133333 77 157 207 +0.133333 77 157 207 0.137255 75 155 205 +0.137255 75 155 205 0.141176 72 154 202 +0.141176 72 154 202 0.145098 70 152 200 +0.145098 70 152 200 0.149020 68 151 198 +0.149020 68 151 198 0.152941 67 149 195 +0.152941 67 149 195 0.156863 65 148 193 +0.156863 65 148 193 0.160784 63 146 190 +0.160784 63 146 190 0.164706 62 144 188 +0.164706 62 144 188 0.168627 60 142 185 +0.168627 60 142 185 0.172549 59 141 183 +0.172549 59 141 183 0.176471 57 139 180 +0.176471 57 139 180 0.180392 56 137 178 +0.180392 56 137 178 0.184314 55 135 175 +0.184314 55 135 175 0.188235 54 133 173 +0.188235 54 133 173 0.192157 53 132 170 +0.192157 53 132 170 0.196078 51 130 168 +0.196078 51 130 168 0.200000 50 128 166 +0.200000 50 128 166 0.203922 50 126 163 +0.203922 50 126 163 0.207843 49 124 161 +0.207843 49 124 161 0.211765 48 122 158 +0.211765 48 122 158 0.215686 47 120 156 +0.215686 47 120 156 0.219608 46 118 153 +0.219608 46 118 153 0.223529 
45 117 151 +0.223529 45 117 151 0.227451 44 115 148 +0.227451 44 115 148 0.231373 44 113 146 +0.231373 44 113 146 0.235294 43 111 143 +0.235294 43 111 143 0.239216 42 109 141 +0.239216 42 109 141 0.243137 41 107 139 +0.243137 41 107 139 0.247059 41 105 136 +0.247059 41 105 136 0.250980 40 104 134 +0.250980 40 104 134 0.254902 39 102 131 +0.254902 39 102 131 0.258824 39 100 129 +0.258824 39 100 129 0.262745 38 98 127 +0.262745 38 98 127 0.266667 37 96 124 +0.266667 37 96 124 0.270588 36 94 122 +0.270588 36 94 122 0.274510 36 93 120 +0.274510 36 93 120 0.278431 35 91 117 +0.278431 35 91 117 0.282353 34 89 115 +0.282353 34 89 115 0.286275 34 87 113 +0.286275 34 87 113 0.290196 33 85 110 +0.290196 33 85 110 0.294118 32 84 108 +0.294118 32 84 108 0.298039 32 82 106 +0.298039 32 82 106 0.301961 31 80 104 +0.301961 31 80 104 0.305882 30 78 101 +0.305882 30 78 101 0.309804 30 77 99 +0.309804 30 77 99 0.313725 29 75 97 +0.313725 29 75 97 0.317647 28 73 95 +0.317647 28 73 95 0.321569 28 71 92 +0.321569 28 71 92 0.325490 27 70 90 +0.325490 27 70 90 0.329412 26 68 88 +0.329412 26 68 88 0.333333 26 66 86 +0.333333 26 66 86 0.337255 25 65 83 +0.337255 25 65 83 0.341176 25 63 81 +0.341176 25 63 81 0.345098 24 61 79 +0.345098 24 61 79 0.349020 23 60 77 +0.349020 23 60 77 0.352941 23 58 75 +0.352941 23 58 75 0.356863 22 56 73 +0.356863 22 56 73 0.360784 22 55 71 +0.360784 22 55 71 0.364706 21 53 68 +0.364706 21 53 68 0.368627 21 51 66 +0.368627 21 51 66 0.372549 20 50 64 +0.372549 20 50 64 0.376471 20 48 62 +0.376471 20 48 62 0.380392 19 47 60 +0.380392 19 47 60 0.384314 19 45 58 +0.384314 19 45 58 0.388235 18 44 56 +0.388235 18 44 56 0.392157 18 42 54 +0.392157 18 42 54 0.396078 18 41 52 +0.396078 18 41 52 0.400000 17 39 50 +0.400000 17 39 50 0.403922 17 38 48 +0.403922 17 38 48 0.407843 17 36 46 +0.407843 17 36 46 0.411765 17 35 44 +0.411765 17 35 44 0.415686 17 33 42 +0.415686 17 33 42 0.419608 17 32 40 +0.419608 17 32 40 0.423529 16 31 38 +0.423529 16 31 38 0.427451 16 29 37 
+0.427451 16 29 37 0.431373 16 28 35 +0.431373 16 28 35 0.435294 17 27 33 +0.435294 17 27 33 0.439216 17 26 32 +0.439216 17 26 32 0.443137 17 25 30 +0.443137 17 25 30 0.447059 17 24 28 +0.447059 17 24 28 0.450980 17 22 27 +0.450980 17 22 27 0.454902 17 21 25 +0.454902 17 21 25 0.458824 17 20 24 +0.458824 17 20 24 0.462745 17 19 23 +0.462745 17 19 23 0.466667 18 18 21 +0.466667 18 18 21 0.470588 18 18 20 +0.470588 18 18 20 0.474510 19 17 18 +0.474510 19 17 18 0.478431 20 16 17 +0.478431 20 16 17 0.482353 20 15 16 +0.482353 20 15 16 0.486275 21 14 14 +0.486275 21 14 14 0.490196 22 14 13 +0.490196 22 14 13 0.494118 23 13 11 +0.494118 23 13 11 0.498039 24 12 10 +0.498039 24 12 10 0.501961 25 12 9 +0.501961 25 12 9 0.505882 26 12 8 +0.505882 26 12 8 0.509804 27 11 7 +0.509804 27 11 7 0.513725 28 11 6 +0.513725 28 11 6 0.517647 29 11 5 +0.517647 29 11 5 0.521569 30 11 4 +0.521569 30 11 4 0.525490 32 11 4 +0.525490 32 11 4 0.529412 33 11 3 +0.529412 33 11 3 0.533333 34 12 2 +0.533333 34 12 2 0.537255 35 12 2 +0.537255 35 12 2 0.541176 36 12 2 +0.541176 36 12 2 0.545098 37 12 1 +0.545098 37 12 1 0.549020 38 13 1 +0.549020 38 13 1 0.552941 39 13 1 +0.552941 39 13 1 0.556863 40 13 1 +0.556863 40 13 1 0.560784 42 14 1 +0.560784 42 14 1 0.564706 43 14 1 +0.564706 43 14 1 0.568627 44 14 0 +0.568627 44 14 0 0.572549 45 14 0 +0.572549 45 14 0 0.576471 47 14 0 +0.576471 47 14 0 0.580392 48 15 0 +0.580392 48 15 0 0.584314 49 15 0 +0.584314 49 15 0 0.588235 51 15 0 +0.588235 51 15 0 0.592157 52 15 0 +0.592157 52 15 0 0.596078 53 16 0 +0.596078 53 16 0 0.600000 55 16 0 +0.600000 55 16 0 0.603922 56 16 0 +0.603922 56 16 0 0.607843 57 17 0 +0.607843 57 17 0 0.611765 59 17 0 +0.611765 59 17 0 0.615686 60 17 1 +0.615686 60 17 1 0.619608 62 18 1 +0.619608 62 18 1 0.623529 63 18 1 +0.623529 63 18 1 0.627451 65 18 1 +0.627451 65 18 1 0.631373 66 19 1 +0.631373 66 19 1 0.635294 68 19 1 +0.635294 68 19 1 0.639216 69 20 1 +0.639216 69 20 1 0.643137 71 20 1 +0.643137 71 20 1 0.647059 72 21 2 
+0.647059 72 21 2 0.650980 74 21 2 +0.650980 74 21 2 0.654902 75 22 2 +0.654902 75 22 2 0.658824 77 22 2 +0.658824 77 22 2 0.662745 79 23 3 +0.662745 79 23 3 0.666667 80 24 3 +0.666667 80 24 3 0.670588 82 24 4 +0.670588 82 24 4 0.674510 84 25 5 +0.674510 84 25 5 0.678431 86 26 5 +0.678431 86 26 5 0.682353 87 27 6 +0.682353 87 27 6 0.686275 89 28 7 +0.686275 89 28 7 0.690196 91 29 8 +0.690196 91 29 8 0.694118 93 30 9 +0.694118 93 30 9 0.698039 95 31 10 +0.698039 95 31 10 0.701961 97 32 11 +0.701961 97 32 11 0.705882 99 33 12 +0.705882 99 33 12 0.709804 101 35 14 +0.709804 101 35 14 0.713725 104 36 15 +0.713725 104 36 15 0.717647 106 37 16 +0.717647 106 37 16 0.721569 108 39 17 +0.721569 108 39 17 0.725490 110 40 19 +0.725490 110 40 19 0.729412 112 42 20 +0.729412 112 42 20 0.733333 115 43 22 +0.733333 115 43 22 0.737255 117 45 23 +0.737255 117 45 23 0.741176 119 47 25 +0.741176 119 47 25 0.745098 121 48 27 +0.745098 121 48 27 0.749020 123 50 28 +0.749020 123 50 28 0.752941 125 52 30 +0.752941 125 52 30 0.756863 128 54 32 +0.756863 128 54 32 0.760784 130 55 34 +0.760784 130 55 34 0.764706 132 57 36 +0.764706 132 57 36 0.768627 134 59 38 +0.768627 134 59 38 0.772549 136 61 40 +0.772549 136 61 40 0.776471 138 63 42 +0.776471 138 63 42 0.780392 140 64 44 +0.780392 140 64 44 0.784314 142 66 46 +0.784314 142 66 46 0.788235 144 68 48 +0.788235 144 68 48 0.792157 146 70 50 +0.792157 146 70 50 0.796078 148 72 52 +0.796078 148 72 52 0.800000 150 74 54 +0.800000 150 74 54 0.803922 152 76 57 +0.803922 152 76 57 0.807843 154 77 59 +0.807843 154 77 59 0.811765 156 79 61 +0.811765 156 79 61 0.815686 158 81 63 +0.815686 158 81 63 0.819608 160 83 65 +0.819608 160 83 65 0.823529 162 85 68 +0.823529 162 85 68 0.827451 164 87 70 +0.827451 164 87 70 0.831373 166 89 72 +0.831373 166 89 72 0.835294 168 90 74 +0.835294 168 90 74 0.839216 170 92 76 +0.839216 170 92 76 0.843137 172 94 79 +0.843137 172 94 79 0.847059 174 96 81 +0.847059 174 96 81 0.850980 176 98 83 +0.850980 176 98 83 
0.854902 178 100 85 +0.854902 178 100 85 0.858824 180 102 88 +0.858824 180 102 88 0.862745 182 104 90 +0.862745 182 104 90 0.866667 184 106 92 +0.866667 184 106 92 0.870588 186 107 95 +0.870588 186 107 95 0.874510 188 109 97 +0.874510 188 109 97 0.878431 190 111 99 +0.878431 190 111 99 0.882353 192 113 101 +0.882353 192 113 101 0.886275 194 115 104 +0.886275 194 115 104 0.890196 196 117 106 +0.890196 196 117 106 0.894118 198 119 108 +0.894118 198 119 108 0.898039 200 121 111 +0.898039 200 121 111 0.901961 202 123 113 +0.901961 202 123 113 0.905882 204 125 115 +0.905882 204 125 115 0.909804 206 127 118 +0.909804 206 127 118 0.913725 208 129 120 +0.913725 208 129 120 0.917647 210 131 122 +0.917647 210 131 122 0.921569 213 133 125 +0.921569 213 133 125 0.925490 215 135 127 +0.925490 215 135 127 0.929412 217 137 130 +0.929412 217 137 130 0.933333 219 139 132 +0.933333 219 139 132 0.937255 221 141 134 +0.937255 221 141 134 0.941176 223 143 137 +0.941176 223 143 137 0.945098 225 145 139 +0.945098 225 145 139 0.949020 227 147 142 +0.949020 227 147 142 0.952941 229 149 144 +0.952941 229 149 144 0.956863 231 151 146 +0.956863 231 151 146 0.960784 234 153 149 +0.960784 234 153 149 0.964706 236 155 151 +0.964706 236 155 151 0.968627 238 157 154 +0.968627 238 157 154 0.972549 240 159 156 +0.972549 240 159 156 0.976471 242 161 159 +0.976471 242 161 159 0.980392 244 163 161 +0.980392 244 163 161 0.984314 246 165 163 +0.984314 246 165 163 0.988235 249 167 166 +0.988235 249 167 166 0.992157 251 169 168 +0.992157 251 169 168 0.996078 253 171 171 +0.996078 253 171 171 1.000000 255 173 173 +N 255 255 255 +B 158 176 255 +F 255 173 173 diff --git a/newt/colormaps/berlin/berlin.ct b/newt/colormaps/berlin/berlin.ct new file mode 100644 index 0000000..7f8c5de --- /dev/null +++ b/newt/colormaps/berlin/berlin.ct @@ -0,0 +1,1030 @@ + + + 1.11.0 + + + 158 176 255 255 + 0 + + + 156 176 254 255 + 0.0039216 + + + 154 176 253 255 + 0.0078431 + + + 152 175 252 255 + 0.011765 + + + 149 175 251 255 
+ 0.015686 + + + 147 175 250 255 + 0.019608 + + + 145 174 249 255 + 0.023529 + + + 142 174 247 255 + 0.027451 + + + 140 174 246 255 + 0.031373 + + + 138 174 245 255 + 0.035294 + + + 135 173 244 255 + 0.039216 + + + 133 173 243 255 + 0.043137 + + + 130 173 242 255 + 0.047059 + + + 128 172 241 255 + 0.05098 + + + 126 172 240 255 + 0.054902 + + + 123 172 238 255 + 0.058824 + + + 121 171 237 255 + 0.062745 + + + 118 171 236 255 + 0.066667 + + + 116 170 235 255 + 0.070588 + + + 113 170 233 255 + 0.07451 + + + 111 169 232 255 + 0.078431 + + + 108 169 230 255 + 0.082353 + + + 106 168 229 255 + 0.086275 + + + 103 168 227 255 + 0.090196 + + + 101 167 226 255 + 0.094118 + + + 98 166 224 255 + 0.098039 + + + 96 165 223 255 + 0.10196 + + + 93 165 221 255 + 0.10588 + + + 91 164 219 255 + 0.1098 + + + 88 163 217 255 + 0.11373 + + + 86 162 215 255 + 0.11765 + + + 84 160 213 255 + 0.12157 + + + 81 159 211 255 + 0.12549 + + + 79 158 209 255 + 0.12941 + + + 77 157 207 255 + 0.13333 + + + 75 155 205 255 + 0.13725 + + + 72 154 202 255 + 0.14118 + + + 70 152 200 255 + 0.1451 + + + 68 151 198 255 + 0.14902 + + + 67 149 195 255 + 0.15294 + + + 65 148 193 255 + 0.15686 + + + 63 146 190 255 + 0.16078 + + + 62 144 188 255 + 0.16471 + + + 60 142 185 255 + 0.16863 + + + 59 141 183 255 + 0.17255 + + + 57 139 180 255 + 0.17647 + + + 56 137 178 255 + 0.18039 + + + 55 135 175 255 + 0.18431 + + + 54 133 173 255 + 0.18824 + + + 53 132 170 255 + 0.19216 + + + 51 130 168 255 + 0.19608 + + + 50 128 166 255 + 0.2 + + + 50 126 163 255 + 0.20392 + + + 49 124 161 255 + 0.20784 + + + 48 122 158 255 + 0.21176 + + + 47 120 156 255 + 0.21569 + + + 46 118 153 255 + 0.21961 + + + 45 117 151 255 + 0.22353 + + + 44 115 148 255 + 0.22745 + + + 44 113 146 255 + 0.23137 + + + 43 111 143 255 + 0.23529 + + + 42 109 141 255 + 0.23922 + + + 41 107 139 255 + 0.24314 + + + 41 105 136 255 + 0.24706 + + + 40 104 134 255 + 0.25098 + + + 39 102 131 255 + 0.2549 + + + 39 100 129 255 + 0.25882 + + + 38 98 127 255 + 0.26275 + + 
+ 37 96 124 255 + 0.26667 + + + 36 94 122 255 + 0.27059 + + + 36 93 120 255 + 0.27451 + + + 35 91 117 255 + 0.27843 + + + 34 89 115 255 + 0.28235 + + + 34 87 113 255 + 0.28627 + + + 33 85 110 255 + 0.2902 + + + 32 84 108 255 + 0.29412 + + + 32 82 106 255 + 0.29804 + + + 31 80 104 255 + 0.30196 + + + 30 78 101 255 + 0.30588 + + + 30 77 99 255 + 0.3098 + + + 29 75 97 255 + 0.31373 + + + 28 73 95 255 + 0.31765 + + + 28 71 92 255 + 0.32157 + + + 27 70 90 255 + 0.32549 + + + 26 68 88 255 + 0.32941 + + + 26 66 86 255 + 0.33333 + + + 25 65 83 255 + 0.33725 + + + 25 63 81 255 + 0.34118 + + + 24 61 79 255 + 0.3451 + + + 23 60 77 255 + 0.34902 + + + 23 58 75 255 + 0.35294 + + + 22 56 73 255 + 0.35686 + + + 22 55 71 255 + 0.36078 + + + 21 53 68 255 + 0.36471 + + + 21 51 66 255 + 0.36863 + + + 20 50 64 255 + 0.37255 + + + 20 48 62 255 + 0.37647 + + + 19 47 60 255 + 0.38039 + + + 19 45 58 255 + 0.38431 + + + 18 44 56 255 + 0.38824 + + + 18 42 54 255 + 0.39216 + + + 18 41 52 255 + 0.39608 + + + 17 39 50 255 + 0.4 + + + 17 38 48 255 + 0.40392 + + + 17 36 46 255 + 0.40784 + + + 17 35 44 255 + 0.41176 + + + 17 33 42 255 + 0.41569 + + + 17 32 40 255 + 0.41961 + + + 16 31 38 255 + 0.42353 + + + 16 29 37 255 + 0.42745 + + + 16 28 35 255 + 0.43137 + + + 17 27 33 255 + 0.43529 + + + 17 26 32 255 + 0.43922 + + + 17 25 30 255 + 0.44314 + + + 17 24 28 255 + 0.44706 + + + 17 22 27 255 + 0.45098 + + + 17 21 25 255 + 0.4549 + + + 17 20 24 255 + 0.45882 + + + 17 19 23 255 + 0.46275 + + + 18 18 21 255 + 0.46667 + + + 18 18 20 255 + 0.47059 + + + 19 17 18 255 + 0.47451 + + + 20 16 17 255 + 0.47843 + + + 20 15 16 255 + 0.48235 + + + 21 14 14 255 + 0.48627 + + + 22 14 13 255 + 0.4902 + + + 23 13 11 255 + 0.49412 + + + 24 12 10 255 + 0.49804 + + + 25 12 9 255 + 0.50196 + + + 26 12 8 255 + 0.50588 + + + 27 11 7 255 + 0.5098 + + + 28 11 6 255 + 0.51373 + + + 29 11 5 255 + 0.51765 + + + 30 11 4 255 + 0.52157 + + + 32 11 4 255 + 0.52549 + + + 33 11 3 255 + 0.52941 + + + 34 12 2 255 + 0.53333 + + + 35 
12 2 255 + 0.53725 + + + 36 12 2 255 + 0.54118 + + + 37 12 1 255 + 0.5451 + + + 38 13 1 255 + 0.54902 + + + 39 13 1 255 + 0.55294 + + + 40 13 1 255 + 0.55686 + + + 42 14 1 255 + 0.56078 + + + 43 14 1 255 + 0.56471 + + + 44 14 0 255 + 0.56863 + + + 45 14 0 255 + 0.57255 + + + 47 14 0 255 + 0.57647 + + + 48 15 0 255 + 0.58039 + + + 49 15 0 255 + 0.58431 + + + 51 15 0 255 + 0.58824 + + + 52 15 0 255 + 0.59216 + + + 53 16 0 255 + 0.59608 + + + 55 16 0 255 + 0.6 + + + 56 16 0 255 + 0.60392 + + + 57 17 0 255 + 0.60784 + + + 59 17 0 255 + 0.61176 + + + 60 17 1 255 + 0.61569 + + + 62 18 1 255 + 0.61961 + + + 63 18 1 255 + 0.62353 + + + 65 18 1 255 + 0.62745 + + + 66 19 1 255 + 0.63137 + + + 68 19 1 255 + 0.63529 + + + 69 20 1 255 + 0.63922 + + + 71 20 1 255 + 0.64314 + + + 72 21 2 255 + 0.64706 + + + 74 21 2 255 + 0.65098 + + + 75 22 2 255 + 0.6549 + + + 77 22 2 255 + 0.65882 + + + 79 23 3 255 + 0.66275 + + + 80 24 3 255 + 0.66667 + + + 82 24 4 255 + 0.67059 + + + 84 25 5 255 + 0.67451 + + + 86 26 5 255 + 0.67843 + + + 87 27 6 255 + 0.68235 + + + 89 28 7 255 + 0.68627 + + + 91 29 8 255 + 0.6902 + + + 93 30 9 255 + 0.69412 + + + 95 31 10 255 + 0.69804 + + + 97 32 11 255 + 0.70196 + + + 99 33 12 255 + 0.70588 + + + 101 35 14 255 + 0.7098 + + + 104 36 15 255 + 0.71373 + + + 106 37 16 255 + 0.71765 + + + 108 39 17 255 + 0.72157 + + + 110 40 19 255 + 0.72549 + + + 112 42 20 255 + 0.72941 + + + 115 43 22 255 + 0.73333 + + + 117 45 23 255 + 0.73725 + + + 119 47 25 255 + 0.74118 + + + 121 48 27 255 + 0.7451 + + + 123 50 28 255 + 0.74902 + + + 125 52 30 255 + 0.75294 + + + 128 54 32 255 + 0.75686 + + + 130 55 34 255 + 0.76078 + + + 132 57 36 255 + 0.76471 + + + 134 59 38 255 + 0.76863 + + + 136 61 40 255 + 0.77255 + + + 138 63 42 255 + 0.77647 + + + 140 64 44 255 + 0.78039 + + + 142 66 46 255 + 0.78431 + + + 144 68 48 255 + 0.78824 + + + 146 70 50 255 + 0.79216 + + + 148 72 52 255 + 0.79608 + + + 150 74 54 255 + 0.8 + + + 152 76 57 255 + 0.80392 + + + 154 77 59 255 + 0.80784 + + + 
156 79 61 255 + 0.81176 + + + 158 81 63 255 + 0.81569 + + + 160 83 65 255 + 0.81961 + + + 162 85 68 255 + 0.82353 + + + 164 87 70 255 + 0.82745 + + + 166 89 72 255 + 0.83137 + + + 168 90 74 255 + 0.83529 + + + 170 92 76 255 + 0.83922 + + + 172 94 79 255 + 0.84314 + + + 174 96 81 255 + 0.84706 + + + 176 98 83 255 + 0.85098 + + + 178 100 85 255 + 0.8549 + + + 180 102 88 255 + 0.85882 + + + 182 104 90 255 + 0.86275 + + + 184 106 92 255 + 0.86667 + + + 186 107 95 255 + 0.87059 + + + 188 109 97 255 + 0.87451 + + + 190 111 99 255 + 0.87843 + + + 192 113 101 255 + 0.88235 + + + 194 115 104 255 + 0.88627 + + + 196 117 106 255 + 0.8902 + + + 198 119 108 255 + 0.89412 + + + 200 121 111 255 + 0.89804 + + + 202 123 113 255 + 0.90196 + + + 204 125 115 255 + 0.90588 + + + 206 127 118 255 + 0.9098 + + + 208 129 120 255 + 0.91373 + + + 210 131 122 255 + 0.91765 + + + 213 133 125 255 + 0.92157 + + + 215 135 127 255 + 0.92549 + + + 217 137 130 255 + 0.92941 + + + 219 139 132 255 + 0.93333 + + + 221 141 134 255 + 0.93725 + + + 223 143 137 255 + 0.94118 + + + 225 145 139 255 + 0.9451 + + + 227 147 142 255 + 0.94902 + + + 229 149 144 255 + 0.95294 + + + 231 151 146 255 + 0.95686 + + + 234 153 149 255 + 0.96078 + + + 236 155 151 255 + 0.96471 + + + 238 157 154 255 + 0.96863 + + + 240 159 156 255 + 0.97255 + + + 242 161 159 255 + 0.97647 + + + 244 163 161 255 + 0.98039 + + + 246 165 163 255 + 0.98431 + + + 249 167 166 255 + 0.98824 + + + 251 169 168 255 + 0.99216 + + + 253 171 171 255 + 0.99608 + + + 255 173 173 255 + 1 + + + \ No newline at end of file diff --git a/newt/colormaps/berlin/berlin.lut b/newt/colormaps/berlin/berlin.lut new file mode 100644 index 0000000..da6f763 --- /dev/null +++ b/newt/colormaps/berlin/berlin.lut @@ -0,0 +1,256 @@ +158 176 255 +156 176 254 +154 176 253 +152 175 252 +149 175 251 +147 175 250 +145 174 249 +142 174 247 +140 174 246 +138 174 245 +135 173 244 +133 173 243 +130 173 242 +128 172 241 +126 172 240 +123 172 238 +121 171 237 +118 171 236 +116 170 235 
+113 170 233 +111 169 232 +108 169 230 +106 168 229 +103 168 227 +101 167 226 +98 166 224 +96 165 223 +93 165 221 +91 164 219 +88 163 217 +86 162 215 +84 160 213 +81 159 211 +79 158 209 +77 157 207 +75 155 205 +72 154 202 +70 152 200 +68 151 198 +67 149 195 +65 148 193 +63 146 190 +62 144 188 +60 142 185 +59 141 183 +57 139 180 +56 137 178 +55 135 175 +54 133 173 +53 132 170 +51 130 168 +50 128 166 +50 126 163 +49 124 161 +48 122 158 +47 120 156 +46 118 153 +45 117 151 +44 115 148 +44 113 146 +43 111 143 +42 109 141 +41 107 139 +41 105 136 +40 104 134 +39 102 131 +39 100 129 +38 98 127 +37 96 124 +36 94 122 +36 93 120 +35 91 117 +34 89 115 +34 87 113 +33 85 110 +32 84 108 +32 82 106 +31 80 104 +30 78 101 +30 77 99 +29 75 97 +28 73 95 +28 71 92 +27 70 90 +26 68 88 +26 66 86 +25 65 83 +25 63 81 +24 61 79 +23 60 77 +23 58 75 +22 56 73 +22 55 71 +21 53 68 +21 51 66 +20 50 64 +20 48 62 +19 47 60 +19 45 58 +18 44 56 +18 42 54 +18 41 52 +17 39 50 +17 38 48 +17 36 46 +17 35 44 +17 33 42 +17 32 40 +16 31 38 +16 29 37 +16 28 35 +17 27 33 +17 26 32 +17 25 30 +17 24 28 +17 22 27 +17 21 25 +17 20 24 +17 19 23 +18 18 21 +18 18 20 +19 17 18 +20 16 17 +20 15 16 +21 14 14 +22 14 13 +23 13 11 +24 12 10 +25 12 9 +26 12 8 +27 11 7 +28 11 6 +29 11 5 +30 11 4 +32 11 4 +33 11 3 +34 12 2 +35 12 2 +36 12 2 +37 12 1 +38 13 1 +39 13 1 +40 13 1 +42 14 1 +43 14 1 +44 14 0 +45 14 0 +47 14 0 +48 15 0 +49 15 0 +51 15 0 +52 15 0 +53 16 0 +55 16 0 +56 16 0 +57 17 0 +59 17 0 +60 17 1 +62 18 1 +63 18 1 +65 18 1 +66 19 1 +68 19 1 +69 20 1 +71 20 1 +72 21 2 +74 21 2 +75 22 2 +77 22 2 +79 23 3 +80 24 3 +82 24 4 +84 25 5 +86 26 5 +87 27 6 +89 28 7 +91 29 8 +93 30 9 +95 31 10 +97 32 11 +99 33 12 +101 35 14 +104 36 15 +106 37 16 +108 39 17 +110 40 19 +112 42 20 +115 43 22 +117 45 23 +119 47 25 +121 48 27 +123 50 28 +125 52 30 +128 54 32 +130 55 34 +132 57 36 +134 59 38 +136 61 40 +138 63 42 +140 64 44 +142 66 46 +144 68 48 +146 70 50 +148 72 52 +150 74 54 +152 76 57 +154 77 59 +156 79 61 +158 81 63 +160 83 
65 +162 85 68 +164 87 70 +166 89 72 +168 90 74 +170 92 76 +172 94 79 +174 96 81 +176 98 83 +178 100 85 +180 102 88 +182 104 90 +184 106 92 +186 107 95 +188 109 97 +190 111 99 +192 113 101 +194 115 104 +196 117 106 +198 119 108 +200 121 111 +202 123 113 +204 125 115 +206 127 118 +208 129 120 +210 131 122 +213 133 125 +215 135 127 +217 137 130 +219 139 132 +221 141 134 +223 143 137 +225 145 139 +227 147 142 +229 149 144 +231 151 146 +234 153 149 +236 155 151 +238 157 154 +240 159 156 +242 161 159 +244 163 161 +246 165 163 +249 167 166 +251 169 168 +253 171 171 +255 173 173 diff --git a/newt/colormaps/berlin/berlin.mat b/newt/colormaps/berlin/berlin.mat new file mode 100644 index 0000000000000000000000000000000000000000..c405576a51c7847fe35d4a6ac582fc76aa6284f3 GIT binary patch literal 6023 zcma*bV_X~#z`*fZwr$(4Rm<)cFWYw4vaz__g|+?D(k;7%%UG^uceyU}dEPwlpYN;R z=cgs7uPG-_FT%@DuO+9?Nur-vlH zzLP&az}AmmP?%moM3P@bQdojskY7N8{{IW%{~)Sq;r(yFlOiC13VD_SKW2Nr$X+Tq zt8zTrwb<#8?)0<2otw(oI77r8@SF-M6Ux1C#<}N@l>fPSQpt49a57xM_+Jv6IO09S13uh{KuH) z-3rVEWA48;N;%NoDYX5G;R=S}IcFA1^Z7kr?muLC`P=Xv90Nji*OtG2Y|QT{bCqUy zB)dDUI~h`L=|&EQdAK?CdL8Y2<@IXsaVMcYQ9QI&)jkf0dfRic9s^=_$+)|08>{q1 zjZwTe0wvQu3a&0p4Wtx<-J8lTlZwwfu^6U_8Mhq-HRB2_{$8a!T#E8=n&}^~{OZ~oP(#vY z8m&jo3s6R_7VvtTE1-wFAg70iKmFB>{S~+-_i@O;>9?y};`y&ZoXyI&qPuJewSL!k zA#=psH77lZcg(IYm!ll&kPpJYT`QDl$NGv3tSUl(QGa0dvEle>Zv?7*hcC?uzH~6< zdO}*UvvCQ}nitBpFzrO&drXq@)EZl>MRRweyZ;7OH4)yh)v@yB_Z=skS2 z>iZnzc#=-oQBifM2wGFM>$vqX5~&ouy}H+re%kFuIrkqH^wRw&_Oo4_rup{Al{+QO zYILE(OTSERMX%clK|xIk29$MpxN%jCxE#P0H|r8RH9TM>5$aQDhkIdM~1)S=M4h#uvpw zzQ(!75?Z3|@6>-f@a2(B*H%zgJr$ALWs?g=+2qIJl^w1m1WA2kt;tUb zl7Yk*mN0!UqB{-r&Was3nJPe&5Q!d%MX!pcDnt_~UVoVxGqz28kre0*%FEdtbj?kb zc6}y(#B;PNss|>dDze^XQ@2!{jmQIuMg9Z@iXI8__yvwqJ6)%gIaKZMpiV}#PVwW} zHLim#r4DwbbVj(Y!VnXrz(TSrr2QWQE6#9d@$ip(rijJUGbXJKa;6w}Bi;p|3dKsQ zX|U$0LbIQ;4@y#yE3%q!E8wpOXVpsBbMIiuokOPE7Ln+U z0nb2_K%DmwiW#Ptf4c)*hy6nax-*WZ-$ji-66J43X@u8Ko1VJHaz5KCOeYwck#0;( 
zLRV0s#6f+eC87`WOAp0`&PZiEMmonPQo1l;PdAN6{OoS9hDEOACGSp&m@5RSl^NHc zpN0c6QQSaR^53WFwtGmA3Y&xW0DQYw+b$1URS4QZff0~&BdvC1^kXmSq`>0fV%Mnt zE40fY^g$xTH}o5Cx~B!1-ABfUl+!9OsA@7|)p8GB3G~g+=;d^PskAiQBk)>~~)4blc>IN(|cBDcWzWi~5PbzC$B zYa2Un?L}0<+0m-AZmk7r71gTcZZI3|5EY^)Y!pc;iO^H)YFuH0M&GJ)jTXd3r0{WF z+S3R>O2YD4}^J=okmt0)p z^l8QI(_wUbNxH%xOE}k6*}&c&)hLq5zGFC|<24XGq#AnmRt`vA;a9HZ;>K0Pjb`Wh zD>W~je8kx^v3ACko~o`j)j}J2sAF-zG0k*zILr16H+kzXzNuEy1N&ec96OmtrIa3I zu7CW@4szxTx`Qa|gzk5y#l4X04mvyBuMJz}+W#k2EFD*oK?C@hGxUegThDfS#+zKw z$n*UnCAwd&@y%EOmjvFVBwB~@5x|;%At@AzGd9*URZ7G!{m|VLCm6Z^A2!D8S?01i z{Y~}74l`%>y;4sSc#weR&3t%>)!XE+K_M^)MK<-hglyf&iZC-Id>?W-d=&I&O%Us~ znh5vh(G!0uWz#%!SvWz7LHnUkA{`0gH7&R-BN6$=0pJw46MyRBl+83#5*F9 zr||SvwllxYhL1qa^znUPhMJbZKr`8|qQpeQfwB24X1`dIb;zBl@C#A?cQs77cbLOY z*=@MYif7)@n4Xqv=+I!k=&0B?8O<8f`QqB%KICkdqQl^f#OBU7`!hkiALnNK@^Pp{ zZk+X-?`-2Sa#b+CmNU(mYxIJTX2XNs1v2AvdPr8iQVe&Mt_UJ=Xq3kzNa;B@#@`ZY zB6{bYfP{%_X^0o5!X;(j&=8ery-=>+Tu4_L!Lr$j_fJXos<7m}XPw*p@b zY|z1jEDUbFrMqXelQxI_;EX9Dc5Y`yv_5KP?^9M1T%~M!eFU-*C*OI3Ju}}n?gq6uL&+lqK_)}U`$K1+!L$$DnU7~|HSzA2Up6Vl# z?wNd3iX%A#QtF^-r80vF5aFOhq2#QD&mHoL`ToAh6F## zfXp^}E*!P_9A1~iEoYCQ@jb6TaR$uQ&ZA96k(w+DGPkfqV$k@(#m|(T}Q_ zo_ll8V65B2NGPS2MBZ+VU>2kOPm%2%NT4R^u|uneFxF%Yxct4d!;u1_?2)?G?f2hW zMF+>GmV(v#b;4gH!UEIG(rShU@TMrrj`3ixrNpXs?O1(r81)#*~D}qmxkF@@Ua{)1g%OS z%L@ELGg{DG-Szm*>zziW@~()Py{C*9d6Jdk-(LKHbBP@>KG{$MH z0?Fu-YTMQEF23VZQb|srlA}v`Uy=5#BLJWfJXOIrIt#ljd=;Qo)r^1ZdtU9wbFdnD zHHdXpLQU!h4(F`cGj5Dt44f?(FMFKVv}dLG6gzxOtL<3XrQS8|51w2b*QsB&IU@FX zb36|{Lf2pGzhJY7k;Cyo74|k2nDAz|Mjw%ZXuH&n-3Uxv!J$IumUul zMvwY&rzrsG7h`JeZG&Yel$UA}HvuSDH!+FHL?la=w`c+IgKn>{FL(dF;`vz=dSBFwYAXe{z1xCKJx~SHxj>e@nC3Pr8 zvvDv?Mag!AesLh)Z4nn)=oNja&5PY+#FD{YgK>xJ<)_j<4SlE>9Paq(NO4Jn${8>fWpIsvO9o`~YprXyp2%e;C`OroDN)$@uH>0XyD9=iW; zFL2vUH2l1!PO!bJ6-$xwqz}{ER%gIv$hWtz^e{eE%pA$I_Wvx=O*-!>AEl*d-OG8JU~= zHb5@ph4t~AA$hw$E$JKRn%$2(wY6iNO6kP6-VxV@%LC4}{uJPV%-{|NdTLN2J_80E z7@i3xjMa1#J}?dW5HaJTXNTeDp36tOI^^MLGxwKE)C0CqKnBlF(X0>BJ{_Qp$^F6&p 
zfrXEV#7Fdtc^MM<7CFtX9~}bxmOH+48DY%AE1RPZ6m-TX8uZ479nyE+`NWCc_3sCz znT-7KI(2>^-sX&)amT@Xe+2INcdUDgcc-mgykA00Gv9OD#kns&DIAQ0s4nLq|C#w8 zDBzIPEhRm%gjOYBektmac>Y1|2OY39+lx z?Hx-7?tlUshc`Shvt&Grdw#0)wI->>`?hnzU)&@@zS^OQ<0(VGDmU-)mrTK1QqII> zYXXl(UPWWjUc)#<)-mT`_sOFgwGSaTJ6aXzq=V>F!{Cf<53#asfKwX>&vi`dRlSxzAV40Xb+T0)%DBn`Yhxm_P~ zLeHhcVcl3uDp~YJR}AFxUHjL!p2l`c;dLYxc`+bEn5A1Ql_$%c39q9Mmj2eoPqBec zr7?lvRb6Sz4s4m3kP6h)EE^50MGX4^!b{8$<6>VIP|Nk9<{S>y5AV z4KrLpk`{iIzU>H7(slZ^9R+D(4GeOomF-8BGun9pW0C#RLsG`6Ze1eWO+~4bgC?Cy ziR+XU%a3Kr!Ii%I#U}M$WzeD+k&>5-y7fZL(rzGK7OY~+vM9oI3x2UoOKaHL>}|z*{=Z^bRSZs?c84Ga_ag9_PK4cN%0mxERWLemtdD7Q#akDLTV zG4E?gA2FJvb^LGH?U9Dz;)Cwtl+k&iDZfU}%w{$$HO-+!NsPK9<>!ez`({ljq%T(c znh+F0CLrZ;Mgbq-4vr4ad?pHM-&-uHrb6ING`sFtiUd*LCNKX-j=fV>Q3?o;ljkY_ z^CJpBa_e+QqnBoLoBD8HOi7SVuC+l#5b%ucBVwG;et@NYHLm{JjURuEgox1z?%8qW z7hfCiE(AA;o&Ror1GC1>2NbmUeI`yyQtZCeBOpP$v0(&|3Xixc|BQohX+a@y;2zm zU9U_U-$?rMn;H!+Jq65gpLJ?5%D6V(?j;I;Vuwpbb*ZojlmNrTWl!bbl3agYAA^Kw-hA$<(exoKSofOQGRiZ=B4%Fgd@rPM{Sg5xfrtUmW za)Dm=4W)8iNoIusbHS#iZiCltzCUr!^j!9AN-JUL`+GtQ2Z~gFo<0=Zk4v8D2 z{_MfJ->;ZaRDx}5)}J`ULEt>P^M8plIyMnU9lZ0LsFGK`7xIEEhdPxnV?l;{RZPY! 
zIKb|g(Ri{ipW8#Va&O%5CI%aQmU@n$k?Ja*ozkJ8_k(Bc(k__^7xu(U^4sVyQ_Z}N zHFSjb%nVFgoaY+_0K4uV^4GZ*4DDdd_RXOH=U`|0avxv3&N+O;je&9td4-p?E~QBa z$*7Z1lo!6grMDjq$3vk=U~prm4By{v@2;-U&+&-R0i`ao0Sa_s+?o~skw414R%J?1 yL_1U1`QxEj1P01J{0k4#-!RZUC=Q)S3}c5nwKcEpjMs?8Vq)+eA)J_S>wf@K6XYcT literal 0 HcmV?d00001 diff --git a/newt/colormaps/berlin/berlin.ncmap b/newt/colormaps/berlin/berlin.ncmap new file mode 100644 index 0000000..919c0a8 --- /dev/null +++ b/newt/colormaps/berlin/berlin.ncmap @@ -0,0 +1,256 @@ +158 176 255 +156 176 254 +154 176 253 +152 175 252 +149 175 251 +147 175 250 +145 174 249 +142 174 247 +140 174 246 +138 174 245 +135 173 244 +133 173 243 +130 173 242 +128 172 241 +126 172 240 +123 172 238 +121 171 237 +118 171 236 +116 170 235 +113 170 233 +111 169 232 +108 169 230 +106 168 229 +103 168 227 +101 167 226 +98 166 224 +96 165 223 +93 165 221 +91 164 219 +88 163 217 +86 162 215 +84 160 213 +81 159 211 +79 158 209 +77 157 207 +75 155 205 +72 154 202 +70 152 200 +68 151 198 +67 149 195 +65 148 193 +63 146 190 +62 144 188 +60 142 185 +59 141 183 +57 139 180 +56 137 178 +55 135 175 +54 133 173 +53 132 170 +51 130 168 +50 128 166 +50 126 163 +49 124 161 +48 122 158 +47 120 156 +46 118 153 +45 117 151 +44 115 148 +44 113 146 +43 111 143 +42 109 141 +41 107 139 +41 105 136 +40 104 134 +39 102 131 +39 100 129 +38 98 127 +37 96 124 +36 94 122 +36 93 120 +35 91 117 +34 89 115 +34 87 113 +33 85 110 +32 84 108 +32 82 106 +31 80 104 +30 78 101 +30 77 99 +29 75 97 +28 73 95 +28 71 92 +27 70 90 +26 68 88 +26 66 86 +25 65 83 +25 63 81 +24 61 79 +23 60 77 +23 58 75 +22 56 73 +22 55 71 +21 53 68 +21 51 66 +20 50 64 +20 48 62 +19 47 60 +19 45 58 +18 44 56 +18 42 54 +18 41 52 +17 39 50 +17 38 48 +17 36 46 +17 35 44 +17 33 42 +17 32 40 +16 31 38 +16 29 37 +16 28 35 +17 27 33 +17 26 32 +17 25 30 +17 24 28 +17 22 27 +17 21 25 +17 20 24 +17 19 23 +18 18 21 +18 18 20 +19 17 18 +20 16 17 +20 15 16 +21 14 14 +22 14 13 +23 13 11 +24 12 10 +25 12 9 +26 12 8 +27 11 7 +28 11 6 +29 11 5 +30 11 4 
+32 11 4 +33 11 3 +34 12 2 +35 12 2 +36 12 2 +37 12 1 +38 13 1 +39 13 1 +40 13 1 +42 14 1 +43 14 1 +44 14 0 +45 14 0 +47 14 0 +48 15 0 +49 15 0 +51 15 0 +52 15 0 +53 16 0 +55 16 0 +56 16 0 +57 17 0 +59 17 0 +60 17 1 +62 18 1 +63 18 1 +65 18 1 +66 19 1 +68 19 1 +69 20 1 +71 20 1 +72 21 2 +74 21 2 +75 22 2 +77 22 2 +79 23 3 +80 24 3 +82 24 4 +84 25 5 +86 26 5 +87 27 6 +89 28 7 +91 29 8 +93 30 9 +95 31 10 +97 32 11 +99 33 12 +101 35 14 +104 36 15 +106 37 16 +108 39 17 +110 40 19 +112 42 20 +115 43 22 +117 45 23 +119 47 25 +121 48 27 +123 50 28 +125 52 30 +128 54 32 +130 55 34 +132 57 36 +134 59 38 +136 61 40 +138 63 42 +140 64 44 +142 66 46 +144 68 48 +146 70 50 +148 72 52 +150 74 54 +152 76 57 +154 77 59 +156 79 61 +158 81 63 +160 83 65 +162 85 68 +164 87 70 +166 89 72 +168 90 74 +170 92 76 +172 94 79 +174 96 81 +176 98 83 +178 100 85 +180 102 88 +182 104 90 +184 106 92 +186 107 95 +188 109 97 +190 111 99 +192 113 101 +194 115 104 +196 117 106 +198 119 108 +200 121 111 +202 123 113 +204 125 115 +206 127 118 +208 129 120 +210 131 122 +213 133 125 +215 135 127 +217 137 130 +219 139 132 +221 141 134 +223 143 137 +225 145 139 +227 147 142 +229 149 144 +231 151 146 +234 153 149 +236 155 151 +238 157 154 +240 159 156 +242 161 159 +244 163 161 +246 165 163 +249 167 166 +251 169 168 +253 171 171 +255 173 173 diff --git a/newt/colormaps/berlin/berlin.pal b/newt/colormaps/berlin/berlin.pal new file mode 100644 index 0000000..8bc7774 --- /dev/null +++ b/newt/colormaps/berlin/berlin.pal @@ -0,0 +1,257 @@ +set palette defined (\ +0 0.621082 0.690182 0.999507, \ +1 0.612157 0.689228 0.995374, \ +2 0.603202 0.688250 0.991239, \ +3 0.594200 0.687257 0.987092, \ +4 0.585165 0.686248 0.982922, \ +5 0.576088 0.685222 0.978733, \ +6 0.566961 0.684166 0.974524, \ +7 0.557791 0.683098 0.970288, \ +8 0.548590 0.681992 0.966016, \ +9 0.539327 0.680859 0.961704, \ +10 0.530034 0.679691 0.957350, \ +11 0.520687 0.678484 0.952942, \ +12 0.511295 0.677230 0.948466, \ +13 0.501863 0.675908 
0.943923, \ +14 0.492368 0.674526 0.939297, \ +15 0.482832 0.673075 0.934574, \ +16 0.473239 0.671530 0.929751, \ +17 0.463610 0.669898 0.924806, \ +18 0.453931 0.668152 0.919735, \ +19 0.444213 0.666275 0.914518, \ +20 0.434440 0.664271 0.909136, \ +21 0.424645 0.662120 0.903586, \ +22 0.414818 0.659791 0.897845, \ +23 0.404975 0.657289 0.891905, \ +24 0.395137 0.654579 0.885750, \ +25 0.385296 0.651674 0.879368, \ +26 0.375493 0.648536 0.872757, \ +27 0.365742 0.645164 0.865903, \ +28 0.356059 0.641552 0.858801, \ +29 0.346453 0.637692 0.851451, \ +30 0.336982 0.633574 0.843855, \ +31 0.327642 0.629189 0.836017, \ +32 0.318487 0.624551 0.827937, \ +33 0.309539 0.619657 0.819628, \ +34 0.300784 0.614497 0.811108, \ +35 0.292309 0.609115 0.802379, \ +36 0.284098 0.603485 0.793470, \ +37 0.276205 0.597634 0.784386, \ +38 0.268595 0.591580 0.775143, \ +39 0.261308 0.585335 0.765780, \ +40 0.254368 0.578908 0.756296, \ +41 0.247753 0.572328 0.746719, \ +42 0.241464 0.565596 0.737066, \ +43 0.235515 0.558748 0.727351, \ +44 0.229842 0.551802 0.717600, \ +45 0.224503 0.544750 0.707805, \ +46 0.219485 0.537628 0.697998, \ +47 0.214694 0.530433 0.688190, \ +48 0.210172 0.523193 0.678377, \ +49 0.205889 0.515897 0.668578, \ +50 0.201771 0.508598 0.658787, \ +51 0.197878 0.501258 0.649030, \ +52 0.194172 0.493903 0.639287, \ +53 0.190556 0.486541 0.629572, \ +54 0.187112 0.479181 0.619898, \ +55 0.183752 0.471826 0.610241, \ +56 0.180500 0.464474 0.600622, \ +57 0.177365 0.457117 0.591037, \ +58 0.174264 0.449788 0.581483, \ +59 0.171224 0.442474 0.571966, \ +60 0.168242 0.435172 0.562486, \ +61 0.165292 0.427884 0.553021, \ +62 0.162439 0.420608 0.543603, \ +63 0.159545 0.413370 0.534210, \ +64 0.156739 0.406147 0.524856, \ +65 0.153905 0.398932 0.515524, \ +66 0.151122 0.391757 0.506230, \ +67 0.148346 0.384591 0.496972, \ +68 0.145641 0.377462 0.487751, \ +69 0.142879 0.370343 0.478544, \ +70 0.140138 0.363257 0.469389, \ +71 0.137466 0.356204 0.460239, \ +72 0.134777 
0.349162 0.451147, \ +73 0.132079 0.342150 0.442085, \ +74 0.129401 0.335173 0.433042, \ +75 0.126735 0.328195 0.424036, \ +76 0.124090 0.321259 0.415071, \ +77 0.121456 0.314347 0.406144, \ +78 0.118899 0.307460 0.397234, \ +79 0.116316 0.300608 0.388376, \ +80 0.113731 0.293781 0.379546, \ +81 0.111187 0.286980 0.370748, \ +82 0.108613 0.280217 0.362004, \ +83 0.106159 0.273497 0.353280, \ +84 0.103670 0.266776 0.344594, \ +85 0.101183 0.260108 0.335952, \ +86 0.098776 0.253467 0.327342, \ +87 0.096347 0.246850 0.318783, \ +88 0.094059 0.240264 0.310267, \ +89 0.091788 0.233727 0.301758, \ +90 0.089506 0.227245 0.293318, \ +91 0.087341 0.220800 0.284914, \ +92 0.085142 0.214360 0.276576, \ +93 0.083069 0.207981 0.268249, \ +94 0.081098 0.201631 0.259992, \ +95 0.079130 0.195361 0.251781, \ +96 0.077286 0.189136 0.243589, \ +97 0.075571 0.182943 0.235502, \ +98 0.073993 0.176835 0.227434, \ +99 0.072410 0.170785 0.219433, \ +100 0.071045 0.164795 0.211500, \ +101 0.069767 0.158901 0.203628, \ +102 0.068618 0.153040 0.195818, \ +103 0.067560 0.147319 0.188124, \ +104 0.066665 0.141671 0.180452, \ +105 0.065923 0.136076 0.172917, \ +106 0.065339 0.130695 0.165458, \ +107 0.064911 0.125349 0.158169, \ +108 0.064636 0.120132 0.150946, \ +109 0.064517 0.115070 0.143889, \ +110 0.064554 0.110222 0.136957, \ +111 0.064749 0.105427 0.130230, \ +112 0.065100 0.100849 0.123569, \ +113 0.065383 0.096469 0.117170, \ +114 0.065574 0.092338 0.111008, \ +115 0.065892 0.088201 0.104982, \ +116 0.066388 0.084134 0.099288, \ +117 0.067108 0.080051 0.093829, \ +118 0.068193 0.076099 0.088470, \ +119 0.069720 0.072283 0.083025, \ +120 0.071639 0.068654 0.077544, \ +121 0.073978 0.065058 0.072110, \ +122 0.076596 0.061657 0.066651, \ +123 0.079637 0.058550 0.061133, \ +124 0.082963 0.055666 0.055745, \ +125 0.086537 0.052997 0.050336, \ +126 0.090315 0.050699 0.045040, \ +127 0.094260 0.048753 0.039773, \ +128 0.098319 0.047041 0.034683, \ +129 0.102458 0.045624 0.030074, \ +130 
0.106732 0.044705 0.026012, \ +131 0.110986 0.043972 0.022379, \ +132 0.115245 0.043596 0.019150, \ +133 0.119547 0.043567 0.016299, \ +134 0.123812 0.043861 0.013797, \ +135 0.128105 0.044459 0.011588, \ +136 0.132315 0.045229 0.009531, \ +137 0.136451 0.046164 0.007895, \ +138 0.140635 0.047374 0.006502, \ +139 0.144884 0.048634 0.005327, \ +140 0.149230 0.049836 0.004346, \ +141 0.153685 0.050997 0.003537, \ +142 0.158309 0.052130 0.002882, \ +143 0.163014 0.053218 0.002363, \ +144 0.167811 0.054240 0.001963, \ +145 0.172736 0.055172 0.001669, \ +146 0.177801 0.056018 0.001469, \ +147 0.182863 0.056820 0.001340, \ +148 0.188058 0.057574 0.001262, \ +149 0.193233 0.058514 0.001226, \ +150 0.198463 0.059550 0.001227, \ +151 0.203778 0.060501 0.001260, \ +152 0.209092 0.061486 0.001322, \ +153 0.214470 0.062710 0.001412, \ +154 0.219897 0.063823 0.001529, \ +155 0.225345 0.065027 0.001675, \ +156 0.230856 0.066297 0.001853, \ +157 0.236422 0.067645 0.002068, \ +158 0.242016 0.069092 0.002325, \ +159 0.247681 0.070458 0.002632, \ +160 0.253390 0.071986 0.002998, \ +161 0.259176 0.073640 0.003435, \ +162 0.264997 0.075237 0.003955, \ +163 0.270934 0.076965 0.004571, \ +164 0.276928 0.078822 0.005301, \ +165 0.283017 0.080819 0.006161, \ +166 0.289196 0.082879 0.007171, \ +167 0.295466 0.085075 0.008349, \ +168 0.301858 0.087460 0.009726, \ +169 0.308387 0.089912 0.011455, \ +170 0.315024 0.092530 0.013324, \ +171 0.321806 0.095392 0.015413, \ +172 0.328738 0.098396 0.017780, \ +173 0.335805 0.101580 0.020449, \ +174 0.343036 0.104977 0.023440, \ +175 0.350413 0.108640 0.026771, \ +176 0.357947 0.112564 0.030456, \ +177 0.365629 0.116658 0.034571, \ +178 0.373470 0.120971 0.039115, \ +179 0.381463 0.125606 0.043693, \ +180 0.389583 0.130457 0.048471, \ +181 0.397845 0.135474 0.053136, \ +182 0.406220 0.140795 0.057848, \ +183 0.414690 0.146274 0.062715, \ +184 0.423229 0.151979 0.067685, \ +185 0.431837 0.157906 0.073044, \ +186 0.440444 0.164028 0.078620, \ +187 
0.449085 0.170269 0.084644, \ +188 0.457704 0.176666 0.090869, \ +189 0.466314 0.183213 0.097335, \ +190 0.474900 0.189888 0.104064, \ +191 0.483420 0.196677 0.111039, \ +192 0.491910 0.203516 0.118190, \ +193 0.500322 0.210433 0.125501, \ +194 0.508690 0.217425 0.132983, \ +195 0.516977 0.224432 0.140623, \ +196 0.525197 0.231543 0.148349, \ +197 0.533349 0.238624 0.156261, \ +198 0.541440 0.245755 0.164233, \ +199 0.549481 0.252923 0.172265, \ +200 0.557462 0.260091 0.180403, \ +201 0.565378 0.267255 0.188640, \ +202 0.573272 0.274461 0.196924, \ +203 0.581112 0.281673 0.205237, \ +204 0.588920 0.288894 0.213625, \ +205 0.596716 0.296114 0.222054, \ +206 0.604484 0.303345 0.230529, \ +207 0.612228 0.310617 0.239052, \ +208 0.619976 0.317867 0.247618, \ +209 0.627708 0.325132 0.256189, \ +210 0.635438 0.332443 0.264815, \ +211 0.643173 0.339745 0.273490, \ +212 0.650917 0.347064 0.282179, \ +213 0.658661 0.354395 0.290887, \ +214 0.666419 0.361751 0.299640, \ +215 0.674194 0.369121 0.308415, \ +216 0.681975 0.376518 0.317219, \ +217 0.689783 0.383920 0.326043, \ +218 0.697596 0.391354 0.334929, \ +219 0.705434 0.398794 0.343796, \ +220 0.713288 0.406271 0.352720, \ +221 0.721158 0.413757 0.361662, \ +222 0.729054 0.421259 0.370618, \ +223 0.736968 0.428796 0.379616, \ +224 0.744900 0.436349 0.388639, \ +225 0.752851 0.443923 0.397680, \ +226 0.760831 0.451512 0.406747, \ +227 0.768821 0.459124 0.415838, \ +228 0.776844 0.466756 0.424962, \ +229 0.784879 0.474407 0.434092, \ +230 0.792935 0.482080 0.443269, \ +231 0.801009 0.489763 0.452465, \ +232 0.809110 0.497486 0.461672, \ +233 0.817222 0.505207 0.470910, \ +234 0.825358 0.512962 0.480170, \ +235 0.833517 0.520732 0.489445, \ +236 0.841692 0.528527 0.498763, \ +237 0.849885 0.536335 0.508096, \ +238 0.858092 0.544161 0.517448, \ +239 0.866324 0.552013 0.526825, \ +240 0.874568 0.559879 0.536218, \ +241 0.882829 0.567761 0.545643, \ +242 0.891110 0.575670 0.555082, \ +243 0.899407 0.583585 0.564550, \ +244 
0.907716 0.591530 0.574038, \ +245 0.916031 0.599492 0.583552, \ +246 0.924368 0.607473 0.593095, \ +247 0.932714 0.615460 0.602649, \ +248 0.941076 0.623483 0.612229, \ +249 0.949447 0.631512 0.621832, \ +250 0.957832 0.639563 0.631467, \ +251 0.966219 0.647628 0.641113, \ +252 0.974619 0.655718 0.650792, \ +253 0.983030 0.663823 0.660487, \ +254 0.991448 0.671939 0.670216, \ +255 0.999873 0.680072 0.679950) diff --git a/newt/colormaps/berlin/berlin.py b/newt/colormaps/berlin/berlin.py new file mode 100644 index 0000000..3aa2559 --- /dev/null +++ b/newt/colormaps/berlin/berlin.py @@ -0,0 +1,278 @@ +# +# berlin +# www.fabiocrameri.ch/colourmaps +from matplotlib.colors import LinearSegmentedColormap + +cm_data = [[0.62108, 0.69018, 0.99951], + [0.61216, 0.68923, 0.99537], + [0.6032, 0.68825, 0.99124], + [0.5942, 0.68726, 0.98709], + [0.58517, 0.68625, 0.98292], + [0.57609, 0.68522, 0.97873], + [0.56696, 0.68417, 0.97452], + [0.55779, 0.6831, 0.97029], + [0.54859, 0.68199, 0.96602], + [0.53933, 0.68086, 0.9617], + [0.53003, 0.67969, 0.95735], + [0.52069, 0.67848, 0.95294], + [0.51129, 0.67723, 0.94847], + [0.50186, 0.67591, 0.94392], + [0.49237, 0.67453, 0.9393], + [0.48283, 0.67308, 0.93457], + [0.47324, 0.67153, 0.92975], + [0.46361, 0.6699, 0.92481], + [0.45393, 0.66815, 0.91974], + [0.44421, 0.66628, 0.91452], + [0.43444, 0.66427, 0.90914], + [0.42465, 0.66212, 0.90359], + [0.41482, 0.65979, 0.89785], + [0.40498, 0.65729, 0.89191], + [0.39514, 0.65458, 0.88575], + [0.3853, 0.65167, 0.87937], + [0.37549, 0.64854, 0.87276], + [0.36574, 0.64516, 0.8659], + [0.35606, 0.64155, 0.8588], + [0.34645, 0.63769, 0.85145], + [0.33698, 0.63357, 0.84386], + [0.32764, 0.62919, 0.83602], + [0.31849, 0.62455, 0.82794], + [0.30954, 0.61966, 0.81963], + [0.30078, 0.6145, 0.81111], + [0.29231, 0.60911, 0.80238], + [0.2841, 0.60348, 0.79347], + [0.27621, 0.59763, 0.78439], + [0.26859, 0.59158, 0.77514], + [0.26131, 0.58534, 0.76578], + [0.25437, 0.57891, 0.7563], + [0.24775, 0.57233, 
0.74672], + [0.24146, 0.5656, 0.73707], + [0.23552, 0.55875, 0.72735], + [0.22984, 0.5518, 0.7176], + [0.2245, 0.54475, 0.7078], + [0.21948, 0.53763, 0.698], + [0.21469, 0.53043, 0.68819], + [0.21017, 0.52319, 0.67838], + [0.20589, 0.5159, 0.66858], + [0.20177, 0.5086, 0.65879], + [0.19788, 0.50126, 0.64903], + [0.19417, 0.4939, 0.63929], + [0.19056, 0.48654, 0.62957], + [0.18711, 0.47918, 0.6199], + [0.18375, 0.47183, 0.61024], + [0.1805, 0.46447, 0.60062], + [0.17737, 0.45712, 0.59104], + [0.17426, 0.44979, 0.58148], + [0.17122, 0.44247, 0.57197], + [0.16824, 0.43517, 0.56249], + [0.16529, 0.42788, 0.55302], + [0.16244, 0.42061, 0.5436], + [0.15954, 0.41337, 0.53421], + [0.15674, 0.40615, 0.52486], + [0.15391, 0.39893, 0.51552], + [0.15112, 0.39176, 0.50623], + [0.14835, 0.38459, 0.49697], + [0.14564, 0.37746, 0.48775], + [0.14288, 0.37034, 0.47854], + [0.14014, 0.36326, 0.46939], + [0.13747, 0.3562, 0.46024], + [0.13478, 0.34916, 0.45115], + [0.13208, 0.34215, 0.44209], + [0.1294, 0.33517, 0.43304], + [0.12674, 0.3282, 0.42404], + [0.12409, 0.32126, 0.41507], + [0.12146, 0.31435, 0.40614], + [0.1189, 0.30746, 0.39723], + [0.11632, 0.30061, 0.38838], + [0.11373, 0.29378, 0.37955], + [0.11119, 0.28698, 0.37075], + [0.10861, 0.28022, 0.362], + [0.10616, 0.2735, 0.35328], + [0.10367, 0.26678, 0.34459], + [0.10118, 0.26011, 0.33595], + [0.098776, 0.25347, 0.32734], + [0.096347, 0.24685, 0.31878], + [0.094059, 0.24026, 0.31027], + [0.091788, 0.23373, 0.30176], + [0.089506, 0.22725, 0.29332], + [0.087341, 0.2208, 0.28491], + [0.085142, 0.21436, 0.27658], + [0.083069, 0.20798, 0.26825], + [0.081098, 0.20163, 0.25999], + [0.07913, 0.19536, 0.25178], + [0.077286, 0.18914, 0.24359], + [0.075571, 0.18294, 0.2355], + [0.073993, 0.17683, 0.22743], + [0.07241, 0.17079, 0.21943], + [0.071045, 0.1648, 0.2115], + [0.069767, 0.1589, 0.20363], + [0.068618, 0.15304, 0.19582], + [0.06756, 0.14732, 0.18812], + [0.066665, 0.14167, 0.18045], + [0.065923, 0.13608, 0.17292], + [0.065339, 
0.1307, 0.16546], + [0.064911, 0.12535, 0.15817], + [0.064636, 0.12013, 0.15095], + [0.064517, 0.11507, 0.14389], + [0.064554, 0.11022, 0.13696], + [0.064749, 0.10543, 0.13023], + [0.0651, 0.10085, 0.12357], + [0.065383, 0.096469, 0.11717], + [0.065574, 0.092338, 0.11101], + [0.065892, 0.088201, 0.10498], + [0.066388, 0.084134, 0.099288], + [0.067108, 0.080051, 0.093829], + [0.068193, 0.076099, 0.08847], + [0.06972, 0.072283, 0.083025], + [0.071639, 0.068654, 0.077544], + [0.073978, 0.065058, 0.07211], + [0.076596, 0.061657, 0.066651], + [0.079637, 0.05855, 0.061133], + [0.082963, 0.055666, 0.055745], + [0.086537, 0.052997, 0.050336], + [0.090315, 0.050699, 0.04504], + [0.09426, 0.048753, 0.039773], + [0.098319, 0.047041, 0.034683], + [0.10246, 0.045624, 0.030074], + [0.10673, 0.044705, 0.026012], + [0.11099, 0.043972, 0.022379], + [0.11524, 0.043596, 0.01915], + [0.11955, 0.043567, 0.016299], + [0.12381, 0.043861, 0.013797], + [0.1281, 0.044459, 0.011588], + [0.13232, 0.045229, 0.0095315], + [0.13645, 0.046164, 0.0078947], + [0.14063, 0.047374, 0.006502], + [0.14488, 0.048634, 0.0053266], + [0.14923, 0.049836, 0.0043455], + [0.15369, 0.050997, 0.0035374], + [0.15831, 0.05213, 0.0028824], + [0.16301, 0.053218, 0.0023628], + [0.16781, 0.05424, 0.0019629], + [0.17274, 0.055172, 0.001669], + [0.1778, 0.056018, 0.0014692], + [0.18286, 0.05682, 0.0013401], + [0.18806, 0.057574, 0.0012617], + [0.19323, 0.058514, 0.0012261], + [0.19846, 0.05955, 0.0012271], + [0.20378, 0.060501, 0.0012601], + [0.20909, 0.061486, 0.0013221], + [0.21447, 0.06271, 0.0014116], + [0.2199, 0.063823, 0.0015287], + [0.22535, 0.065027, 0.0016748], + [0.23086, 0.066297, 0.0018529], + [0.23642, 0.067645, 0.0020675], + [0.24202, 0.069092, 0.0023247], + [0.24768, 0.070458, 0.0026319], + [0.25339, 0.071986, 0.0029984], + [0.25918, 0.07364, 0.003435], + [0.265, 0.075237, 0.0039545], + [0.27093, 0.076965, 0.004571], + [0.27693, 0.078822, 0.0053006], + [0.28302, 0.080819, 0.0061608], + [0.2892, 0.082879, 
0.0071713], + [0.29547, 0.085075, 0.0083494], + [0.30186, 0.08746, 0.0097258], + [0.30839, 0.089912, 0.011455], + [0.31502, 0.09253, 0.013324], + [0.32181, 0.095392, 0.015413], + [0.32874, 0.098396, 0.01778], + [0.3358, 0.10158, 0.020449], + [0.34304, 0.10498, 0.02344], + [0.35041, 0.10864, 0.026771], + [0.35795, 0.11256, 0.030456], + [0.36563, 0.11666, 0.034571], + [0.37347, 0.12097, 0.039115], + [0.38146, 0.12561, 0.043693], + [0.38958, 0.13046, 0.048471], + [0.39785, 0.13547, 0.053136], + [0.40622, 0.1408, 0.057848], + [0.41469, 0.14627, 0.062715], + [0.42323, 0.15198, 0.067685], + [0.43184, 0.15791, 0.073044], + [0.44044, 0.16403, 0.07862], + [0.44909, 0.17027, 0.084644], + [0.4577, 0.17667, 0.090869], + [0.46631, 0.18321, 0.097335], + [0.4749, 0.18989, 0.10406], + [0.48342, 0.19668, 0.11104], + [0.49191, 0.20352, 0.11819], + [0.50032, 0.21043, 0.1255], + [0.50869, 0.21742, 0.13298], + [0.51698, 0.22443, 0.14062], + [0.5252, 0.23154, 0.14835], + [0.53335, 0.23862, 0.15626], + [0.54144, 0.24575, 0.16423], + [0.54948, 0.25292, 0.17226], + [0.55746, 0.26009, 0.1804], + [0.56538, 0.26726, 0.18864], + [0.57327, 0.27446, 0.19692], + [0.58111, 0.28167, 0.20524], + [0.58892, 0.28889, 0.21362], + [0.59672, 0.29611, 0.22205], + [0.60448, 0.30335, 0.23053], + [0.61223, 0.31062, 0.23905], + [0.61998, 0.31787, 0.24762], + [0.62771, 0.32513, 0.25619], + [0.63544, 0.33244, 0.26481], + [0.64317, 0.33975, 0.27349], + [0.65092, 0.34706, 0.28218], + [0.65866, 0.3544, 0.29089], + [0.66642, 0.36175, 0.29964], + [0.67419, 0.36912, 0.30842], + [0.68198, 0.37652, 0.31722], + [0.68978, 0.38392, 0.32604], + [0.6976, 0.39135, 0.33493], + [0.70543, 0.39879, 0.3438], + [0.71329, 0.40627, 0.35272], + [0.72116, 0.41376, 0.36166], + [0.72905, 0.42126, 0.37062], + [0.73697, 0.4288, 0.37962], + [0.7449, 0.43635, 0.38864], + [0.75285, 0.44392, 0.39768], + [0.76083, 0.45151, 0.40675], + [0.76882, 0.45912, 0.41584], + [0.77684, 0.46676, 0.42496], + [0.78488, 0.47441, 0.43409], + [0.79293, 0.48208, 
0.44327], + [0.80101, 0.48976, 0.45246], + [0.80911, 0.49749, 0.46167], + [0.81722, 0.50521, 0.47091], + [0.82536, 0.51296, 0.48017], + [0.83352, 0.52073, 0.48945], + [0.84169, 0.52853, 0.49876], + [0.84988, 0.53634, 0.5081], + [0.85809, 0.54416, 0.51745], + [0.86632, 0.55201, 0.52683], + [0.87457, 0.55988, 0.53622], + [0.88283, 0.56776, 0.54564], + [0.89111, 0.57567, 0.55508], + [0.89941, 0.58358, 0.56455], + [0.90772, 0.59153, 0.57404], + [0.91603, 0.59949, 0.58355], + [0.92437, 0.60747, 0.59309], + [0.93271, 0.61546, 0.60265], + [0.94108, 0.62348, 0.61223], + [0.94945, 0.63151, 0.62183], + [0.95783, 0.63956, 0.63147], + [0.96622, 0.64763, 0.64111], + [0.97462, 0.65572, 0.65079], + [0.98303, 0.66382, 0.66049], + [0.99145, 0.67194, 0.67022], + [0.99987, 0.68007, 0.67995]] + +berlin_map = LinearSegmentedColormap.from_list('berlin', cm_data) +# For use of "viscm view" +test_cm = berlin_map + +if __name__ == "__main__": + import matplotlib.pyplot as plt + import numpy as np + + try: + from viscm import viscm + viscm(berlin_map) + except ImportError: + print("viscm not found, falling back on simple display") + plt.imshow(np.linspace(0, 100, 256)[None, :], aspect='auto', + cmap=berlin_map) + plt.show() diff --git a/newt/colormaps/berlin/berlin.spk b/newt/colormaps/berlin/berlin.spk new file mode 100644 index 0000000..4d55b2d --- /dev/null +++ b/newt/colormaps/berlin/berlin.spk @@ -0,0 +1,256 @@ + 0.00 62.11 69.02 99.95 + 0.39 61.22 68.92 99.54 + 0.78 60.32 68.82 99.12 + 1.18 59.42 68.73 98.71 + 1.57 58.52 68.62 98.29 + 1.96 57.61 68.52 97.87 + 2.35 56.70 68.42 97.45 + 2.75 55.78 68.31 97.03 + 3.14 54.86 68.20 96.60 + 3.53 53.93 68.09 96.17 + 3.92 53.00 67.97 95.74 + 4.31 52.07 67.85 95.29 + 4.71 51.13 67.72 94.85 + 5.10 50.19 67.59 94.39 + 5.49 49.24 67.45 93.93 + 5.88 48.28 67.31 93.46 + 6.27 47.32 67.15 92.98 + 6.67 46.36 66.99 92.48 + 7.06 45.39 66.82 91.97 + 7.45 44.42 66.63 91.45 + 7.84 43.44 66.43 90.91 + 8.24 42.46 66.21 90.36 + 8.63 41.48 65.98 89.78 + 9.02 
40.50 65.73 89.19 + 9.41 39.51 65.46 88.58 + 9.80 38.53 65.17 87.94 + 10.20 37.55 64.85 87.28 + 10.59 36.57 64.52 86.59 + 10.98 35.61 64.16 85.88 + 11.37 34.65 63.77 85.15 + 11.76 33.70 63.36 84.39 + 12.16 32.76 62.92 83.60 + 12.55 31.85 62.46 82.79 + 12.94 30.95 61.97 81.96 + 13.33 30.08 61.45 81.11 + 13.73 29.23 60.91 80.24 + 14.12 28.41 60.35 79.35 + 14.51 27.62 59.76 78.44 + 14.90 26.86 59.16 77.51 + 15.29 26.13 58.53 76.58 + 15.69 25.44 57.89 75.63 + 16.08 24.78 57.23 74.67 + 16.47 24.15 56.56 73.71 + 16.86 23.55 55.87 72.74 + 17.25 22.98 55.18 71.76 + 17.65 22.45 54.47 70.78 + 18.04 21.95 53.76 69.80 + 18.43 21.47 53.04 68.82 + 18.82 21.02 52.32 67.84 + 19.22 20.59 51.59 66.86 + 19.61 20.18 50.86 65.88 + 20.00 19.79 50.13 64.90 + 20.39 19.42 49.39 63.93 + 20.78 19.06 48.65 62.96 + 21.18 18.71 47.92 61.99 + 21.57 18.38 47.18 61.02 + 21.96 18.05 46.45 60.06 + 22.35 17.74 45.71 59.10 + 22.75 17.43 44.98 58.15 + 23.14 17.12 44.25 57.20 + 23.53 16.82 43.52 56.25 + 23.92 16.53 42.79 55.30 + 24.31 16.24 42.06 54.36 + 24.71 15.95 41.34 53.42 + 25.10 15.67 40.61 52.49 + 25.49 15.39 39.89 51.55 + 25.88 15.11 39.18 50.62 + 26.27 14.83 38.46 49.70 + 26.67 14.56 37.75 48.78 + 27.06 14.29 37.03 47.85 + 27.45 14.01 36.33 46.94 + 27.84 13.75 35.62 46.02 + 28.24 13.48 34.92 45.11 + 28.63 13.21 34.22 44.21 + 29.02 12.94 33.52 43.30 + 29.41 12.67 32.82 42.40 + 29.80 12.41 32.13 41.51 + 30.20 12.15 31.43 40.61 + 30.59 11.89 30.75 39.72 + 30.98 11.63 30.06 38.84 + 31.37 11.37 29.38 37.95 + 31.76 11.12 28.70 37.07 + 32.16 10.86 28.02 36.20 + 32.55 10.62 27.35 35.33 + 32.94 10.37 26.68 34.46 + 33.33 10.12 26.01 33.60 + 33.73 9.88 25.35 32.73 + 34.12 9.63 24.68 31.88 + 34.51 9.41 24.03 31.03 + 34.90 9.18 23.37 30.18 + 35.29 8.95 22.72 29.33 + 35.69 8.73 22.08 28.49 + 36.08 8.51 21.44 27.66 + 36.47 8.31 20.80 26.82 + 36.86 8.11 20.16 26.00 + 37.25 7.91 19.54 25.18 + 37.65 7.73 18.91 24.36 + 38.04 7.56 18.29 23.55 + 38.43 7.40 17.68 22.74 + 38.82 7.24 17.08 21.94 + 39.22 7.10 16.48 
21.15 + 39.61 6.98 15.89 20.36 + 40.00 6.86 15.30 19.58 + 40.39 6.76 14.73 18.81 + 40.78 6.67 14.17 18.05 + 41.18 6.59 13.61 17.29 + 41.57 6.53 13.07 16.55 + 41.96 6.49 12.53 15.82 + 42.35 6.46 12.01 15.09 + 42.75 6.45 11.51 14.39 + 43.14 6.46 11.02 13.70 + 43.53 6.47 10.54 13.02 + 43.92 6.51 10.08 12.36 + 44.31 6.54 9.65 11.72 + 44.71 6.56 9.23 11.10 + 45.10 6.59 8.82 10.50 + 45.49 6.64 8.41 9.93 + 45.88 6.71 8.01 9.38 + 46.27 6.82 7.61 8.85 + 46.67 6.97 7.23 8.30 + 47.06 7.16 6.87 7.75 + 47.45 7.40 6.51 7.21 + 47.84 7.66 6.17 6.67 + 48.24 7.96 5.85 6.11 + 48.63 8.30 5.57 5.57 + 49.02 8.65 5.30 5.03 + 49.41 9.03 5.07 4.50 + 49.80 9.43 4.88 3.98 + 50.20 9.83 4.70 3.47 + 50.59 10.25 4.56 3.01 + 50.98 10.67 4.47 2.60 + 51.37 11.10 4.40 2.24 + 51.76 11.52 4.36 1.92 + 52.16 11.95 4.36 1.63 + 52.55 12.38 4.39 1.38 + 52.94 12.81 4.45 1.16 + 53.33 13.23 4.52 0.95 + 53.73 13.65 4.62 0.79 + 54.12 14.06 4.74 0.65 + 54.51 14.49 4.86 0.53 + 54.90 14.92 4.98 0.43 + 55.29 15.37 5.10 0.35 + 55.69 15.83 5.21 0.29 + 56.08 16.30 5.32 0.24 + 56.47 16.78 5.42 0.20 + 56.86 17.27 5.52 0.17 + 57.25 17.78 5.60 0.15 + 57.65 18.29 5.68 0.13 + 58.04 18.81 5.76 0.13 + 58.43 19.32 5.85 0.12 + 58.82 19.85 5.95 0.12 + 59.22 20.38 6.05 0.13 + 59.61 20.91 6.15 0.13 + 60.00 21.45 6.27 0.14 + 60.39 21.99 6.38 0.15 + 60.78 22.53 6.50 0.17 + 61.18 23.09 6.63 0.19 + 61.57 23.64 6.76 0.21 + 61.96 24.20 6.91 0.23 + 62.35 24.77 7.05 0.26 + 62.75 25.34 7.20 0.30 + 63.14 25.92 7.36 0.34 + 63.53 26.50 7.52 0.40 + 63.92 27.09 7.70 0.46 + 64.31 27.69 7.88 0.53 + 64.71 28.30 8.08 0.62 + 65.10 28.92 8.29 0.72 + 65.49 29.55 8.51 0.83 + 65.88 30.19 8.75 0.97 + 66.27 30.84 8.99 1.15 + 66.67 31.50 9.25 1.33 + 67.06 32.18 9.54 1.54 + 67.45 32.87 9.84 1.78 + 67.84 33.58 10.16 2.04 + 68.24 34.30 10.50 2.34 + 68.63 35.04 10.86 2.68 + 69.02 35.79 11.26 3.05 + 69.41 36.56 11.67 3.46 + 69.80 37.35 12.10 3.91 + 70.20 38.15 12.56 4.37 + 70.59 38.96 13.05 4.85 + 70.98 39.78 13.55 5.31 + 71.37 40.62 14.08 5.78 + 71.76 41.47 
14.63 6.27 + 72.16 42.32 15.20 6.77 + 72.55 43.18 15.79 7.30 + 72.94 44.04 16.40 7.86 + 73.33 44.91 17.03 8.46 + 73.73 45.77 17.67 9.09 + 74.12 46.63 18.32 9.73 + 74.51 47.49 18.99 10.41 + 74.90 48.34 19.67 11.10 + 75.29 49.19 20.35 11.82 + 75.69 50.03 21.04 12.55 + 76.08 50.87 21.74 13.30 + 76.47 51.70 22.44 14.06 + 76.86 52.52 23.15 14.83 + 77.25 53.33 23.86 15.63 + 77.65 54.14 24.58 16.42 + 78.04 54.95 25.29 17.23 + 78.43 55.75 26.01 18.04 + 78.82 56.54 26.73 18.86 + 79.22 57.33 27.45 19.69 + 79.61 58.11 28.17 20.52 + 80.00 58.89 28.89 21.36 + 80.39 59.67 29.61 22.21 + 80.78 60.45 30.33 23.05 + 81.18 61.22 31.06 23.91 + 81.57 62.00 31.79 24.76 + 81.96 62.77 32.51 25.62 + 82.35 63.54 33.24 26.48 + 82.75 64.32 33.97 27.35 + 83.14 65.09 34.71 28.22 + 83.53 65.87 35.44 29.09 + 83.92 66.64 36.18 29.96 + 84.31 67.42 36.91 30.84 + 84.71 68.20 37.65 31.72 + 85.10 68.98 38.39 32.60 + 85.49 69.76 39.14 33.49 + 85.88 70.54 39.88 34.38 + 86.27 71.33 40.63 35.27 + 86.67 72.12 41.38 36.17 + 87.06 72.91 42.13 37.06 + 87.45 73.70 42.88 37.96 + 87.84 74.49 43.63 38.86 + 88.24 75.29 44.39 39.77 + 88.63 76.08 45.15 40.67 + 89.02 76.88 45.91 41.58 + 89.41 77.68 46.68 42.50 + 89.80 78.49 47.44 43.41 + 90.20 79.29 48.21 44.33 + 90.59 80.10 48.98 45.25 + 90.98 80.91 49.75 46.17 + 91.37 81.72 50.52 47.09 + 91.76 82.54 51.30 48.02 + 92.16 83.35 52.07 48.94 + 92.55 84.17 52.85 49.88 + 92.94 84.99 53.63 50.81 + 93.33 85.81 54.42 51.74 + 93.73 86.63 55.20 52.68 + 94.12 87.46 55.99 53.62 + 94.51 88.28 56.78 54.56 + 94.90 89.11 57.57 55.51 + 95.29 89.94 58.36 56.46 + 95.69 90.77 59.15 57.40 + 96.08 91.60 59.95 58.36 + 96.47 92.44 60.75 59.31 + 96.86 93.27 61.55 60.26 + 97.25 94.11 62.35 61.22 + 97.65 94.94 63.15 62.18 + 98.04 95.78 63.96 63.15 + 98.43 96.62 64.76 64.11 + 98.82 97.46 65.57 65.08 + 99.22 98.30 66.38 66.05 + 99.61 99.14 67.19 67.02 +100.00 99.99 68.01 68.00 diff --git a/newt/colormaps/berlin/berlin.svg b/newt/colormaps/berlin/berlin.svg new file mode 100644 index 
0000000..b5a3284 --- /dev/null +++ b/newt/colormaps/berlin/berlin.svg @@ -0,0 +1,528 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/newt/colormaps/berlin/berlin.txt b/newt/colormaps/berlin/berlin.txt new file mode 100644 index 0000000..201e505 --- /dev/null +++ b/newt/colormaps/berlin/berlin.txt @@ -0,0 +1,256 @@ +0.621082 0.690182 0.999507 +0.612157 0.689228 0.995374 +0.603202 0.688250 0.991239 +0.594200 0.687257 0.987092 +0.585165 0.686248 0.982922 +0.576088 0.685222 0.978733 +0.566961 0.684166 0.974524 +0.557791 0.683098 0.970288 +0.548590 0.681992 0.966016 +0.539327 0.680859 0.961704 +0.530034 0.679691 0.957350 +0.520687 0.678484 0.952942 +0.511295 0.677230 0.948466 +0.501863 0.675908 0.943923 +0.492368 0.674526 0.939297 +0.482832 0.673075 0.934574 +0.473239 0.671530 0.929751 +0.463610 0.669898 0.924806 +0.453931 0.668152 0.919735 +0.444213 0.666275 0.914518 +0.434440 0.664271 0.909136 +0.424645 0.662120 0.903586 +0.414818 0.659791 0.897845 +0.404975 
0.657289 0.891905 +0.395137 0.654579 0.885750 +0.385296 0.651674 0.879368 +0.375493 0.648536 0.872757 +0.365742 0.645164 0.865903 +0.356059 0.641552 0.858801 +0.346453 0.637692 0.851451 +0.336982 0.633574 0.843855 +0.327642 0.629189 0.836017 +0.318487 0.624551 0.827937 +0.309539 0.619657 0.819628 +0.300784 0.614497 0.811108 +0.292309 0.609115 0.802379 +0.284098 0.603485 0.793470 +0.276205 0.597634 0.784386 +0.268595 0.591580 0.775143 +0.261308 0.585335 0.765780 +0.254368 0.578908 0.756296 +0.247753 0.572328 0.746719 +0.241464 0.565596 0.737066 +0.235515 0.558748 0.727351 +0.229842 0.551802 0.717600 +0.224503 0.544750 0.707805 +0.219485 0.537628 0.697998 +0.214694 0.530433 0.688190 +0.210172 0.523193 0.678377 +0.205889 0.515897 0.668578 +0.201771 0.508598 0.658787 +0.197878 0.501258 0.649030 +0.194172 0.493903 0.639287 +0.190556 0.486541 0.629572 +0.187112 0.479181 0.619898 +0.183752 0.471826 0.610241 +0.180500 0.464474 0.600622 +0.177365 0.457117 0.591037 +0.174264 0.449788 0.581483 +0.171224 0.442474 0.571966 +0.168242 0.435172 0.562486 +0.165292 0.427884 0.553021 +0.162439 0.420608 0.543603 +0.159545 0.413370 0.534210 +0.156739 0.406147 0.524856 +0.153905 0.398932 0.515524 +0.151122 0.391757 0.506230 +0.148346 0.384591 0.496972 +0.145641 0.377462 0.487751 +0.142879 0.370343 0.478544 +0.140138 0.363257 0.469389 +0.137466 0.356204 0.460239 +0.134777 0.349162 0.451147 +0.132079 0.342150 0.442085 +0.129401 0.335173 0.433042 +0.126735 0.328195 0.424036 +0.124090 0.321259 0.415071 +0.121456 0.314347 0.406144 +0.118899 0.307460 0.397234 +0.116316 0.300608 0.388376 +0.113731 0.293781 0.379546 +0.111187 0.286980 0.370748 +0.108613 0.280217 0.362004 +0.106159 0.273497 0.353280 +0.103670 0.266776 0.344594 +0.101183 0.260108 0.335952 +0.098776 0.253467 0.327342 +0.096347 0.246850 0.318783 +0.094059 0.240264 0.310267 +0.091788 0.233727 0.301758 +0.089506 0.227245 0.293318 +0.087341 0.220800 0.284914 +0.085142 0.214360 0.276576 +0.083069 0.207981 0.268249 +0.081098 0.201631 
0.259992 +0.079130 0.195361 0.251781 +0.077286 0.189136 0.243589 +0.075571 0.182943 0.235502 +0.073993 0.176835 0.227434 +0.072410 0.170785 0.219433 +0.071045 0.164795 0.211500 +0.069767 0.158901 0.203628 +0.068618 0.153040 0.195818 +0.067560 0.147319 0.188124 +0.066665 0.141671 0.180452 +0.065923 0.136076 0.172917 +0.065339 0.130695 0.165458 +0.064911 0.125349 0.158169 +0.064636 0.120132 0.150946 +0.064517 0.115070 0.143889 +0.064554 0.110222 0.136957 +0.064749 0.105427 0.130230 +0.065100 0.100849 0.123569 +0.065383 0.096469 0.117170 +0.065574 0.092338 0.111008 +0.065892 0.088201 0.104982 +0.066388 0.084134 0.099288 +0.067108 0.080051 0.093829 +0.068193 0.076099 0.088470 +0.069720 0.072283 0.083025 +0.071639 0.068654 0.077544 +0.073978 0.065058 0.072110 +0.076596 0.061657 0.066651 +0.079637 0.058550 0.061133 +0.082963 0.055666 0.055745 +0.086537 0.052997 0.050336 +0.090315 0.050699 0.045040 +0.094260 0.048753 0.039773 +0.098319 0.047041 0.034683 +0.102458 0.045624 0.030074 +0.106732 0.044705 0.026012 +0.110986 0.043972 0.022379 +0.115245 0.043596 0.019150 +0.119547 0.043567 0.016299 +0.123812 0.043861 0.013797 +0.128105 0.044459 0.011588 +0.132315 0.045229 0.009531 +0.136451 0.046164 0.007895 +0.140635 0.047374 0.006502 +0.144884 0.048634 0.005327 +0.149230 0.049836 0.004346 +0.153685 0.050997 0.003537 +0.158309 0.052130 0.002882 +0.163014 0.053218 0.002363 +0.167811 0.054240 0.001963 +0.172736 0.055172 0.001669 +0.177801 0.056018 0.001469 +0.182863 0.056820 0.001340 +0.188058 0.057574 0.001262 +0.193233 0.058514 0.001226 +0.198463 0.059550 0.001227 +0.203778 0.060501 0.001260 +0.209092 0.061486 0.001322 +0.214470 0.062710 0.001412 +0.219897 0.063823 0.001529 +0.225345 0.065027 0.001675 +0.230856 0.066297 0.001853 +0.236422 0.067645 0.002068 +0.242016 0.069092 0.002325 +0.247681 0.070458 0.002632 +0.253390 0.071986 0.002998 +0.259176 0.073640 0.003435 +0.264997 0.075237 0.003955 +0.270934 0.076965 0.004571 +0.276928 0.078822 0.005301 +0.283017 0.080819 0.006161 
+0.289196 0.082879 0.007171 +0.295466 0.085075 0.008349 +0.301858 0.087460 0.009726 +0.308387 0.089912 0.011455 +0.315024 0.092530 0.013324 +0.321806 0.095392 0.015413 +0.328738 0.098396 0.017780 +0.335805 0.101580 0.020449 +0.343036 0.104977 0.023440 +0.350413 0.108640 0.026771 +0.357947 0.112564 0.030456 +0.365629 0.116658 0.034571 +0.373470 0.120971 0.039115 +0.381463 0.125606 0.043693 +0.389583 0.130457 0.048471 +0.397845 0.135474 0.053136 +0.406220 0.140795 0.057848 +0.414690 0.146274 0.062715 +0.423229 0.151979 0.067685 +0.431837 0.157906 0.073044 +0.440444 0.164028 0.078620 +0.449085 0.170269 0.084644 +0.457704 0.176666 0.090869 +0.466314 0.183213 0.097335 +0.474900 0.189888 0.104064 +0.483420 0.196677 0.111039 +0.491910 0.203516 0.118190 +0.500322 0.210433 0.125501 +0.508690 0.217425 0.132983 +0.516977 0.224432 0.140623 +0.525197 0.231543 0.148349 +0.533349 0.238624 0.156261 +0.541440 0.245755 0.164233 +0.549481 0.252923 0.172265 +0.557462 0.260091 0.180403 +0.565378 0.267255 0.188640 +0.573272 0.274461 0.196924 +0.581112 0.281673 0.205237 +0.588920 0.288894 0.213625 +0.596716 0.296114 0.222054 +0.604484 0.303345 0.230529 +0.612228 0.310617 0.239052 +0.619976 0.317867 0.247618 +0.627708 0.325132 0.256189 +0.635438 0.332443 0.264815 +0.643173 0.339745 0.273490 +0.650917 0.347064 0.282179 +0.658661 0.354395 0.290887 +0.666419 0.361751 0.299640 +0.674194 0.369121 0.308415 +0.681975 0.376518 0.317219 +0.689783 0.383920 0.326043 +0.697596 0.391354 0.334929 +0.705434 0.398794 0.343796 +0.713288 0.406271 0.352720 +0.721158 0.413757 0.361662 +0.729054 0.421259 0.370618 +0.736968 0.428796 0.379616 +0.744900 0.436349 0.388639 +0.752851 0.443923 0.397680 +0.760831 0.451512 0.406747 +0.768821 0.459124 0.415838 +0.776844 0.466756 0.424962 +0.784879 0.474407 0.434092 +0.792935 0.482080 0.443269 +0.801009 0.489763 0.452465 +0.809110 0.497486 0.461672 +0.817222 0.505207 0.470910 +0.825358 0.512962 0.480170 +0.833517 0.520732 0.489445 +0.841692 0.528527 0.498763 +0.849885 
0.536335 0.508096 +0.858092 0.544161 0.517448 +0.866324 0.552013 0.526825 +0.874568 0.559879 0.536218 +0.882829 0.567761 0.545643 +0.891110 0.575670 0.555082 +0.899407 0.583585 0.564550 +0.907716 0.591530 0.574038 +0.916031 0.599492 0.583552 +0.924368 0.607473 0.593095 +0.932714 0.615460 0.602649 +0.941076 0.623483 0.612229 +0.949447 0.631512 0.621832 +0.957832 0.639563 0.631467 +0.966219 0.647628 0.641113 +0.974619 0.655718 0.650792 +0.983030 0.663823 0.660487 +0.991448 0.671939 0.670216 +0.999873 0.680072 0.679950 diff --git a/newt/colormaps/berlin/berlin.xcmap b/newt/colormaps/berlin/berlin.xcmap new file mode 100644 index 0000000..2d183e9 --- /dev/null +++ b/newt/colormaps/berlin/berlin.xcmap @@ -0,0 +1,268 @@ + + + + + + scientific-berlin + linear + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/newt/colormaps/berlin/berlin_PARAVIEW.xml b/newt/colormaps/berlin/berlin_PARAVIEW.xml new file mode 100644 index 0000000..bb5420d --- /dev/null +++ b/newt/colormaps/berlin/berlin_PARAVIEW.xml @@ -0,0 +1,260 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + \ No newline at end of file diff --git a/newt/colormaps/berlin/berlin_QGIS.xml b/newt/colormaps/berlin/berlin_QGIS.xml new file mode 100644 index 0000000..d526c99 --- /dev/null +++ b/newt/colormaps/berlin/berlin_QGIS.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/newt/colormaps/vik/DiscretePalettes/vik10.gpl b/newt/colormaps/vik/DiscretePalettes/vik10.gpl new file mode 100644 index 0000000..a5a38ae --- /dev/null +++ b/newt/colormaps/vik/DiscretePalettes/vik10.gpl @@ -0,0 +1,14 @@ +GIMP Palette +Name: vik 10 Swatches +Columns: 1 +# + 0 18 97 vik-1 #001261 + 3 62 125 vik-29 #033E7D + 30 111 157 vik-58 #1E6F9D +113 168 196 vik-86 #71A8C4 +201 221 231 vik-114 #C9DDE7 +234 206 189 vik-143 #EACEBD +211 151 116 vik-171 #D39774 +190 101 51 vik-199 #BE6533 +139 39 6 vik-228 #8B2706 + 89 0 8 vik-256 #590008 diff --git a/newt/colormaps/vik/DiscretePalettes/vik10.mat b/newt/colormaps/vik/DiscretePalettes/vik10.mat new file mode 100644 index 0000000000000000000000000000000000000000..e9897482ac37bc3078b38bc10b88d7548a532438 GIT binary patch literal 435 zcmeZu4DoSvQZUssQ1EpO(M`+DN!3vZ$Vn_o%P-2c0*X0%nwjV*I2WZRmZYXAl$oNDC+Ue`B94 zU%cb|zVr8uGy2ZT{+=h#=6+Wu?pFAwr@p%0HQE2S>fL-1by~Z6;Ut5}JJsji_MTm7 zY`?A|W4~wZrJWP*WPf9ijHu|dxsmbz+lTuOU)`U-(Z70kzwnO-+iLI1-FC3Oxcpn| ztlehI-&E(W-!#Ac>$zb4w?`j+&+Gm1^F&>L_SW-vFU|fYSj2RDGN1gm|7t6CJGR9h zs9XAL?%QK==5ORHitK#8@6P}4(39-`Y1;8`c~k2?{i)S|b3G>ZW5MI==HHI@&AGL& y>U(kZM9IlZx2}D6Yx_Cfn2LvQJb!;*GyC>;Ro~6?gTMH>AFtYH%P{RgWdH!UkioqG literal 0 HcmV?d00001 diff --git a/newt/colormaps/vik/DiscretePalettes/vik10.spk b/newt/colormaps/vik/DiscretePalettes/vik10.spk new file mode 100644 index 0000000..df98119 --- /dev/null +++ b/newt/colormaps/vik/DiscretePalettes/vik10.spk @@ -0,0 +1,10 @@ + 0.00 0.13 6.98 37.95 + 10.98 1.00 24.13 48.92 + 22.35 11.77 43.56 61.58 + 33.33 44.12 65.88 76.73 + 44.31 78.91 86.70 90.50 + 55.69 91.81 80.60 74.17 + 66.67 82.87 59.29 45.67 + 77.65 74.32 39.78 20.16 + 89.02 54.60 15.44 2.19 +100.00 
35.04 0.01 3.05 diff --git a/newt/colormaps/vik/DiscretePalettes/vik10.txt b/newt/colormaps/vik/DiscretePalettes/vik10.txt new file mode 100644 index 0000000..ca6be9e --- /dev/null +++ b/newt/colormaps/vik/DiscretePalettes/vik10.txt @@ -0,0 +1,12 @@ +Scientific Colour Map Categorical Palette +vik 10 Swatches + 0 18 97 vik-1 #001261 + 3 62 125 vik-29 #033E7D + 30 111 157 vik-58 #1E6F9D +113 168 196 vik-86 #71A8C4 +201 221 231 vik-114 #C9DDE7 +234 206 189 vik-143 #EACEBD +211 151 116 vik-171 #D39774 +190 101 51 vik-199 #BE6533 +139 39 6 vik-228 #8B2706 + 89 0 8 vik-256 #590008 diff --git a/newt/colormaps/vik/DiscretePalettes/vik100.gpl b/newt/colormaps/vik/DiscretePalettes/vik100.gpl new file mode 100644 index 0000000..0553d9d --- /dev/null +++ b/newt/colormaps/vik/DiscretePalettes/vik100.gpl @@ -0,0 +1,104 @@ +GIMP Palette +Name: vik 100 Swatches +Columns: 1 +# + 0 18 97 vik-1 #001261 + 1 23 100 vik-4 #011764 + 1 26 102 vik-6 #011A66 + 2 31 105 vik-9 #021F69 + 2 34 107 vik-11 #02226B + 2 39 110 vik-14 #02276E + 2 42 112 vik-16 #022A70 + 2 46 115 vik-19 #022E73 + 2 51 118 vik-22 #023376 + 2 54 120 vik-24 #023678 + 2 58 123 vik-27 #023A7B + 3 62 125 vik-29 #033E7D + 3 66 128 vik-32 #034280 + 3 69 130 vik-34 #034582 + 3 74 133 vik-37 #034A85 + 4 79 136 vik-40 #044F88 + 5 82 138 vik-42 #05528A + 7 87 141 vik-45 #07578D + 9 91 144 vik-47 #095B90 + 14 96 147 vik-50 #0E6093 + 19 102 151 vik-53 #136697 + 23 105 153 vik-55 #176999 + 30 111 157 vik-58 #1E6F9D + 35 115 160 vik-60 #2373A0 + 43 121 164 vik-63 #2B79A4 + 48 125 166 vik-65 #307DA6 + 57 131 171 vik-68 #3983AB + 66 137 175 vik-71 #4289AF + 72 141 178 vik-73 #488DB2 + 81 148 182 vik-76 #5194B6 + 87 152 185 vik-78 #5798B9 + 97 158 189 vik-81 #619EBD +103 162 192 vik-83 #67A2C0 +113 168 196 vik-86 #71A8C4 +122 174 200 vik-89 #7AAEC8 +128 178 202 vik-91 #80B2CA +138 184 206 vik-94 #8AB8CE +144 188 209 vik-96 #90BCD1 +154 194 213 vik-99 #9AC2D5 +160 197 216 vik-101 #A0C5D8 +170 203 220 vik-104 #AACBDC +179 209 223 vik-107 
#B3D1DF +186 213 226 vik-109 #BAD5E2 +195 218 229 vik-112 #C3DAE5 +201 221 231 vik-114 #C9DDE7 +210 225 233 vik-117 #D2E1E9 +216 228 233 vik-119 #D8E4E9 +224 230 233 vik-122 #E0E6E9 +231 231 231 vik-125 #E7E7E7 +234 230 228 vik-127 #EAE6E4 +237 228 222 vik-130 #EDE4DE +238 225 218 vik-132 #EEE1DA +238 221 211 vik-135 #EEDDD3 +237 215 203 vik-138 #EDD7CB +236 211 197 vik-140 #ECD3C5 +234 206 189 vik-143 #EACEBD +233 202 184 vik-145 #E9CAB8 +230 196 176 vik-148 #E6C4B0 +228 191 170 vik-150 #E4BFAA +226 186 162 vik-153 #E2BAA2 +223 180 154 vik-156 #DFB49A +222 176 149 vik-158 #DEB095 +219 170 141 vik-161 #DBAA8D +218 166 136 vik-163 #DAA688 +215 160 129 vik-166 #D7A081 +214 157 124 vik-168 #D69D7C +211 151 116 vik-171 #D39774 +209 146 109 vik-174 #D1926D +207 142 104 vik-176 #CF8E68 +205 137 97 vik-179 #CD8961 +204 133 93 vik-181 #CC855D +201 128 86 vik-184 #C98056 +200 124 81 vik-186 #C87C51 +198 119 74 vik-189 #C6774A +195 114 67 vik-192 #C37243 +194 110 63 vik-194 #C26E3F +191 105 56 vik-197 #BF6938 +190 101 51 vik-199 #BE6533 +187 96 45 vik-202 #BB602D +184 92 40 vik-204 #B85C28 +181 85 33 vik-207 #B55521 +176 79 27 vik-210 #B04F1B +173 74 22 vik-212 #AD4A16 +167 67 16 vik-215 #A74310 +163 62 13 vik-217 #A33E0D +156 55 9 vik-220 #9C3709 +150 49 7 vik-223 #963107 +145 45 6 vik-225 #912D06 +139 39 6 vik-228 #8B2706 +135 36 6 vik-230 #872406 +129 31 6 vik-233 #811F06 +126 29 6 vik-235 #7E1D06 +120 24 6 vik-238 #781806 +115 20 6 vik-241 #731406 +111 17 7 vik-243 #6F1107 +106 13 7 vik-246 #6A0D07 +103 10 7 vik-248 #670A07 + 98 6 7 vik-251 #620607 + 94 3 8 vik-253 #5E0308 + 89 0 8 vik-256 #590008 diff --git a/newt/colormaps/vik/DiscretePalettes/vik100.mat b/newt/colormaps/vik/DiscretePalettes/vik100.mat new file mode 100644 index 0000000000000000000000000000000000000000..8388de45d8b1dbbe1d0b07bf43579c3d88f8c73b GIT binary patch literal 2492 zcmV;t2}AZxK~zjZLLfCRFd$7qR4ry{Y-KDUP;6mzW^ZzBIv`C!LrFF?EFeR2Wnpw> 
zWFT*DIv`YNbs$e;bRaS`ATc#MFg7|cI3O}GF*zV0ARr(hARr(hARr(hARr(hARr(h zARr(hARr(hARr(hARr(hARr(B00000000000ZB~{0000q2><|ioGny&G!=*&eUUXw z_AzKBS&FGtA~z*cVr-KlS}fHd3{pmfvQ)B%p(2x{gcOArS(D}2;yv4Y&)b)%B&PiO z`JLZ<=iKj}@7(kKb?^7d5eS4FDFQ)!i6sgD=l_-1zq9ueM3=DaUoHIX@4pxu69`Ky zgcD0`cZT$YShFi)t^D*XzXgF4Sy>tWHXN-E=@-6jjKUoynZ|CV7`$2bv}Ww?6|7iv zDadn*#UDY;;85{6gckb*e7O*Z?-`HM&7Q>}$W;5Fs8T#?WtD%>1LNT~Z6X>Pa22!n zZcrahUBeZFZdskEWaxe_G<*`Dilge8LXn>{LC<;TcBwWW**(Xc<;X>-PA1Rp>?(uw z#%LHiK7%nWHz3!q22P7b_J(VTAaty?2#apQs6&LUk8dk{44=;J(eA*fHEq6ufn7M> z=&%1Igai|-t#aw}eaQa6=*zMjL~^Ws?9{{%=0DH0SYZUBBQN5%43m-KVC$4`K8ABD zS#SO+nt+I*uUB=X;ld%)Wno%$lwNc(%XgTj$K z{7@{-36#fnpQ}a7C=gvzteh%kLZ&! zDq7e0X#8uFf14v8l{Ud|HZJGmN1p%sX%Y`#E}!1=KAwk%E%VGYQyv7*8Z;OZJmebc zdW#de!01hJjhkG^{~8~-*@}x}BdVP>N?epU%^Ht(a?qc$JE8#oBjP;y;0q+g&>P4Ghp}uebkE&@RD3sIh z-6fk4zotI~8^Y#A zM_qakN@P=V;Ta#6j3i=UdmGHrKcmR={0A^3wAnsE}rYSQ+XgD)> zY(vj`9AQz*Ro0E*?S}W-Ct}EmUM6)&GKhly%adb{)KRRLc1buoI)<`idVS9B6QCM? z&#?8HMCgchy*dG=vrZ8Z$tILNpg%=BH z{uCz$F4ymN+^oU`?LldO*ANq(dvzRbTv?DIyt`_k#73e=%TZw}8&4eMw;i_QV23|j z(V>Hbx~Np7$8h0qKw#ZB;o<888K;CY9y%ju=NDc02-bV`%$vxE2$Olj{Gb30QRcA& zSpr;o+p6SE7hoCXx2m+S0?4SbeHx|&5Gi2%l(MAH^gm$BSqqSx$MO2Rf{z6fe@Bxj z9}ip83x$b1Xup;%d)dhab5@6_dXS6mch*X`5;;ixcAofEf`bDLwbNL-(-WNv$Mcd| zD3UEzjgVzQtk>pFrZW@u%j9EJ$PB~{+zT~c&wzWCzS!=vDX4liJa=25V{`sbC$HPm z!RYNXC>o+ce>&inl^zYdQs(_!YpA$qGyHN?mI_Kj@tWmkllUl0wD`aq$5UbUzNox0 z*zHfbTV6Yg>#yb{=_e?t-ydl=9!5s)10Bysi4h#rd?-(n7{&}M_}cHmLpV-*v4h+& zfQr!-jK!Nv`EdC3S3^AsEU$5C9fe+m+*NBX$$JOa4I$4*&vby2GtNY48{Af{tU1-( z43C^Ht<#!s;3Y5B+nn+mPD7U})$i5g?v@sgp+_y^5;k<4+*l3rw{~OgQ*e7+(>- z+*@QkxL;y%!-;}Zb!ZT7)fGb>WPXDJgu#i;=uZ%{c8CX4qi@INq8u5@k?6EcH1*tlrdR)!&#we{JoJ!O z`9*;Jmt2C6mz%{i#-$WGS4(=I4*bQtgb#4dUaNs z@ye4>&dJa)*fkET=ReHUd`GdMy5+OuFBA|(@7J05jljI4uie=rVv+>;eUh*1B6^YHSt+tysS8&Nf(uFAZy{Zv>{IB|hG>z- z!xNe<@DoN?k5o0{;8X2EuZORYc|E^A)#C-Cs*>MC_*DRM-&Ym$N)dy4Wy7IDc;|D< zyNmN+B)1rqM!Jn@v1bXyQ`zX9xa#C*kqyOwsz#|7*|0Y<^s;uk4dY*BPF;`6L8>L~ 
z>`YHC45amvrk>})g(#azlgvj#d|X!ecs>->wCZ*&6oBgTYFa1j4)Q}j7#f=0#eV^| GQm~g2da(uo literal 0 HcmV?d00001 diff --git a/newt/colormaps/vik/DiscretePalettes/vik100.spk b/newt/colormaps/vik/DiscretePalettes/vik100.spk new file mode 100644 index 0000000..e32b127 --- /dev/null +++ b/newt/colormaps/vik/DiscretePalettes/vik100.spk @@ -0,0 +1,100 @@ + 0.00 0.13 6.98 37.95 + 1.18 0.41 8.96 39.15 + 1.96 0.56 10.23 39.94 + 3.14 0.71 12.08 41.13 + 3.92 0.79 13.31 41.92 + 5.10 0.86 15.11 43.09 + 5.88 0.89 16.32 43.87 + 7.06 0.93 18.11 45.03 + 8.24 0.95 19.90 46.20 + 9.02 0.96 21.10 46.97 + 10.20 0.98 22.91 48.14 + 10.98 1.00 24.13 48.92 + 12.16 1.05 25.97 50.10 + 12.94 1.12 27.20 50.89 + 14.12 1.29 29.09 52.10 + 15.29 1.61 31.00 53.33 + 16.08 1.96 32.30 54.17 + 17.25 2.80 34.28 55.45 + 18.04 3.68 35.63 56.33 + 19.22 5.43 37.71 57.69 + 20.39 7.55 39.85 59.10 + 21.18 9.14 41.31 60.08 + 22.35 11.77 43.56 61.58 + 23.14 13.67 45.09 62.61 + 24.31 16.72 47.43 64.19 + 25.10 18.86 49.02 65.26 + 26.27 22.21 51.43 66.89 + 27.45 25.70 53.85 68.54 + 28.24 28.08 55.47 69.64 + 29.41 31.72 57.90 71.29 + 30.20 34.17 59.51 72.39 + 31.37 37.89 61.92 74.03 + 32.16 40.38 63.51 75.12 + 33.33 44.12 65.88 76.73 + 34.51 47.87 68.23 78.34 + 35.29 50.37 69.79 79.40 + 36.47 54.11 72.11 80.98 + 37.25 56.61 73.64 82.02 + 38.43 60.35 75.93 83.58 + 39.22 62.84 77.45 84.61 + 40.39 66.56 79.69 86.12 + 41.57 70.29 81.90 87.59 + 42.35 72.76 83.34 88.51 + 43.53 76.47 85.41 89.78 + 44.31 78.91 86.70 90.50 + 45.49 82.49 88.41 91.28 + 46.27 84.76 89.33 91.52 + 47.45 87.87 90.26 91.34 + 48.63 90.42 90.52 90.40 + 49.41 91.72 90.28 89.34 + 50.59 92.98 89.33 87.18 + 51.37 93.39 88.36 85.45 + 52.55 93.46 86.53 82.56 + 53.73 93.07 84.42 79.47 + 54.51 92.63 82.92 77.36 + 55.69 91.81 80.60 74.17 + 56.47 91.21 79.03 72.04 + 57.65 90.25 76.67 68.86 + 58.43 89.60 75.10 66.75 + 59.61 88.61 72.76 63.63 + 60.78 87.63 70.44 60.54 + 61.57 86.98 68.91 58.50 + 62.75 86.01 66.65 55.47 + 63.53 85.37 65.15 53.48 + 64.71 84.43 62.93 50.51 + 
65.49 83.80 61.47 48.56 + 66.67 82.87 59.29 45.67 + 67.84 81.95 57.15 42.81 + 68.63 81.35 55.73 40.93 + 69.80 80.44 53.63 38.13 + 70.59 79.85 52.24 36.28 + 71.76 78.96 50.17 33.54 + 72.55 78.37 48.80 31.73 + 73.73 77.48 46.75 29.04 + 74.90 76.58 44.69 26.36 + 75.69 75.96 43.31 24.59 + 76.86 75.01 41.22 21.93 + 77.65 74.32 39.78 20.16 + 78.82 73.20 37.55 17.50 + 79.61 72.35 35.99 15.71 + 80.78 70.88 33.50 13.05 + 81.96 69.12 30.85 10.41 + 82.75 67.77 28.99 8.73 + 83.92 65.50 26.15 6.41 + 84.71 63.87 24.27 5.06 + 85.88 61.31 21.57 3.48 + 87.06 58.74 19.07 2.60 + 87.84 57.05 17.54 2.33 + 89.02 54.60 15.44 2.19 + 89.80 53.02 14.15 2.17 + 90.98 50.72 12.33 2.22 + 91.76 49.23 11.18 2.28 + 92.94 47.06 9.52 2.39 + 94.12 44.95 7.87 2.52 + 94.90 43.57 6.79 2.61 + 96.08 41.54 5.11 2.74 + 96.86 40.21 3.94 2.82 + 98.04 38.25 2.26 2.93 + 98.82 36.96 1.29 2.99 +100.00 35.04 0.01 3.05 diff --git a/newt/colormaps/vik/DiscretePalettes/vik100.txt b/newt/colormaps/vik/DiscretePalettes/vik100.txt new file mode 100644 index 0000000..8fba65f --- /dev/null +++ b/newt/colormaps/vik/DiscretePalettes/vik100.txt @@ -0,0 +1,102 @@ +Scientific Colour Map Categorical Palette +vik 100 Swatches + 0 18 97 vik-1 #001261 + 1 23 100 vik-4 #011764 + 1 26 102 vik-6 #011A66 + 2 31 105 vik-9 #021F69 + 2 34 107 vik-11 #02226B + 2 39 110 vik-14 #02276E + 2 42 112 vik-16 #022A70 + 2 46 115 vik-19 #022E73 + 2 51 118 vik-22 #023376 + 2 54 120 vik-24 #023678 + 2 58 123 vik-27 #023A7B + 3 62 125 vik-29 #033E7D + 3 66 128 vik-32 #034280 + 3 69 130 vik-34 #034582 + 3 74 133 vik-37 #034A85 + 4 79 136 vik-40 #044F88 + 5 82 138 vik-42 #05528A + 7 87 141 vik-45 #07578D + 9 91 144 vik-47 #095B90 + 14 96 147 vik-50 #0E6093 + 19 102 151 vik-53 #136697 + 23 105 153 vik-55 #176999 + 30 111 157 vik-58 #1E6F9D + 35 115 160 vik-60 #2373A0 + 43 121 164 vik-63 #2B79A4 + 48 125 166 vik-65 #307DA6 + 57 131 171 vik-68 #3983AB + 66 137 175 vik-71 #4289AF + 72 141 178 vik-73 #488DB2 + 81 148 182 vik-76 #5194B6 + 87 152 185 vik-78 
#5798B9 + 97 158 189 vik-81 #619EBD +103 162 192 vik-83 #67A2C0 +113 168 196 vik-86 #71A8C4 +122 174 200 vik-89 #7AAEC8 +128 178 202 vik-91 #80B2CA +138 184 206 vik-94 #8AB8CE +144 188 209 vik-96 #90BCD1 +154 194 213 vik-99 #9AC2D5 +160 197 216 vik-101 #A0C5D8 +170 203 220 vik-104 #AACBDC +179 209 223 vik-107 #B3D1DF +186 213 226 vik-109 #BAD5E2 +195 218 229 vik-112 #C3DAE5 +201 221 231 vik-114 #C9DDE7 +210 225 233 vik-117 #D2E1E9 +216 228 233 vik-119 #D8E4E9 +224 230 233 vik-122 #E0E6E9 +231 231 231 vik-125 #E7E7E7 +234 230 228 vik-127 #EAE6E4 +237 228 222 vik-130 #EDE4DE +238 225 218 vik-132 #EEE1DA +238 221 211 vik-135 #EEDDD3 +237 215 203 vik-138 #EDD7CB +236 211 197 vik-140 #ECD3C5 +234 206 189 vik-143 #EACEBD +233 202 184 vik-145 #E9CAB8 +230 196 176 vik-148 #E6C4B0 +228 191 170 vik-150 #E4BFAA +226 186 162 vik-153 #E2BAA2 +223 180 154 vik-156 #DFB49A +222 176 149 vik-158 #DEB095 +219 170 141 vik-161 #DBAA8D +218 166 136 vik-163 #DAA688 +215 160 129 vik-166 #D7A081 +214 157 124 vik-168 #D69D7C +211 151 116 vik-171 #D39774 +209 146 109 vik-174 #D1926D +207 142 104 vik-176 #CF8E68 +205 137 97 vik-179 #CD8961 +204 133 93 vik-181 #CC855D +201 128 86 vik-184 #C98056 +200 124 81 vik-186 #C87C51 +198 119 74 vik-189 #C6774A +195 114 67 vik-192 #C37243 +194 110 63 vik-194 #C26E3F +191 105 56 vik-197 #BF6938 +190 101 51 vik-199 #BE6533 +187 96 45 vik-202 #BB602D +184 92 40 vik-204 #B85C28 +181 85 33 vik-207 #B55521 +176 79 27 vik-210 #B04F1B +173 74 22 vik-212 #AD4A16 +167 67 16 vik-215 #A74310 +163 62 13 vik-217 #A33E0D +156 55 9 vik-220 #9C3709 +150 49 7 vik-223 #963107 +145 45 6 vik-225 #912D06 +139 39 6 vik-228 #8B2706 +135 36 6 vik-230 #872406 +129 31 6 vik-233 #811F06 +126 29 6 vik-235 #7E1D06 +120 24 6 vik-238 #781806 +115 20 6 vik-241 #731406 +111 17 7 vik-243 #6F1107 +106 13 7 vik-246 #6A0D07 +103 10 7 vik-248 #670A07 + 98 6 7 vik-251 #620607 + 94 3 8 vik-253 #5E0308 + 89 0 8 vik-256 #590008 diff --git a/newt/colormaps/vik/DiscretePalettes/vik25.gpl 
b/newt/colormaps/vik/DiscretePalettes/vik25.gpl new file mode 100644 index 0000000..4669c7c --- /dev/null +++ b/newt/colormaps/vik/DiscretePalettes/vik25.gpl @@ -0,0 +1,29 @@ +GIMP Palette +Name: vik 25 Swatches +Columns: 1 +# + 0 18 97 vik-1 #001261 + 2 35 108 vik-12 #02236C + 2 51 118 vik-22 #023376 + 3 68 129 vik-33 #034481 + 6 86 140 vik-44 #06568C + 21 103 152 vik-54 #156798 + 48 125 166 vik-65 #307DA6 + 78 146 180 vik-75 #4E92B4 +113 168 196 vik-86 #71A8C4 +148 190 210 vik-97 #94BED2 +179 209 223 vik-107 #B3D1DF +213 227 233 vik-118 #D5E3E9 +236 229 224 vik-129 #ECE5E0 +237 213 200 vik-139 #EDD5C8 +228 191 170 vik-150 #E4BFAA +220 172 144 vik-160 #DCAC90 +211 151 116 vik-171 #D39774 +203 131 90 vik-182 #CB835A +195 114 67 vik-192 #C37243 +186 94 42 vik-203 #BA5E2A +169 69 18 vik-214 #A94512 +148 47 6 vik-224 #942F06 +126 29 6 vik-235 #7E1D06 +108 14 7 vik-245 #6C0E07 + 89 0 8 vik-256 #590008 diff --git a/newt/colormaps/vik/DiscretePalettes/vik25.mat b/newt/colormaps/vik/DiscretePalettes/vik25.mat new file mode 100644 index 0000000000000000000000000000000000000000..f4c47eb220444abaa35bfafa6f9a7e6da8181bc4 GIT binary patch literal 804 zcmeZu4DoSvQZUssQ1EpO(M`+DN!3vZ$Vn_o%P-2c0*X0%nwjV*I2WZRmZYXA6FN($>)@>PpdQ$^eSJ+u2{73P}$_` zv)(P)J>&A+({pzhPtW?S5fyuS``>T3zvteUV`>c8I@w`^(e1Vm_x~sDyn8ZD=ggZM z_ttH^Q5Ukge5R+#f2XYS`fW0P3}36KdB6Ic^kkdS=J3y64<-maut*B+`@%N+L3j__ zlhAEfu3vOX2-4@x!tB;+VcbmOf z#hc(ODJ$Kk~i>9tN)AHrqw#7bD za-G{n2adbt``O-wx%$8NnpSXe_5#rW-W|E+?QbV0yWCkknVnThd0m;vo;V591xcST zUD&mi?L=)(`?8l?WAXOj^Ec`p=lU zWFT*DIv`YNbs$e;bRaS`ATc#MFg7|cI3O}GF*zV0ARr(hARr(hARr(hARr(hARr(h zARr(hARr(hARr(hARr(hARr(B00000000000ZB~{0002d1ONbdoGp-fG!zIN$DOfq zM3(I0)stgtYAor{=DVfrQt25*%XI40h1E2YV@12#Hc6oxM^myo$RLU|(I}f?+{`fM zFwB{32}ySLZNK+E@AJOj_rCx9OtrMMWO`a!voto;`rrSZ{+H~gL05zJmyUY)h=;4j zUuk%#*{#Vi66n`&gDkeRl-fu|?p$LQXKo_us;(|6$WB7^?fu8bPMt->s)c^KbCa=s z#6vgwSPFh!%rS9{JCDz&HjQ45$wc|(g?lD*3t;4u0AfHH$kPme;(}W6x6pj|A7Dea 
z%Dk*(h=bQYu2I&tZD5wXnr~Rt1(pdflzNMY@UO4dePjp_+uzOhtrTI_-6p@}3JG#z z+C8(FQsmU+Z(dzIjD%!y9qE=FL4j5-tsEsH>Bp-bUDPN()JPR|s8RKkaKcZd2Jr&x z&LmBZiAzDxI;K?MJNdUwomAm*VWdJbq6Ep|P@edR5;ND2I*f1?*b#UkESs!=@XdD9 zj4nCWj`R0MQsmgPGVH@ji40rns~3#<%U}^5v@GY{+5w6fJ&*@!+k|RhzGNKlQVgk;}wz0 zJ>3JQ^X2BP$GY%bPtW*;uoU~8LTwmJpMb{;>k2h|4ojz-Zok}NW2ORuEwn}yIGDH% z269kna`9SnE*CaFy;i9?ZP?RT;Qh08C)nS_XuErLqtU5r;qJsDyS%aP3fM?y5erx9=Kh*2EpGGo74fD*j-B@3JCcWy#=0?n z_`D3_Pesm$p2|^O&)@1URY3e++j~Is&j-HaOU^W_u&;FvW{s-Au21n<9-+czmj->^ z7$v5TKBv=-6nIIVk{UG1;Js|d;@xk_f*ZKPmlE zKNh`-%A__6kn@lE?;j0>u!~g-^zI8WK&V~sO%owLZcLT5SPb3VAJ1^*VhF7atHR19 zm|Su99O3*RB0?z{6<4Gfvsq{M19b?Usj;rX!NXv>B?y%sGCUq(8c@vTU=VcPrdcXr z^T68alT3jF(+%X}N+oQmMj0WeR0vl#FDo%sBh{G^?BJuu+bAP`30sAX@?iN$z7o-b z&uq1|0*t5gS??@lSi0?p@Dsa+kbgAqTzKgq@!_3?8Sv9Q%DLIihxtvBM=&&{mY+oQm zsly{kYG7XI-5KZ4L?UV|iJdn=GM$b~=VqZrkU~D_l?i_mahDG{3!cV|_&~}vY^^o9 bA~K}I!rHES#bgfXQE!OE)p__ATtr_Fs8)#3 literal 0 HcmV?d00001 diff --git a/newt/colormaps/vik/DiscretePalettes/vik50.spk b/newt/colormaps/vik/DiscretePalettes/vik50.spk new file mode 100644 index 0000000..0cb09ff --- /dev/null +++ b/newt/colormaps/vik/DiscretePalettes/vik50.spk @@ -0,0 +1,50 @@ + 0.00 0.13 6.98 37.95 + 1.96 0.56 10.23 39.94 + 3.92 0.79 13.31 41.92 + 6.27 0.91 16.91 44.26 + 8.24 0.95 19.90 46.20 + 10.20 0.98 22.91 48.14 + 12.16 1.05 25.97 50.10 + 14.12 1.29 29.09 52.10 + 16.47 2.19 32.96 54.59 + 18.43 4.22 36.32 56.78 + 20.39 7.55 39.85 59.10 + 22.35 11.77 43.56 61.58 + 24.31 16.72 47.43 64.19 + 26.67 23.36 52.23 67.44 + 28.63 29.29 56.28 70.19 + 30.59 35.41 60.32 72.94 + 32.55 41.62 64.30 75.66 + 34.51 47.87 68.23 78.34 + 36.86 55.36 72.88 81.50 + 38.82 61.59 76.69 84.09 + 40.78 67.81 80.44 86.62 + 42.75 74.00 84.05 88.96 + 44.71 80.12 87.30 90.80 + 47.06 86.89 90.02 91.48 + 49.02 91.12 90.44 89.91 + 50.98 93.22 88.87 86.34 + 52.94 93.38 85.85 81.54 + 55.29 92.10 81.38 75.23 + 57.25 90.57 77.45 69.91 + 59.22 88.94 73.53 64.67 + 61.18 87.30 69.68 59.51 + 63.14 85.69 65.90 54.47 + 65.49 
83.80 61.47 48.56 + 67.45 82.26 57.86 43.76 + 69.41 80.74 54.33 39.06 + 71.37 79.25 50.86 34.45 + 73.33 77.77 47.43 29.93 + 75.69 75.96 43.31 24.59 + 77.65 74.32 39.78 20.16 + 79.61 72.35 35.99 15.71 + 81.57 69.74 31.75 11.28 + 83.53 66.29 27.10 7.15 + 85.88 61.31 21.57 3.48 + 87.84 57.05 17.54 2.33 + 89.80 53.02 14.15 2.17 + 91.76 49.23 11.18 2.28 + 93.73 45.65 8.43 2.48 + 96.08 41.54 5.11 2.74 + 98.04 38.25 2.26 2.93 +100.00 35.04 0.01 3.05 diff --git a/newt/colormaps/vik/DiscretePalettes/vik50.txt b/newt/colormaps/vik/DiscretePalettes/vik50.txt new file mode 100644 index 0000000..f7e2a53 --- /dev/null +++ b/newt/colormaps/vik/DiscretePalettes/vik50.txt @@ -0,0 +1,52 @@ +Scientific Colour Map Categorical Palette +vik 50 Swatches + 0 18 97 vik-1 #001261 + 1 26 102 vik-6 #011A66 + 2 34 107 vik-11 #02226B + 2 43 113 vik-17 #022B71 + 2 51 118 vik-22 #023376 + 2 58 123 vik-27 #023A7B + 3 66 128 vik-32 #034280 + 3 74 133 vik-37 #034A85 + 6 84 139 vik-43 #06548B + 11 93 145 vik-48 #0B5D91 + 19 102 151 vik-53 #136697 + 30 111 157 vik-58 #1E6F9D + 43 121 164 vik-63 #2B79A4 + 60 133 172 vik-69 #3C85AC + 75 144 179 vik-74 #4B90B3 + 90 154 186 vik-79 #5A9ABA +106 164 193 vik-84 #6AA4C1 +122 174 200 vik-89 #7AAEC8 +141 186 208 vik-95 #8DBAD0 +157 196 214 vik-100 #9DC4D6 +173 205 221 vik-105 #ADCDDD +189 214 227 vik-110 #BDD6E3 +204 223 232 vik-115 #CCDFE8 +222 230 233 vik-121 #DEE6E9 +232 231 229 vik-126 #E8E7E5 +238 227 220 vik-131 #EEE3DC +238 219 208 vik-136 #EEDBD0 +235 208 192 vik-142 #EBD0C0 +231 198 178 vik-147 #E7C6B2 +227 188 165 vik-152 #E3BCA5 +223 178 152 vik-157 #DFB298 +219 168 139 vik-162 #DBA88B +214 157 124 vik-168 #D69D7C +210 148 112 vik-173 #D29470 +206 139 100 vik-178 #CE8B64 +202 130 88 vik-183 #CA8258 +198 121 76 vik-188 #C6794C +194 110 63 vik-194 #C26E3F +190 101 51 vik-199 #BE6533 +184 92 40 vik-204 #B85C28 +178 81 29 vik-209 #B2511D +169 69 18 vik-214 #A94512 +156 55 9 vik-220 #9C3709 +145 45 6 vik-225 #912D06 +135 36 6 vik-230 #872406 +126 29 6 
vik-235 #7E1D06 +116 21 6 vik-240 #741506 +106 13 7 vik-246 #6A0D07 + 98 6 7 vik-251 #620607 + 89 0 8 vik-256 #590008 diff --git a/newt/colormaps/vik/vik.alut b/newt/colormaps/vik/vik.alut new file mode 100644 index 0000000..cae623a --- /dev/null +++ b/newt/colormaps/vik/vik.alut @@ -0,0 +1,256 @@ +0,18,97,255 +1,20,98,255 +1,21,99,255 +1,23,100,255 +1,24,101,255 +1,26,102,255 +2,28,103,255 +2,29,104,255 +2,31,105,255 +2,32,106,255 +2,34,107,255 +2,35,108,255 +2,37,109,255 +2,39,110,255 +2,40,111,255 +2,42,112,255 +2,43,113,255 +2,45,114,255 +2,46,115,255 +2,48,116,255 +2,49,117,255 +2,51,118,255 +2,52,119,255 +2,54,120,255 +2,55,121,255 +2,57,122,255 +2,58,123,255 +3,60,124,255 +3,62,125,255 +3,63,126,255 +3,65,127,255 +3,66,128,255 +3,68,129,255 +3,69,130,255 +3,71,131,255 +3,73,132,255 +3,74,133,255 +4,76,134,255 +4,77,135,255 +4,79,136,255 +5,81,137,255 +5,82,138,255 +6,84,139,255 +6,86,140,255 +7,87,141,255 +8,89,143,255 +9,91,144,255 +11,93,145,255 +12,94,146,255 +14,96,147,255 +16,98,148,255 +17,100,150,255 +19,102,151,255 +21,103,152,255 +23,105,153,255 +25,107,154,255 +28,109,156,255 +30,111,157,255 +32,113,158,255 +35,115,160,255 +37,117,161,255 +40,119,162,255 +43,121,164,255 +45,123,165,255 +48,125,166,255 +51,127,168,255 +54,129,169,255 +57,131,171,255 +60,133,172,255 +63,135,173,255 +66,137,175,255 +69,139,176,255 +72,141,178,255 +75,144,179,255 +78,146,180,255 +81,148,182,255 +84,150,183,255 +87,152,185,255 +90,154,186,255 +93,156,187,255 +97,158,189,255 +100,160,190,255 +103,162,192,255 +106,164,193,255 +109,166,194,255 +113,168,196,255 +116,170,197,255 +119,172,198,255 +122,174,200,255 +125,176,201,255 +128,178,202,255 +132,180,204,255 +135,182,205,255 +138,184,206,255 +141,186,208,255 +144,188,209,255 +148,190,210,255 +151,192,212,255 +154,194,213,255 +157,196,214,255 +160,197,216,255 +163,199,217,255 +167,201,218,255 +170,203,220,255 +173,205,221,255 +176,207,222,255 +179,209,223,255 +182,211,225,255 +186,213,226,255 +189,214,227,255 
+192,216,228,255 +195,218,229,255 +198,219,230,255 +201,221,231,255 +204,223,232,255 +207,224,232,255 +210,225,233,255 +213,227,233,255 +216,228,233,255 +219,229,233,255 +222,230,233,255 +224,230,233,255 +226,231,232,255 +229,231,232,255 +231,231,231,255 +232,231,229,255 +234,230,228,255 +235,230,226,255 +236,229,224,255 +237,228,222,255 +238,227,220,255 +238,225,218,255 +238,224,216,255 +238,222,213,255 +238,221,211,255 +238,219,208,255 +238,217,205,255 +237,215,203,255 +237,213,200,255 +236,211,197,255 +236,209,195,255 +235,208,192,255 +234,206,189,255 +233,204,186,255 +233,202,184,255 +232,200,181,255 +231,198,178,255 +230,196,176,255 +229,193,173,255 +228,191,170,255 +228,190,168,255 +227,188,165,255 +226,186,162,255 +225,184,160,255 +224,182,157,255 +223,180,154,255 +223,178,152,255 +222,176,149,255 +221,174,147,255 +220,172,144,255 +219,170,141,255 +219,168,139,255 +218,166,136,255 +217,164,134,255 +216,162,131,255 +215,160,129,255 +214,159,126,255 +214,157,124,255 +213,155,121,255 +212,153,119,255 +211,151,116,255 +211,149,114,255 +210,148,112,255 +209,146,109,255 +208,144,107,255 +207,142,104,255 +207,140,102,255 +206,139,100,255 +205,137,97,255 +204,135,95,255 +204,133,93,255 +203,131,90,255 +202,130,88,255 +201,128,86,255 +201,126,83,255 +200,124,81,255 +199,123,79,255 +198,121,76,255 +198,119,74,255 +197,117,72,255 +196,116,69,255 +195,114,67,255 +194,112,65,255 +194,110,63,255 +193,109,60,255 +192,107,58,255 +191,105,56,255 +190,103,54,255 +190,101,51,255 +189,100,49,255 +188,98,47,255 +187,96,45,255 +186,94,42,255 +184,92,40,255 +183,90,38,255 +182,88,36,255 +181,85,33,255 +179,83,31,255 +178,81,29,255 +176,79,27,255 +175,76,24,255 +173,74,22,255 +171,72,20,255 +169,69,18,255 +167,67,16,255 +165,64,15,255 +163,62,13,255 +161,60,11,255 +159,57,10,255 +156,55,9,255 +154,53,8,255 +152,51,7,255 +150,49,7,255 +148,47,6,255 +145,45,6,255 +143,43,6,255 +141,41,6,255 +139,39,6,255 +137,38,6,255 +135,36,6,255 +133,34,6,255 +131,33,6,255 +129,31,6,255 
+127,30,6,255 +126,29,6,255 +124,27,6,255 +122,26,6,255 +120,24,6,255 +118,23,6,255 +116,21,6,255 +115,20,6,255 +113,19,7,255 +111,17,7,255 +109,16,7,255 +108,14,7,255 +106,13,7,255 +104,12,7,255 +103,10,7,255 +101,9,7,255 +99,7,7,255 +98,6,7,255 +96,4,8,255 +94,3,8,255 +93,2,8,255 +91,1,8,255 +89,0,8,255 diff --git a/newt/colormaps/vik/vik.clm b/newt/colormaps/vik/vik.clm new file mode 100644 index 0000000..0baba28 --- /dev/null +++ b/newt/colormaps/vik/vik.clm @@ -0,0 +1,201 @@ +0 18 97 +1 20 98 +1 22 100 +1 24 101 +1 26 102 +2 28 103 +2 30 105 +2 32 106 +2 34 107 +2 36 108 +2 39 110 +2 40 111 +2 42 112 +2 44 114 +2 46 115 +2 48 116 +2 50 117 +2 52 119 +2 54 120 +2 55 121 +2 58 123 +3 60 124 +3 62 125 +3 64 126 +3 66 128 +3 68 129 +3 69 130 +3 72 131 +3 74 133 +4 76 134 +4 78 135 +5 80 137 +5 82 138 +6 84 139 +6 86 140 +8 88 142 +9 91 144 +11 93 145 +13 95 146 +15 97 148 +17 100 150 +20 102 151 +22 104 153 +25 107 154 +28 109 156 +31 112 157 +34 114 159 +37 117 161 +41 119 162 +44 122 164 +47 125 166 +51 127 168 +55 130 170 +59 132 172 +63 135 173 +66 137 175 +70 140 177 +74 143 179 +78 146 180 +82 148 182 +86 151 184 +89 154 186 +93 156 187 +98 159 189 +102 161 191 +106 164 193 +110 166 194 +114 169 196 +118 171 198 +122 174 200 +126 177 201 +130 179 203 +134 182 205 +138 184 206 +142 187 208 +147 189 210 +151 192 212 +155 194 213 +158 196 215 +162 198 217 +167 201 218 +171 204 220 +175 206 222 +178 209 223 +182 211 225 +187 213 226 +191 215 228 +195 218 229 +199 219 230 +202 222 231 +206 224 232 +210 225 233 +214 227 233 +218 229 233 +222 230 233 +224 230 233 +227 231 232 +230 231 231 +232 231 229 +234 230 228 +236 230 225 +237 228 222 +238 227 220 +238 225 217 +238 223 214 +238 221 211 +238 219 208 +238 216 204 +237 214 201 +236 211 197 +236 209 194 +234 207 190 +233 204 187 +233 202 184 +232 199 180 +230 197 177 +229 193 173 +228 191 170 +228 189 167 +226 187 163 +225 184 160 +224 181 156 +223 179 153 +222 176 150 +221 174 147 +220 171 143 +219 169 140 +218 
166 136 +217 164 133 +216 161 130 +214 159 127 +214 157 124 +213 154 120 +211 152 117 +211 149 114 +210 148 112 +209 145 108 +207 143 105 +207 140 102 +206 139 99 +205 136 96 +204 133 93 +203 131 90 +202 129 87 +201 127 84 +200 124 81 +199 123 79 +198 120 75 +197 118 73 +196 116 69 +195 114 67 +194 111 64 +193 109 61 +192 107 58 +191 104 55 +190 102 52 +189 100 49 +188 98 47 +187 95 44 +185 93 41 +183 90 38 +182 87 35 +180 84 32 +178 81 29 +176 79 27 +174 75 23 +172 73 21 +169 69 18 +167 66 16 +164 63 14 +162 61 12 +159 57 10 +155 54 9 +153 52 7 +150 49 7 +148 47 6 +144 44 6 +142 42 6 +139 39 6 +137 38 6 +134 35 6 +131 33 6 +129 31 6 +127 30 6 +125 28 6 +122 26 6 +120 24 6 +117 22 6 +115 20 6 +113 19 7 +111 17 7 +108 15 7 +106 13 7 +104 12 7 +102 10 7 +100 8 7 +98 6 7 +96 4 8 +94 3 8 +92 1 8 +89 0 8 diff --git a/newt/colormaps/vik/vik.clr b/newt/colormaps/vik/vik.clr new file mode 100644 index 0000000..53fcd2b --- /dev/null +++ b/newt/colormaps/vik/vik.clr @@ -0,0 +1,102 @@ +ColorMap 2 1 +0 0 18 97 255 +1 1 22 100 255 +2 1 26 102 255 +3 2 30 105 255 +4 2 34 107 255 +5 2 39 110 255 +6 2 42 112 255 +7 2 46 115 255 +8 2 50 117 255 +9 2 54 120 255 +10 2 58 123 255 +11 3 62 125 255 +12 3 66 128 255 +13 3 69 130 255 +14 3 74 133 255 +15 4 78 135 255 +16 5 82 138 255 +17 6 86 140 255 +18 9 91 144 255 +19 13 95 146 255 +20 17 100 150 255 +21 22 104 153 255 +22 28 109 156 255 +23 34 114 159 255 +24 41 119 162 255 +25 47 125 166 255 +26 55 130 170 255 +27 63 135 173 255 +28 70 140 177 255 +29 78 146 180 255 +30 86 151 184 255 +31 93 156 187 255 +32 102 161 191 255 +33 110 166 194 255 +34 118 171 198 255 +35 126 177 201 255 +36 134 182 205 255 +37 142 187 208 255 +38 151 192 212 255 +39 158 196 215 255 +40 167 201 218 255 +41 175 206 222 255 +42 182 211 225 255 +43 191 215 228 255 +44 199 219 230 255 +45 206 224 232 255 +46 214 227 233 255 +47 222 230 233 255 +48 227 231 232 255 +49 232 231 229 255 +50 236 230 225 255 +51 238 227 220 255 +52 238 223 214 255 +53 238 219 208 
255 +54 237 214 201 255 +55 236 209 194 255 +56 233 204 187 255 +57 232 199 180 255 +58 229 193 173 255 +59 228 189 167 255 +60 225 184 160 255 +61 223 179 153 255 +62 221 174 147 255 +63 219 169 140 255 +64 217 164 133 255 +65 214 159 127 255 +66 213 154 120 255 +67 211 149 114 255 +68 209 145 108 255 +69 207 140 102 255 +70 205 136 96 255 +71 203 131 90 255 +72 201 127 84 255 +73 199 123 79 255 +74 197 118 73 255 +75 195 114 67 255 +76 193 109 61 255 +77 191 104 55 255 +78 189 100 49 255 +79 187 95 44 255 +80 183 90 38 255 +81 180 84 32 255 +82 176 79 27 255 +83 172 73 21 255 +84 167 66 16 255 +85 162 61 12 255 +86 155 54 9 255 +87 150 49 7 255 +88 144 44 6 255 +89 139 39 6 255 +90 134 35 6 255 +91 129 31 6 255 +92 125 28 6 255 +93 120 24 6 255 +94 115 20 6 255 +95 111 17 7 255 +96 106 13 7 255 +97 102 10 7 255 +98 98 6 7 255 +99 94 3 8 255 +100 89 0 8 255 diff --git a/newt/colormaps/vik/vik.cpt b/newt/colormaps/vik/vik.cpt new file mode 100644 index 0000000..cbffff1 --- /dev/null +++ b/newt/colormaps/vik/vik.cpt @@ -0,0 +1,261 @@ +# +# vik +# www.fabiocrameri.ch/colourmaps +0.000000 0 18 97 0.003922 1 20 98 +0.003922 1 20 98 0.007843 1 21 99 +0.007843 1 21 99 0.011765 1 23 100 +0.011765 1 23 100 0.015686 1 24 101 +0.015686 1 24 101 0.019608 1 26 102 +0.019608 1 26 102 0.023529 2 28 103 +0.023529 2 28 103 0.027451 2 29 104 +0.027451 2 29 104 0.031373 2 31 105 +0.031373 2 31 105 0.035294 2 32 106 +0.035294 2 32 106 0.039216 2 34 107 +0.039216 2 34 107 0.043137 2 35 108 +0.043137 2 35 108 0.047059 2 37 109 +0.047059 2 37 109 0.050980 2 39 110 +0.050980 2 39 110 0.054902 2 40 111 +0.054902 2 40 111 0.058824 2 42 112 +0.058824 2 42 112 0.062745 2 43 113 +0.062745 2 43 113 0.066667 2 45 114 +0.066667 2 45 114 0.070588 2 46 115 +0.070588 2 46 115 0.074510 2 48 116 +0.074510 2 48 116 0.078431 2 49 117 +0.078431 2 49 117 0.082353 2 51 118 +0.082353 2 51 118 0.086275 2 52 119 +0.086275 2 52 119 0.090196 2 54 120 +0.090196 2 54 120 0.094118 2 55 121 +0.094118 2 55 121 
0.098039 2 57 122 +0.098039 2 57 122 0.101961 2 58 123 +0.101961 2 58 123 0.105882 3 60 124 +0.105882 3 60 124 0.109804 3 62 125 +0.109804 3 62 125 0.113725 3 63 126 +0.113725 3 63 126 0.117647 3 65 127 +0.117647 3 65 127 0.121569 3 66 128 +0.121569 3 66 128 0.125490 3 68 129 +0.125490 3 68 129 0.129412 3 69 130 +0.129412 3 69 130 0.133333 3 71 131 +0.133333 3 71 131 0.137255 3 73 132 +0.137255 3 73 132 0.141176 3 74 133 +0.141176 3 74 133 0.145098 4 76 134 +0.145098 4 76 134 0.149020 4 77 135 +0.149020 4 77 135 0.152941 4 79 136 +0.152941 4 79 136 0.156863 5 81 137 +0.156863 5 81 137 0.160784 5 82 138 +0.160784 5 82 138 0.164706 6 84 139 +0.164706 6 84 139 0.168627 6 86 140 +0.168627 6 86 140 0.172549 7 87 141 +0.172549 7 87 141 0.176471 8 89 143 +0.176471 8 89 143 0.180392 9 91 144 +0.180392 9 91 144 0.184314 11 93 145 +0.184314 11 93 145 0.188235 12 94 146 +0.188235 12 94 146 0.192157 14 96 147 +0.192157 14 96 147 0.196078 16 98 148 +0.196078 16 98 148 0.200000 17 100 150 +0.200000 17 100 150 0.203922 19 102 151 +0.203922 19 102 151 0.207843 21 103 152 +0.207843 21 103 152 0.211765 23 105 153 +0.211765 23 105 153 0.215686 25 107 154 +0.215686 25 107 154 0.219608 28 109 156 +0.219608 28 109 156 0.223529 30 111 157 +0.223529 30 111 157 0.227451 32 113 158 +0.227451 32 113 158 0.231373 35 115 160 +0.231373 35 115 160 0.235294 37 117 161 +0.235294 37 117 161 0.239216 40 119 162 +0.239216 40 119 162 0.243137 43 121 164 +0.243137 43 121 164 0.247059 45 123 165 +0.247059 45 123 165 0.250980 48 125 166 +0.250980 48 125 166 0.254902 51 127 168 +0.254902 51 127 168 0.258824 54 129 169 +0.258824 54 129 169 0.262745 57 131 171 +0.262745 57 131 171 0.266667 60 133 172 +0.266667 60 133 172 0.270588 63 135 173 +0.270588 63 135 173 0.274510 66 137 175 +0.274510 66 137 175 0.278431 69 139 176 +0.278431 69 139 176 0.282353 72 141 178 +0.282353 72 141 178 0.286275 75 144 179 +0.286275 75 144 179 0.290196 78 146 180 +0.290196 78 146 180 0.294118 81 148 182 +0.294118 81 148 182 
0.298039 84 150 183 +0.298039 84 150 183 0.301961 87 152 185 +0.301961 87 152 185 0.305882 90 154 186 +0.305882 90 154 186 0.309804 93 156 187 +0.309804 93 156 187 0.313725 97 158 189 +0.313725 97 158 189 0.317647 100 160 190 +0.317647 100 160 190 0.321569 103 162 192 +0.321569 103 162 192 0.325490 106 164 193 +0.325490 106 164 193 0.329412 109 166 194 +0.329412 109 166 194 0.333333 113 168 196 +0.333333 113 168 196 0.337255 116 170 197 +0.337255 116 170 197 0.341176 119 172 198 +0.341176 119 172 198 0.345098 122 174 200 +0.345098 122 174 200 0.349020 125 176 201 +0.349020 125 176 201 0.352941 128 178 202 +0.352941 128 178 202 0.356863 132 180 204 +0.356863 132 180 204 0.360784 135 182 205 +0.360784 135 182 205 0.364706 138 184 206 +0.364706 138 184 206 0.368627 141 186 208 +0.368627 141 186 208 0.372549 144 188 209 +0.372549 144 188 209 0.376471 148 190 210 +0.376471 148 190 210 0.380392 151 192 212 +0.380392 151 192 212 0.384314 154 194 213 +0.384314 154 194 213 0.388235 157 196 214 +0.388235 157 196 214 0.392157 160 197 216 +0.392157 160 197 216 0.396078 163 199 217 +0.396078 163 199 217 0.400000 167 201 218 +0.400000 167 201 218 0.403922 170 203 220 +0.403922 170 203 220 0.407843 173 205 221 +0.407843 173 205 221 0.411765 176 207 222 +0.411765 176 207 222 0.415686 179 209 223 +0.415686 179 209 223 0.419608 182 211 225 +0.419608 182 211 225 0.423529 186 213 226 +0.423529 186 213 226 0.427451 189 214 227 +0.427451 189 214 227 0.431373 192 216 228 +0.431373 192 216 228 0.435294 195 218 229 +0.435294 195 218 229 0.439216 198 219 230 +0.439216 198 219 230 0.443137 201 221 231 +0.443137 201 221 231 0.447059 204 223 232 +0.447059 204 223 232 0.450980 207 224 232 +0.450980 207 224 232 0.454902 210 225 233 +0.454902 210 225 233 0.458824 213 227 233 +0.458824 213 227 233 0.462745 216 228 233 +0.462745 216 228 233 0.466667 219 229 233 +0.466667 219 229 233 0.470588 222 230 233 +0.470588 222 230 233 0.474510 224 230 233 +0.474510 224 230 233 0.478431 226 231 232 +0.478431 
226 231 232 0.482353 229 231 232 +0.482353 229 231 232 0.486275 231 231 231 +0.486275 231 231 231 0.490196 232 231 229 +0.490196 232 231 229 0.494118 234 230 228 +0.494118 234 230 228 0.498039 235 230 226 +0.498039 235 230 226 0.501961 236 229 224 +0.501961 236 229 224 0.505882 237 228 222 +0.505882 237 228 222 0.509804 238 227 220 +0.509804 238 227 220 0.513725 238 225 218 +0.513725 238 225 218 0.517647 238 224 216 +0.517647 238 224 216 0.521569 238 222 213 +0.521569 238 222 213 0.525490 238 221 211 +0.525490 238 221 211 0.529412 238 219 208 +0.529412 238 219 208 0.533333 238 217 205 +0.533333 238 217 205 0.537255 237 215 203 +0.537255 237 215 203 0.541176 237 213 200 +0.541176 237 213 200 0.545098 236 211 197 +0.545098 236 211 197 0.549020 236 209 195 +0.549020 236 209 195 0.552941 235 208 192 +0.552941 235 208 192 0.556863 234 206 189 +0.556863 234 206 189 0.560784 233 204 186 +0.560784 233 204 186 0.564706 233 202 184 +0.564706 233 202 184 0.568627 232 200 181 +0.568627 232 200 181 0.572549 231 198 178 +0.572549 231 198 178 0.576471 230 196 176 +0.576471 230 196 176 0.580392 229 193 173 +0.580392 229 193 173 0.584314 228 191 170 +0.584314 228 191 170 0.588235 228 190 168 +0.588235 228 190 168 0.592157 227 188 165 +0.592157 227 188 165 0.596078 226 186 162 +0.596078 226 186 162 0.600000 225 184 160 +0.600000 225 184 160 0.603922 224 182 157 +0.603922 224 182 157 0.607843 223 180 154 +0.607843 223 180 154 0.611765 223 178 152 +0.611765 223 178 152 0.615686 222 176 149 +0.615686 222 176 149 0.619608 221 174 147 +0.619608 221 174 147 0.623529 220 172 144 +0.623529 220 172 144 0.627451 219 170 141 +0.627451 219 170 141 0.631373 219 168 139 +0.631373 219 168 139 0.635294 218 166 136 +0.635294 218 166 136 0.639216 217 164 134 +0.639216 217 164 134 0.643137 216 162 131 +0.643137 216 162 131 0.647059 215 160 129 +0.647059 215 160 129 0.650980 214 159 126 +0.650980 214 159 126 0.654902 214 157 124 +0.654902 214 157 124 0.658824 213 155 121 +0.658824 213 155 121 0.662745 
212 153 119 +0.662745 212 153 119 0.666667 211 151 116 +0.666667 211 151 116 0.670588 211 149 114 +0.670588 211 149 114 0.674510 210 148 112 +0.674510 210 148 112 0.678431 209 146 109 +0.678431 209 146 109 0.682353 208 144 107 +0.682353 208 144 107 0.686275 207 142 104 +0.686275 207 142 104 0.690196 207 140 102 +0.690196 207 140 102 0.694118 206 139 100 +0.694118 206 139 100 0.698039 205 137 97 +0.698039 205 137 97 0.701961 204 135 95 +0.701961 204 135 95 0.705882 204 133 93 +0.705882 204 133 93 0.709804 203 131 90 +0.709804 203 131 90 0.713725 202 130 88 +0.713725 202 130 88 0.717647 201 128 86 +0.717647 201 128 86 0.721569 201 126 83 +0.721569 201 126 83 0.725490 200 124 81 +0.725490 200 124 81 0.729412 199 123 79 +0.729412 199 123 79 0.733333 198 121 76 +0.733333 198 121 76 0.737255 198 119 74 +0.737255 198 119 74 0.741176 197 117 72 +0.741176 197 117 72 0.745098 196 116 69 +0.745098 196 116 69 0.749020 195 114 67 +0.749020 195 114 67 0.752941 194 112 65 +0.752941 194 112 65 0.756863 194 110 63 +0.756863 194 110 63 0.760784 193 109 60 +0.760784 193 109 60 0.764706 192 107 58 +0.764706 192 107 58 0.768627 191 105 56 +0.768627 191 105 56 0.772549 190 103 54 +0.772549 190 103 54 0.776471 190 101 51 +0.776471 190 101 51 0.780392 189 100 49 +0.780392 189 100 49 0.784314 188 98 47 +0.784314 188 98 47 0.788235 187 96 45 +0.788235 187 96 45 0.792157 186 94 42 +0.792157 186 94 42 0.796078 184 92 40 +0.796078 184 92 40 0.800000 183 90 38 +0.800000 183 90 38 0.803922 182 88 36 +0.803922 182 88 36 0.807843 181 85 33 +0.807843 181 85 33 0.811765 179 83 31 +0.811765 179 83 31 0.815686 178 81 29 +0.815686 178 81 29 0.819608 176 79 27 +0.819608 176 79 27 0.823529 175 76 24 +0.823529 175 76 24 0.827451 173 74 22 +0.827451 173 74 22 0.831373 171 72 20 +0.831373 171 72 20 0.835294 169 69 18 +0.835294 169 69 18 0.839216 167 67 16 +0.839216 167 67 16 0.843137 165 64 15 +0.843137 165 64 15 0.847059 163 62 13 +0.847059 163 62 13 0.850980 161 60 11 +0.850980 161 60 11 0.854902 159 57 
10 +0.854902 159 57 10 0.858824 156 55 9 +0.858824 156 55 9 0.862745 154 53 8 +0.862745 154 53 8 0.866667 152 51 7 +0.866667 152 51 7 0.870588 150 49 7 +0.870588 150 49 7 0.874510 148 47 6 +0.874510 148 47 6 0.878431 145 45 6 +0.878431 145 45 6 0.882353 143 43 6 +0.882353 143 43 6 0.886275 141 41 6 +0.886275 141 41 6 0.890196 139 39 6 +0.890196 139 39 6 0.894118 137 38 6 +0.894118 137 38 6 0.898039 135 36 6 +0.898039 135 36 6 0.901961 133 34 6 +0.901961 133 34 6 0.905882 131 33 6 +0.905882 131 33 6 0.909804 129 31 6 +0.909804 129 31 6 0.913725 127 30 6 +0.913725 127 30 6 0.917647 126 29 6 +0.917647 126 29 6 0.921569 124 27 6 +0.921569 124 27 6 0.925490 122 26 6 +0.925490 122 26 6 0.929412 120 24 6 +0.929412 120 24 6 0.933333 118 23 6 +0.933333 118 23 6 0.937255 116 21 6 +0.937255 116 21 6 0.941176 115 20 6 +0.941176 115 20 6 0.945098 113 19 7 +0.945098 113 19 7 0.949020 111 17 7 +0.949020 111 17 7 0.952941 109 16 7 +0.952941 109 16 7 0.956863 108 14 7 +0.956863 108 14 7 0.960784 106 13 7 +0.960784 106 13 7 0.964706 104 12 7 +0.964706 104 12 7 0.968627 103 10 7 +0.968627 103 10 7 0.972549 101 9 7 +0.972549 101 9 7 0.976471 99 7 7 +0.976471 99 7 7 0.980392 98 6 7 +0.980392 98 6 7 0.984314 96 4 8 +0.984314 96 4 8 0.988235 94 3 8 +0.988235 94 3 8 0.992157 93 2 8 +0.992157 93 2 8 0.996078 91 1 8 +0.996078 91 1 8 1.000000 89 0 8 +N 255 255 255 +B 0 18 97 +F 89 0 8 diff --git a/newt/colormaps/vik/vik.ct b/newt/colormaps/vik/vik.ct new file mode 100644 index 0000000..3d5b6ec --- /dev/null +++ b/newt/colormaps/vik/vik.ct @@ -0,0 +1,1030 @@ + + + 1.11.0 + + + 0 18 97 255 + 0 + + + 1 20 98 255 + 0.0039216 + + + 1 21 99 255 + 0.0078431 + + + 1 23 100 255 + 0.011765 + + + 1 24 101 255 + 0.015686 + + + 1 26 102 255 + 0.019608 + + + 2 28 103 255 + 0.023529 + + + 2 29 104 255 + 0.027451 + + + 2 31 105 255 + 0.031373 + + + 2 32 106 255 + 0.035294 + + + 2 34 107 255 + 0.039216 + + + 2 35 108 255 + 0.043137 + + + 2 37 109 255 + 0.047059 + + + 2 39 110 255 + 0.05098 + + + 2 40 111 255 
+ 0.054902 + + + 2 42 112 255 + 0.058824 + + + 2 43 113 255 + 0.062745 + + + 2 45 114 255 + 0.066667 + + + 2 46 115 255 + 0.070588 + + + 2 48 116 255 + 0.07451 + + + 2 49 117 255 + 0.078431 + + + 2 51 118 255 + 0.082353 + + + 2 52 119 255 + 0.086275 + + + 2 54 120 255 + 0.090196 + + + 2 55 121 255 + 0.094118 + + + 2 57 122 255 + 0.098039 + + + 2 58 123 255 + 0.10196 + + + 3 60 124 255 + 0.10588 + + + 3 62 125 255 + 0.1098 + + + 3 63 126 255 + 0.11373 + + + 3 65 127 255 + 0.11765 + + + 3 66 128 255 + 0.12157 + + + 3 68 129 255 + 0.12549 + + + 3 69 130 255 + 0.12941 + + + 3 71 131 255 + 0.13333 + + + 3 73 132 255 + 0.13725 + + + 3 74 133 255 + 0.14118 + + + 4 76 134 255 + 0.1451 + + + 4 77 135 255 + 0.14902 + + + 4 79 136 255 + 0.15294 + + + 5 81 137 255 + 0.15686 + + + 5 82 138 255 + 0.16078 + + + 6 84 139 255 + 0.16471 + + + 6 86 140 255 + 0.16863 + + + 7 87 141 255 + 0.17255 + + + 8 89 143 255 + 0.17647 + + + 9 91 144 255 + 0.18039 + + + 11 93 145 255 + 0.18431 + + + 12 94 146 255 + 0.18824 + + + 14 96 147 255 + 0.19216 + + + 16 98 148 255 + 0.19608 + + + 17 100 150 255 + 0.2 + + + 19 102 151 255 + 0.20392 + + + 21 103 152 255 + 0.20784 + + + 23 105 153 255 + 0.21176 + + + 25 107 154 255 + 0.21569 + + + 28 109 156 255 + 0.21961 + + + 30 111 157 255 + 0.22353 + + + 32 113 158 255 + 0.22745 + + + 35 115 160 255 + 0.23137 + + + 37 117 161 255 + 0.23529 + + + 40 119 162 255 + 0.23922 + + + 43 121 164 255 + 0.24314 + + + 45 123 165 255 + 0.24706 + + + 48 125 166 255 + 0.25098 + + + 51 127 168 255 + 0.2549 + + + 54 129 169 255 + 0.25882 + + + 57 131 171 255 + 0.26275 + + + 60 133 172 255 + 0.26667 + + + 63 135 173 255 + 0.27059 + + + 66 137 175 255 + 0.27451 + + + 69 139 176 255 + 0.27843 + + + 72 141 178 255 + 0.28235 + + + 75 144 179 255 + 0.28627 + + + 78 146 180 255 + 0.2902 + + + 81 148 182 255 + 0.29412 + + + 84 150 183 255 + 0.29804 + + + 87 152 185 255 + 0.30196 + + + 90 154 186 255 + 0.30588 + + + 93 156 187 255 + 0.3098 + + + 97 158 189 255 + 0.31373 + + + 100 
160 190 255 + 0.31765 + + + 103 162 192 255 + 0.32157 + + + 106 164 193 255 + 0.32549 + + + 109 166 194 255 + 0.32941 + + + 113 168 196 255 + 0.33333 + + + 116 170 197 255 + 0.33725 + + + 119 172 198 255 + 0.34118 + + + 122 174 200 255 + 0.3451 + + + 125 176 201 255 + 0.34902 + + + 128 178 202 255 + 0.35294 + + + 132 180 204 255 + 0.35686 + + + 135 182 205 255 + 0.36078 + + + 138 184 206 255 + 0.36471 + + + 141 186 208 255 + 0.36863 + + + 144 188 209 255 + 0.37255 + + + 148 190 210 255 + 0.37647 + + + 151 192 212 255 + 0.38039 + + + 154 194 213 255 + 0.38431 + + + 157 196 214 255 + 0.38824 + + + 160 197 216 255 + 0.39216 + + + 163 199 217 255 + 0.39608 + + + 167 201 218 255 + 0.4 + + + 170 203 220 255 + 0.40392 + + + 173 205 221 255 + 0.40784 + + + 176 207 222 255 + 0.41176 + + + 179 209 223 255 + 0.41569 + + + 182 211 225 255 + 0.41961 + + + 186 213 226 255 + 0.42353 + + + 189 214 227 255 + 0.42745 + + + 192 216 228 255 + 0.43137 + + + 195 218 229 255 + 0.43529 + + + 198 219 230 255 + 0.43922 + + + 201 221 231 255 + 0.44314 + + + 204 223 232 255 + 0.44706 + + + 207 224 232 255 + 0.45098 + + + 210 225 233 255 + 0.4549 + + + 213 227 233 255 + 0.45882 + + + 216 228 233 255 + 0.46275 + + + 219 229 233 255 + 0.46667 + + + 222 230 233 255 + 0.47059 + + + 224 230 233 255 + 0.47451 + + + 226 231 232 255 + 0.47843 + + + 229 231 232 255 + 0.48235 + + + 231 231 231 255 + 0.48627 + + + 232 231 229 255 + 0.4902 + + + 234 230 228 255 + 0.49412 + + + 235 230 226 255 + 0.49804 + + + 236 229 224 255 + 0.50196 + + + 237 228 222 255 + 0.50588 + + + 238 227 220 255 + 0.5098 + + + 238 225 218 255 + 0.51373 + + + 238 224 216 255 + 0.51765 + + + 238 222 213 255 + 0.52157 + + + 238 221 211 255 + 0.52549 + + + 238 219 208 255 + 0.52941 + + + 238 217 205 255 + 0.53333 + + + 237 215 203 255 + 0.53725 + + + 237 213 200 255 + 0.54118 + + + 236 211 197 255 + 0.5451 + + + 236 209 195 255 + 0.54902 + + + 235 208 192 255 + 0.55294 + + + 234 206 189 255 + 0.55686 + + + 233 204 186 255 + 0.56078 + 
+ + 233 202 184 255 + 0.56471 + + + 232 200 181 255 + 0.56863 + + + 231 198 178 255 + 0.57255 + + + 230 196 176 255 + 0.57647 + + + 229 193 173 255 + 0.58039 + + + 228 191 170 255 + 0.58431 + + + 228 190 168 255 + 0.58824 + + + 227 188 165 255 + 0.59216 + + + 226 186 162 255 + 0.59608 + + + 225 184 160 255 + 0.6 + + + 224 182 157 255 + 0.60392 + + + 223 180 154 255 + 0.60784 + + + 223 178 152 255 + 0.61176 + + + 222 176 149 255 + 0.61569 + + + 221 174 147 255 + 0.61961 + + + 220 172 144 255 + 0.62353 + + + 219 170 141 255 + 0.62745 + + + 219 168 139 255 + 0.63137 + + + 218 166 136 255 + 0.63529 + + + 217 164 134 255 + 0.63922 + + + 216 162 131 255 + 0.64314 + + + 215 160 129 255 + 0.64706 + + + 214 159 126 255 + 0.65098 + + + 214 157 124 255 + 0.6549 + + + 213 155 121 255 + 0.65882 + + + 212 153 119 255 + 0.66275 + + + 211 151 116 255 + 0.66667 + + + 211 149 114 255 + 0.67059 + + + 210 148 112 255 + 0.67451 + + + 209 146 109 255 + 0.67843 + + + 208 144 107 255 + 0.68235 + + + 207 142 104 255 + 0.68627 + + + 207 140 102 255 + 0.6902 + + + 206 139 100 255 + 0.69412 + + + 205 137 97 255 + 0.69804 + + + 204 135 95 255 + 0.70196 + + + 204 133 93 255 + 0.70588 + + + 203 131 90 255 + 0.7098 + + + 202 130 88 255 + 0.71373 + + + 201 128 86 255 + 0.71765 + + + 201 126 83 255 + 0.72157 + + + 200 124 81 255 + 0.72549 + + + 199 123 79 255 + 0.72941 + + + 198 121 76 255 + 0.73333 + + + 198 119 74 255 + 0.73725 + + + 197 117 72 255 + 0.74118 + + + 196 116 69 255 + 0.7451 + + + 195 114 67 255 + 0.74902 + + + 194 112 65 255 + 0.75294 + + + 194 110 63 255 + 0.75686 + + + 193 109 60 255 + 0.76078 + + + 192 107 58 255 + 0.76471 + + + 191 105 56 255 + 0.76863 + + + 190 103 54 255 + 0.77255 + + + 190 101 51 255 + 0.77647 + + + 189 100 49 255 + 0.78039 + + + 188 98 47 255 + 0.78431 + + + 187 96 45 255 + 0.78824 + + + 186 94 42 255 + 0.79216 + + + 184 92 40 255 + 0.79608 + + + 183 90 38 255 + 0.8 + + + 182 88 36 255 + 0.80392 + + + 181 85 33 255 + 0.80784 + + + 179 83 31 255 + 0.81176 + + 
+ 178 81 29 255 + 0.81569 + + + 176 79 27 255 + 0.81961 + + + 175 76 24 255 + 0.82353 + + + 173 74 22 255 + 0.82745 + + + 171 72 20 255 + 0.83137 + + + 169 69 18 255 + 0.83529 + + + 167 67 16 255 + 0.83922 + + + 165 64 15 255 + 0.84314 + + + 163 62 13 255 + 0.84706 + + + 161 60 11 255 + 0.85098 + + + 159 57 10 255 + 0.8549 + + + 156 55 9 255 + 0.85882 + + + 154 53 8 255 + 0.86275 + + + 152 51 7 255 + 0.86667 + + + 150 49 7 255 + 0.87059 + + + 148 47 6 255 + 0.87451 + + + 145 45 6 255 + 0.87843 + + + 143 43 6 255 + 0.88235 + + + 141 41 6 255 + 0.88627 + + + 139 39 6 255 + 0.8902 + + + 137 38 6 255 + 0.89412 + + + 135 36 6 255 + 0.89804 + + + 133 34 6 255 + 0.90196 + + + 131 33 6 255 + 0.90588 + + + 129 31 6 255 + 0.9098 + + + 127 30 6 255 + 0.91373 + + + 126 29 6 255 + 0.91765 + + + 124 27 6 255 + 0.92157 + + + 122 26 6 255 + 0.92549 + + + 120 24 6 255 + 0.92941 + + + 118 23 6 255 + 0.93333 + + + 116 21 6 255 + 0.93725 + + + 115 20 6 255 + 0.94118 + + + 113 19 7 255 + 0.9451 + + + 111 17 7 255 + 0.94902 + + + 109 16 7 255 + 0.95294 + + + 108 14 7 255 + 0.95686 + + + 106 13 7 255 + 0.96078 + + + 104 12 7 255 + 0.96471 + + + 103 10 7 255 + 0.96863 + + + 101 9 7 255 + 0.97255 + + + 99 7 7 255 + 0.97647 + + + 98 6 7 255 + 0.98039 + + + 96 4 8 255 + 0.98431 + + + 94 3 8 255 + 0.98824 + + + 93 2 8 255 + 0.99216 + + + 91 1 8 255 + 0.99608 + + + 89 0 8 255 + 1 + + + \ No newline at end of file diff --git a/newt/colormaps/vik/vik.lut b/newt/colormaps/vik/vik.lut new file mode 100644 index 0000000..416938e --- /dev/null +++ b/newt/colormaps/vik/vik.lut @@ -0,0 +1,256 @@ +0 18 97 +1 20 98 +1 21 99 +1 23 100 +1 24 101 +1 26 102 +2 28 103 +2 29 104 +2 31 105 +2 32 106 +2 34 107 +2 35 108 +2 37 109 +2 39 110 +2 40 111 +2 42 112 +2 43 113 +2 45 114 +2 46 115 +2 48 116 +2 49 117 +2 51 118 +2 52 119 +2 54 120 +2 55 121 +2 57 122 +2 58 123 +3 60 124 +3 62 125 +3 63 126 +3 65 127 +3 66 128 +3 68 129 +3 69 130 +3 71 131 +3 73 132 +3 74 133 +4 76 134 +4 77 135 +4 79 136 +5 81 137 +5 82 
138 +6 84 139 +6 86 140 +7 87 141 +8 89 143 +9 91 144 +11 93 145 +12 94 146 +14 96 147 +16 98 148 +17 100 150 +19 102 151 +21 103 152 +23 105 153 +25 107 154 +28 109 156 +30 111 157 +32 113 158 +35 115 160 +37 117 161 +40 119 162 +43 121 164 +45 123 165 +48 125 166 +51 127 168 +54 129 169 +57 131 171 +60 133 172 +63 135 173 +66 137 175 +69 139 176 +72 141 178 +75 144 179 +78 146 180 +81 148 182 +84 150 183 +87 152 185 +90 154 186 +93 156 187 +97 158 189 +100 160 190 +103 162 192 +106 164 193 +109 166 194 +113 168 196 +116 170 197 +119 172 198 +122 174 200 +125 176 201 +128 178 202 +132 180 204 +135 182 205 +138 184 206 +141 186 208 +144 188 209 +148 190 210 +151 192 212 +154 194 213 +157 196 214 +160 197 216 +163 199 217 +167 201 218 +170 203 220 +173 205 221 +176 207 222 +179 209 223 +182 211 225 +186 213 226 +189 214 227 +192 216 228 +195 218 229 +198 219 230 +201 221 231 +204 223 232 +207 224 232 +210 225 233 +213 227 233 +216 228 233 +219 229 233 +222 230 233 +224 230 233 +226 231 232 +229 231 232 +231 231 231 +232 231 229 +234 230 228 +235 230 226 +236 229 224 +237 228 222 +238 227 220 +238 225 218 +238 224 216 +238 222 213 +238 221 211 +238 219 208 +238 217 205 +237 215 203 +237 213 200 +236 211 197 +236 209 195 +235 208 192 +234 206 189 +233 204 186 +233 202 184 +232 200 181 +231 198 178 +230 196 176 +229 193 173 +228 191 170 +228 190 168 +227 188 165 +226 186 162 +225 184 160 +224 182 157 +223 180 154 +223 178 152 +222 176 149 +221 174 147 +220 172 144 +219 170 141 +219 168 139 +218 166 136 +217 164 134 +216 162 131 +215 160 129 +214 159 126 +214 157 124 +213 155 121 +212 153 119 +211 151 116 +211 149 114 +210 148 112 +209 146 109 +208 144 107 +207 142 104 +207 140 102 +206 139 100 +205 137 97 +204 135 95 +204 133 93 +203 131 90 +202 130 88 +201 128 86 +201 126 83 +200 124 81 +199 123 79 +198 121 76 +198 119 74 +197 117 72 +196 116 69 +195 114 67 +194 112 65 +194 110 63 +193 109 60 +192 107 58 +191 105 56 +190 103 54 +190 101 51 +189 100 49 +188 98 47 +187 
96 45 +186 94 42 +184 92 40 +183 90 38 +182 88 36 +181 85 33 +179 83 31 +178 81 29 +176 79 27 +175 76 24 +173 74 22 +171 72 20 +169 69 18 +167 67 16 +165 64 15 +163 62 13 +161 60 11 +159 57 10 +156 55 9 +154 53 8 +152 51 7 +150 49 7 +148 47 6 +145 45 6 +143 43 6 +141 41 6 +139 39 6 +137 38 6 +135 36 6 +133 34 6 +131 33 6 +129 31 6 +127 30 6 +126 29 6 +124 27 6 +122 26 6 +120 24 6 +118 23 6 +116 21 6 +115 20 6 +113 19 7 +111 17 7 +109 16 7 +108 14 7 +106 13 7 +104 12 7 +103 10 7 +101 9 7 +99 7 7 +98 6 7 +96 4 8 +94 3 8 +93 2 8 +91 1 8 +89 0 8 diff --git a/newt/colormaps/vik/vik.mat b/newt/colormaps/vik/vik.mat new file mode 100644 index 0000000000000000000000000000000000000000..e13b3eb71b317bdc7a52ee7950d46dad24a078c8 GIT binary patch literal 6003 zcma*bRag^%0zhHu?#_X9hjdIvcbC#FFk*BgEin+JC8U|O(kWe%D&5VHW`GRXy)XCu z{!iySIv`_hkRpq?pfHOL$XLMH73{>%q7Sz7bN2G_kooUZ(vT42XHoKTvh#CtWbyKp zVKIK^&!Xqx$091mA|fs$EFmK-#Ud&!BF*yudhtJK8agEZ?Gx%3FMe2n)x z5jg5XdVA&}OQ+@F2i^IA5yf@Ef4?_|eKocxOvhgwg$2F|KL~$eSj=_iv(TFW=a3#| zgWY}f-}+rEsK;XZ*rKV?OsMRy@wYPPGD>|9P++w|@!^E|x4ZQt=pHimu)d3mW&z#F zr~=6_8)$UT`aH2&O@-M!yevtnOE*=H_i^k(l?n{>)kqCRQ=MA1vHKnFMwG%b`h(i7 ze9BgfY%@QoKe1Wi#qU|Xeeo@3&S|&RG~*Bm=JLPqUOQJB2uUzzLSblE#Zhde1Ge-s zy$x@7l$u9n_A-!y=-wwK`kj@(sma97Us&9BJ30=9 zAgZVdZRMW~l(9s*=WgTCB~&~UfojBitHJlY>%Oez2P6W?VQVSs$!V{IhN$hML!*5~ zz`emHL&{-_$qjn@=2|@bDCb<)x|;WcCCHuT@T@IY%c}EJ8?4VRW`kI6HFo)@C~kvp z>cInGr7q3Mg?{9_2e7|5xFjcO| zarf<2mZ6h4afH`$Iy^EXCe>e)>K4S93qQNA3mT6yPYm_-;=oxJ)RFuk&)B_i7UZt9 zvo-v6dQKD@2UVdisNn$l(&+JY#x8@yfrIxw;RE9k_?5r>(r8q%>LKU8DzWF=f_+*V zUs7rd)B3w;49Ur}FW>c~X7L-s#19Ai1)lkuB&Q);Q+m*R;8;}5`@ql^37l)@RG;e_ zsa&YsWra%-k;R}yDOEa@O!jy<2LV>w~={LUzAWlsBB!Ka{Nox2_m&XWk&fp9zknWY>Cu`~rwFtQ9!_NWPNgAG+FYUe$z+o)lMhKu{53i_Gio7vSs9T1;3r8j zRUl7Rp{d_nHOaL5Txmwfi@s%F?R&SCT}(tx-b(KzVJOB{gOjydoR-W=zz`n z=M;Uin^w4hE}+|a=KWIB7T;$mUA12*nM~RE`}OyJzD?{H0GnXk;F(_#&&7Y6U1NUT ztpyvRYuak+F)cTRaxxPDX(wIw*}1GQVkLoTsbeihSRsObxr>?gNGDY_1@{dhJ|-Ds 
z%OLST`^VLIiN1cjC1n>z!%KSjMC8_7tyN<4-Z%Z@6ws5N_NbgU%2Rpn9W&Z|afJY+8~*c66!ShLEN!(6Uis@)*BxV?zieeG=j1IqWTu ziyaSJD9#~KmwL6~o#m_^{dv;u+uxUD9GedVG(Za0zFlF!a>9q!4fGi^*~FunReLBM zdUFIG`yLgAey+4-=MFU*xdb*<%?&QDJF#Vs%DnpW`G}P z+EHIdtb$-aI4N~|va_T2{;y?nsAM-35b^5te8dmYSXydF5_Wv{yS0|Nz&*s(&#;mP zVHA*;3hEhSq|ar{hsSZE07N5sz|5}4RwLIFrFFQ4Rx4zYjR)~-o`2MZCF zot)i&z_OzS*2%xDtWs+Pjls9ax5>cF+J7M}48vnT^&j_33>=qXfy}SPeb{UwsDDczi+r!6B?w=%rh`Xa*{RyGu|mW!Dl&8oDYz~0P`ukZg^U25u;k5wcV z-ExE*iZU%Deec9{o(80Phqg{#MJ#ISl{_{>R`uoG+3`NB7Cv)L=9?R3quJH)DVCmu z=q{&F0f~mv(v#d@`P}Zjyn^^b?#UI=_izt0Eg`DKI3T?Ht$5Ty_yjdhEtfaKdcNi% z7q2}?n13jgljY|%qPLQrsA`7$%%HWLb*T52oht*Imet!=oZX#>Fy5w-ulbCqA#S9qzw(S!#*H!f% zN>cgi^I3Bfqzk5dg`Nm4N?eT|FarC5=L(la21hbVD1!AmLP~r&?)$#8@+DgeO-z%!h4!8h29CTkkcPSN6Tc+XE(&I2Jc z4lV+brpsjYg(_$4Bvm|0lrVPnuL-L81lmRycPx1d$1nXpMV;M=sS6F98G4FGOA^m) zrbw0?n`<*Z=j>m0US5U31%;To+eq@)fO+qaFMXNU!Vpi@3(LA_^yXepZl(F{9W`$r z{p^=8i-0~xRg6Un@naUQfOI@R)Sk(rXVt*yTY^M%3GR<4gfE z0&{Woe!H$=@_k2q`c>2|my~{chrpEkBsP9Z-YUkBt7E~$FY{K(=paoIObTcaHijGEOEA?H3V%%|&7&J4Aw z`8U#mMru~9Bq#)Iz1=3H3U(H+c9CX9j$q9MLd6h3fbmNOlp z#P}6oa3wi5O1d*Dk=Sqx{9#aog?RgET+_B5b5W73K)1WppNjCvE8VD4Vf?IljruW4Qa%-Mqm?iMN)AM?`PsvlG#kFd!L-1=w z_JHt4Vz|#n}!u= z+8`fyBp}evJhvcd$At@9RL4mGEW7RT%m(YO#USuP(T&%g|A-x|{`f6OmUCt)AI09H z$knY?z2i$DXXNB&%@v^%lWXgB-Q!%|5Yqh=t42|4B1e#5MGuPP%o5#Md0%pGz}pSq zmh9G+egZKSJZLGWh`QO;w0~P_s>Jqn6?HQX5dXVtK{?$38;O*Jf`NdV=+@OK-jH`3 zj6;~K?k{^;g3sdz*vIkBZ#Sm1wD^>sszcS6vJD@biC_8FJ;eU5Yw;66k~2yj@E^aj z!o>35&$ZDbAiL48xGZ>!#xA>o=_qz+=iO^JsXA&Y^&lrJEms&b%T!S2z7PDeAAc!W zdLTEH7v-m}&_y-)+kK(2cM=puvqq<>}|0!ViAiB$yEeJ~p~0n@Mhr{u-8 z8e+v&pzk-sp)o3pxKeGBd@ado(d0YlT^sh4^3a6p@$|KgJLJ3Dwp#;X`GeApWGKk2 zRl;rqOo(Pf{2Iw9f(`X5f=I56c<&udWto=lvgG2E7fy|^%67qR-wsKEyCiNsJppPH zB$ppwPvUv6@C%>7RDbl|_=u<&p$AA|)id~Ve9<|ahS<}h;rA9aRh;8p%p!LV=QV%u zA+3GKHPAT)C9>0M*>@)Fe@KseD64nNcCJt5e}EwH9oRfQAzm!d=Ktqwf*{+N;4 zah8Ri<0axkc=8O%U*iQnMKocEYe;4J-JI1puwV4b{3;2Ii7m>tc?=n6m&=kX+c4EZ z-@L{Mlxgp|=~c;|*6@r#Sl`O5*-6s%T)loAold{>=hfc|rKQ7kjmo&jax5S%y^&5y 
zw`*Or4Uc+(H3jU!iEcC4q6_*de@=+yE4wMLPg1rej@)+YeTly#!Bm^UX|NR#?a!m1 znKZCgjU_G5+btBfzP_s+H_C%N8ldGBqaHs7)U-cy2dTE87^sSgDKeL?EN$cuZCmF2 z(EIjo6Y17+6-@Vv2p50i$z3C_*4rG=iB#E4`Xjj%A|f0=r(qNIl=NDF!S96MHPIO1 zR@$|N44+-4iGedn`)F8FPP1%88 z#YgX|Gxr6m<}Y1Ey*hfNFpR8w#mnUhAMct25?}-vXlnQJ1Eq9hGbj5~JGU?}+vk)! zLTu6B!Bs`-A`q6wLNBHL*&$H1Tl^F@1u^}jV8Org z$*8R1dweC&nVeN^u--P%El64IzKsIkd@q`$@iRCA76FpD8a{YgOD8B625AUmP z*TRpzI(}J(3^;-+Nh^Yk~thzFGNw}j0mklDwk!q||0D%UW zEdD<euV3r|&o^jiH`3&x4SEM>J8)MnwS*L)9MP1?4EdST!mU2C^K;L~8-9&< z+dh8(lm{n3-4F_lM{4g-0_tRuhwTQUe|P9V&YK-oRqe?w1vH)5jY zCUt?6(qpW6X51~des`UyO^Kc*^W}c#nK4Q6_q$zZBKX#fMX(=OM?hkQzdq%2-AG_^ zMp5LtiTA-h4dI^2z>&>thj5%wqr)XJ?9-$-w~IsC;OChS*@4!ff7&ob(lRDro+dXg z_WWEJDu1!l01bKid9{^8V|u##NCqORpY{(0z9=O~63X>za(Dy$N4`t-caf;jXyp+d zjr-F)TxS#-r@n)jHa6Bog=A{!j>I{J1e_!*kFEGq7UZWTv>w^?RWZ%5e65x$Ys;9N zGTV)f%!A}wW}_MhM2PRIirkj|CMzMA!nItUq`-p6tPKykQACM2G`tt`(R-$bl0Rt> zNi$$aL$hmJS+{R%|Bo&b zD$|o&RjrW!V6~MLXbO$B3{1N1n>?0#)&CI8XrWsJC&+OIT c(9|!RZkHLQF!kHV%8H7dBoxMDJJ;v`0F(!>H~;_u literal 0 HcmV?d00001 diff --git a/newt/colormaps/vik/vik.ncmap b/newt/colormaps/vik/vik.ncmap new file mode 100644 index 0000000..ec74f86 --- /dev/null +++ b/newt/colormaps/vik/vik.ncmap @@ -0,0 +1,256 @@ +0 18 97 +1 20 98 +1 21 99 +1 23 100 +1 24 101 +1 26 102 +2 28 103 +2 29 104 +2 31 105 +2 32 106 +2 34 107 +2 35 108 +2 37 109 +2 39 110 +2 40 111 +2 42 112 +2 43 113 +2 45 114 +2 46 115 +2 48 116 +2 49 117 +2 51 118 +2 52 119 +2 54 120 +2 55 121 +2 57 122 +2 58 123 +3 60 124 +3 62 125 +3 63 126 +3 65 127 +3 66 128 +3 68 129 +3 69 130 +3 71 131 +3 73 132 +3 74 133 +4 76 134 +4 77 135 +4 79 136 +5 81 137 +5 82 138 +6 84 139 +6 86 140 +7 87 141 +8 89 143 +9 91 144 +11 93 145 +12 94 146 +14 96 147 +16 98 148 +17 100 150 +19 102 151 +21 103 152 +23 105 153 +25 107 154 +28 109 156 +30 111 157 +32 113 158 +35 115 160 +37 117 161 +40 119 162 +43 121 164 +45 123 165 +48 125 166 +51 127 168 +54 129 169 +57 131 171 +60 133 172 +63 135 173 +66 137 175 +69 139 176 +72 141 178 +75 
144 179 +78 146 180 +81 148 182 +84 150 183 +87 152 185 +90 154 186 +93 156 187 +97 158 189 +100 160 190 +103 162 192 +106 164 193 +109 166 194 +113 168 196 +116 170 197 +119 172 198 +122 174 200 +125 176 201 +128 178 202 +132 180 204 +135 182 205 +138 184 206 +141 186 208 +144 188 209 +148 190 210 +151 192 212 +154 194 213 +157 196 214 +160 197 216 +163 199 217 +167 201 218 +170 203 220 +173 205 221 +176 207 222 +179 209 223 +182 211 225 +186 213 226 +189 214 227 +192 216 228 +195 218 229 +198 219 230 +201 221 231 +204 223 232 +207 224 232 +210 225 233 +213 227 233 +216 228 233 +219 229 233 +222 230 233 +224 230 233 +226 231 232 +229 231 232 +231 231 231 +232 231 229 +234 230 228 +235 230 226 +236 229 224 +237 228 222 +238 227 220 +238 225 218 +238 224 216 +238 222 213 +238 221 211 +238 219 208 +238 217 205 +237 215 203 +237 213 200 +236 211 197 +236 209 195 +235 208 192 +234 206 189 +233 204 186 +233 202 184 +232 200 181 +231 198 178 +230 196 176 +229 193 173 +228 191 170 +228 190 168 +227 188 165 +226 186 162 +225 184 160 +224 182 157 +223 180 154 +223 178 152 +222 176 149 +221 174 147 +220 172 144 +219 170 141 +219 168 139 +218 166 136 +217 164 134 +216 162 131 +215 160 129 +214 159 126 +214 157 124 +213 155 121 +212 153 119 +211 151 116 +211 149 114 +210 148 112 +209 146 109 +208 144 107 +207 142 104 +207 140 102 +206 139 100 +205 137 97 +204 135 95 +204 133 93 +203 131 90 +202 130 88 +201 128 86 +201 126 83 +200 124 81 +199 123 79 +198 121 76 +198 119 74 +197 117 72 +196 116 69 +195 114 67 +194 112 65 +194 110 63 +193 109 60 +192 107 58 +191 105 56 +190 103 54 +190 101 51 +189 100 49 +188 98 47 +187 96 45 +186 94 42 +184 92 40 +183 90 38 +182 88 36 +181 85 33 +179 83 31 +178 81 29 +176 79 27 +175 76 24 +173 74 22 +171 72 20 +169 69 18 +167 67 16 +165 64 15 +163 62 13 +161 60 11 +159 57 10 +156 55 9 +154 53 8 +152 51 7 +150 49 7 +148 47 6 +145 45 6 +143 43 6 +141 41 6 +139 39 6 +137 38 6 +135 36 6 +133 34 6 +131 33 6 +129 31 6 +127 30 6 +126 29 6 +124 27 6 
+122 26 6 +120 24 6 +118 23 6 +116 21 6 +115 20 6 +113 19 7 +111 17 7 +109 16 7 +108 14 7 +106 13 7 +104 12 7 +103 10 7 +101 9 7 +99 7 7 +98 6 7 +96 4 8 +94 3 8 +93 2 8 +91 1 8 +89 0 8 diff --git a/newt/colormaps/vik/vik.pal b/newt/colormaps/vik/vik.pal new file mode 100644 index 0000000..6d52959 --- /dev/null +++ b/newt/colormaps/vik/vik.pal @@ -0,0 +1,257 @@ +set palette defined (\ +0 0.001328 0.069836 0.379529, \ +1 0.002366 0.076475 0.383518, \ +2 0.003304 0.083083 0.387487, \ +3 0.004146 0.089590 0.391477, \ +4 0.004897 0.095948 0.395453, \ +5 0.005563 0.102274 0.399409, \ +6 0.006151 0.108500 0.403388, \ +7 0.006668 0.114686 0.407339, \ +8 0.007119 0.120845 0.411288, \ +9 0.007512 0.126958 0.415230, \ +10 0.007850 0.133068 0.419166, \ +11 0.008141 0.139092 0.423079, \ +12 0.008391 0.145171 0.427006, \ +13 0.008606 0.151144 0.430910, \ +14 0.008790 0.157140 0.434809, \ +15 0.008947 0.163152 0.438691, \ +16 0.009080 0.169142 0.442587, \ +17 0.009193 0.175103 0.446459, \ +18 0.009290 0.181052 0.450337, \ +19 0.009372 0.187051 0.454212, \ +20 0.009443 0.193028 0.458077, \ +21 0.009506 0.198999 0.461951, \ +22 0.009564 0.205011 0.465816, \ +23 0.009619 0.211021 0.469707, \ +24 0.009675 0.217047 0.473571, \ +25 0.009735 0.223084 0.477461, \ +26 0.009802 0.229123 0.481352, \ +27 0.009881 0.235206 0.485250, \ +28 0.009977 0.241277 0.489161, \ +29 0.010098 0.247386 0.493080, \ +30 0.010254 0.253516 0.497020, \ +31 0.010463 0.259675 0.500974, \ +32 0.010755 0.265853 0.504938, \ +33 0.011176 0.272037 0.508925, \ +34 0.011716 0.278296 0.512923, \ +35 0.012286 0.284554 0.516953, \ +36 0.012934 0.290865 0.520998, \ +37 0.013790 0.297214 0.525074, \ +38 0.014838 0.303577 0.529184, \ +39 0.016131 0.310015 0.533308, \ +40 0.017711 0.316474 0.537485, \ +41 0.019630 0.322986 0.541677, \ +42 0.021948 0.329550 0.545931, \ +43 0.024730 0.336144 0.550210, \ +44 0.028047 0.342826 0.554538, \ +45 0.031980 0.349543 0.558906, \ +46 0.036812 0.356332 0.563341, \ +47 0.042229 0.363171 
0.567811, \ +48 0.048008 0.370086 0.572345, \ +49 0.054292 0.377080 0.576933, \ +50 0.060963 0.384129 0.581571, \ +51 0.068081 0.391265 0.586280, \ +52 0.075457 0.398460 0.591042, \ +53 0.083246 0.405740 0.595868, \ +54 0.091425 0.413088 0.600754, \ +55 0.099832 0.420499 0.605697, \ +56 0.108595 0.428000 0.610711, \ +57 0.117694 0.435566 0.615770, \ +58 0.127042 0.443194 0.620895, \ +59 0.136702 0.450888 0.626062, \ +60 0.146607 0.458643 0.631289, \ +61 0.156787 0.466457 0.636560, \ +62 0.167187 0.474324 0.641866, \ +63 0.177807 0.482238 0.647218, \ +64 0.188606 0.490191 0.652599, \ +65 0.199580 0.498193 0.658021, \ +66 0.210783 0.506201 0.663465, \ +67 0.222120 0.514263 0.668924, \ +68 0.233602 0.522322 0.674403, \ +69 0.245231 0.530414 0.679894, \ +70 0.256999 0.538517 0.685405, \ +71 0.268867 0.546617 0.690908, \ +72 0.280797 0.554717 0.696428, \ +73 0.292852 0.562822 0.701935, \ +74 0.304985 0.570907 0.707448, \ +75 0.317174 0.578997 0.712950, \ +76 0.329438 0.587064 0.718447, \ +77 0.341729 0.595123 0.723934, \ +78 0.354067 0.603164 0.729412, \ +79 0.366459 0.611186 0.734877, \ +80 0.378862 0.619189 0.740325, \ +81 0.391305 0.627159 0.745757, \ +82 0.403760 0.635114 0.751183, \ +83 0.416227 0.643046 0.756582, \ +84 0.428711 0.650956 0.761968, \ +85 0.441199 0.658836 0.767341, \ +86 0.453697 0.666696 0.772699, \ +87 0.466195 0.674537 0.778044, \ +88 0.478697 0.682349 0.783369, \ +89 0.491208 0.690143 0.788682, \ +90 0.503691 0.697910 0.793980, \ +91 0.516178 0.705661 0.799260, \ +92 0.528677 0.713387 0.804525, \ +93 0.541149 0.721090 0.809775, \ +94 0.553624 0.728778 0.815010, \ +95 0.566096 0.736441 0.820229, \ +96 0.578557 0.744089 0.825435, \ +97 0.591014 0.751718 0.830626, \ +98 0.603468 0.759314 0.835793, \ +99 0.615908 0.766896 0.840941, \ +100 0.628351 0.774452 0.846058, \ +101 0.640779 0.781988 0.851147, \ +102 0.653203 0.789485 0.856206, \ +103 0.665631 0.796945 0.861214, \ +104 0.678051 0.804371 0.866172, \ +105 0.690457 0.811742 0.871059, \ +106 
0.702868 0.819048 0.875866, \ +107 0.715265 0.826290 0.880567, \ +108 0.727646 0.833439 0.885146, \ +109 0.740019 0.840479 0.889570, \ +110 0.752354 0.847380 0.893807, \ +111 0.764662 0.854125 0.897821, \ +112 0.776918 0.860678 0.901565, \ +113 0.789096 0.866991 0.904992, \ +114 0.801170 0.873031 0.908043, \ +115 0.813110 0.878738 0.910653, \ +116 0.824870 0.884062 0.912761, \ +117 0.836396 0.888934 0.914302, \ +118 0.847617 0.893289 0.915195, \ +119 0.858470 0.897074 0.915385, \ +120 0.868874 0.900206 0.914812, \ +121 0.878729 0.902636 0.913418, \ +122 0.887965 0.904303 0.911164, \ +123 0.896497 0.905178 0.908034, \ +124 0.904242 0.905221 0.904013, \ +125 0.911151 0.904422 0.899132, \ +126 0.917175 0.902800 0.893409, \ +127 0.922285 0.900367 0.886911, \ +128 0.926482 0.897173 0.879687, \ +129 0.929789 0.893256 0.871826, \ +130 0.932236 0.888698 0.863396, \ +131 0.933880 0.883552 0.854476, \ +132 0.934782 0.877893 0.845152, \ +133 0.935013 0.871795 0.835493, \ +134 0.934644 0.865313 0.825561, \ +135 0.933752 0.858522 0.815421, \ +136 0.932408 0.851469 0.805112, \ +137 0.930682 0.844208 0.794685, \ +138 0.928622 0.836778 0.784169, \ +139 0.926298 0.829215 0.773579, \ +140 0.923752 0.821545 0.762958, \ +141 0.921017 0.813795 0.752313, \ +142 0.918147 0.805997 0.741659, \ +143 0.915156 0.798157 0.731008, \ +144 0.912080 0.790294 0.720370, \ +145 0.908933 0.782421 0.709752, \ +146 0.905741 0.774540 0.699150, \ +147 0.902506 0.766670 0.688588, \ +148 0.899249 0.758812 0.678051, \ +149 0.895973 0.750973 0.667550, \ +150 0.892690 0.743148 0.657086, \ +151 0.889402 0.735345 0.646657, \ +152 0.886118 0.727569 0.636274, \ +153 0.882831 0.719826 0.625923, \ +154 0.879556 0.712106 0.615618, \ +155 0.876289 0.704419 0.605357, \ +156 0.873033 0.696764 0.595141, \ +157 0.869784 0.689144 0.584972, \ +158 0.866551 0.681541 0.574832, \ +159 0.863333 0.673985 0.564746, \ +160 0.860121 0.666453 0.554708, \ +161 0.856920 0.658957 0.544709, \ +162 0.853732 0.651500 0.534753, \ +163 
0.850562 0.644061 0.524842, \ +164 0.847402 0.636670 0.514974, \ +165 0.844258 0.629296 0.505146, \ +166 0.841125 0.621957 0.495369, \ +167 0.838005 0.614653 0.485627, \ +168 0.834895 0.607392 0.475941, \ +169 0.831802 0.600144 0.466284, \ +170 0.828715 0.592938 0.456675, \ +171 0.825639 0.585758 0.447109, \ +172 0.822582 0.578600 0.437595, \ +173 0.819528 0.571478 0.428106, \ +174 0.816496 0.564388 0.418657, \ +175 0.813463 0.557328 0.409260, \ +176 0.810446 0.550285 0.399892, \ +177 0.807443 0.543274 0.390575, \ +178 0.804446 0.536288 0.381299, \ +179 0.801454 0.529329 0.372040, \ +180 0.798475 0.522380 0.362835, \ +181 0.795500 0.515460 0.353660, \ +182 0.792535 0.508575 0.344523, \ +183 0.789573 0.501692 0.335435, \ +184 0.786617 0.494827 0.326343, \ +185 0.783657 0.487977 0.317312, \ +186 0.780695 0.481123 0.308300, \ +187 0.777737 0.474295 0.299327, \ +188 0.774763 0.467464 0.290352, \ +189 0.771788 0.460620 0.281424, \ +190 0.768787 0.453783 0.272508, \ +191 0.765776 0.446929 0.263640, \ +192 0.762724 0.440055 0.254764, \ +193 0.759638 0.433147 0.245872, \ +194 0.756510 0.426200 0.237047, \ +195 0.753316 0.419216 0.228190, \ +196 0.750051 0.412163 0.219330, \ +197 0.746698 0.405028 0.210470, \ +198 0.743239 0.397819 0.201593, \ +199 0.739651 0.390493 0.192739, \ +200 0.735899 0.383060 0.183852, \ +201 0.731988 0.375473 0.174977, \ +202 0.727865 0.367743 0.166045, \ +203 0.723516 0.359852 0.157131, \ +204 0.718915 0.351766 0.148211, \ +205 0.714028 0.343503 0.139282, \ +206 0.708841 0.335048 0.130458, \ +207 0.703318 0.326354 0.121545, \ +208 0.697448 0.317502 0.112841, \ +209 0.691227 0.308462 0.104132, \ +210 0.684653 0.299264 0.095633, \ +211 0.677734 0.289916 0.087350, \ +212 0.670476 0.280477 0.079197, \ +213 0.662904 0.271015 0.071510, \ +214 0.655048 0.261520 0.064079, \ +215 0.646969 0.252081 0.057104, \ +216 0.638686 0.242711 0.050618, \ +217 0.630261 0.233488 0.044750, \ +218 0.621722 0.224449 0.039414, \ +219 0.613135 0.215657 0.034829, \ +220 
0.604539 0.207086 0.031072, \ +221 0.595947 0.198741 0.028212, \ +222 0.587403 0.190700 0.026019, \ +223 0.578937 0.182918 0.024396, \ +224 0.570545 0.175423 0.023257, \ +225 0.562268 0.168171 0.022523, \ +226 0.554076 0.161202 0.022110, \ +227 0.546007 0.154400 0.021861, \ +228 0.538043 0.147854 0.021737, \ +229 0.530182 0.141491 0.021722, \ +230 0.522424 0.135276 0.021800, \ +231 0.514776 0.129209 0.021957, \ +232 0.507213 0.123272 0.022179, \ +233 0.499733 0.117487 0.022455, \ +234 0.492348 0.111818 0.022775, \ +235 0.485034 0.106209 0.023130, \ +236 0.477801 0.100607 0.023513, \ +237 0.470639 0.095156 0.023916, \ +238 0.463530 0.089668 0.024336, \ +239 0.456494 0.084258 0.024766, \ +240 0.449521 0.078741 0.025203, \ +241 0.442603 0.073404 0.025644, \ +242 0.435737 0.067904 0.026084, \ +243 0.428918 0.062415 0.026522, \ +244 0.422146 0.056832 0.026954, \ +245 0.415437 0.051116 0.027378, \ +246 0.408768 0.045352 0.027790, \ +247 0.402132 0.039448 0.028189, \ +248 0.395562 0.033385 0.028570, \ +249 0.389015 0.027844 0.028932, \ +250 0.382496 0.022586 0.029271, \ +251 0.376028 0.017608 0.029583, \ +252 0.369578 0.012890 0.029866, \ +253 0.363161 0.008243 0.030115, \ +254 0.356785 0.004035 0.030327, \ +255 0.350423 0.000061 0.030499) diff --git a/newt/colormaps/vik/vik.py b/newt/colormaps/vik/vik.py new file mode 100644 index 0000000..0fcc85e --- /dev/null +++ b/newt/colormaps/vik/vik.py @@ -0,0 +1,278 @@ +# +# vik +# www.fabiocrameri.ch/colourmaps +from matplotlib.colors import LinearSegmentedColormap + +cm_data = [[0.0013282, 0.069836, 0.37953], + [0.0023664, 0.076475, 0.38352], + [0.0033042, 0.083083, 0.38749], + [0.0041459, 0.08959, 0.39148], + [0.0048968, 0.095948, 0.39545], + [0.0055632, 0.10227, 0.39941], + [0.0061512, 0.1085, 0.40339], + [0.0066676, 0.11469, 0.40734], + [0.0071192, 0.12085, 0.41129], + [0.0075116, 0.12696, 0.41523], + [0.0078503, 0.13307, 0.41917], + [0.0081413, 0.13909, 0.42308], + [0.0083912, 0.14517, 0.42701], + [0.0086057, 0.15114, 
0.43091], + [0.0087895, 0.15714, 0.43481], + [0.0089466, 0.16315, 0.43869], + [0.0090802, 0.16914, 0.44259], + [0.0091935, 0.1751, 0.44646], + [0.0092897, 0.18105, 0.45034], + [0.0093719, 0.18705, 0.45421], + [0.0094431, 0.19303, 0.45808], + [0.0095061, 0.199, 0.46195], + [0.0095639, 0.20501, 0.46582], + [0.0096192, 0.21102, 0.46971], + [0.0096751, 0.21705, 0.47357], + [0.0097347, 0.22308, 0.47746], + [0.0098019, 0.22912, 0.48135], + [0.0098809, 0.23521, 0.48525], + [0.0099771, 0.24128, 0.48916], + [0.010098, 0.24739, 0.49308], + [0.010254, 0.25352, 0.49702], + [0.010463, 0.25968, 0.50097], + [0.010755, 0.26585, 0.50494], + [0.011176, 0.27204, 0.50893], + [0.011716, 0.2783, 0.51292], + [0.012286, 0.28455, 0.51695], + [0.012934, 0.29087, 0.521], + [0.01379, 0.29721, 0.52507], + [0.014838, 0.30358, 0.52918], + [0.016131, 0.31002, 0.53331], + [0.017711, 0.31647, 0.53748], + [0.01963, 0.32299, 0.54168], + [0.021948, 0.32955, 0.54593], + [0.02473, 0.33614, 0.55021], + [0.028047, 0.34283, 0.55454], + [0.03198, 0.34954, 0.55891], + [0.036812, 0.35633, 0.56334], + [0.042229, 0.36317, 0.56781], + [0.048008, 0.37009, 0.57235], + [0.054292, 0.37708, 0.57693], + [0.060963, 0.38413, 0.58157], + [0.068081, 0.39126, 0.58628], + [0.075457, 0.39846, 0.59104], + [0.083246, 0.40574, 0.59587], + [0.091425, 0.41309, 0.60075], + [0.099832, 0.4205, 0.6057], + [0.10859, 0.428, 0.61071], + [0.11769, 0.43557, 0.61577], + [0.12704, 0.44319, 0.6209], + [0.1367, 0.45089, 0.62606], + [0.14661, 0.45864, 0.63129], + [0.15679, 0.46646, 0.63656], + [0.16719, 0.47432, 0.64187], + [0.17781, 0.48224, 0.64722], + [0.18861, 0.49019, 0.6526], + [0.19958, 0.49819, 0.65802], + [0.21078, 0.5062, 0.66346], + [0.22212, 0.51426, 0.66892], + [0.2336, 0.52232, 0.6744], + [0.24523, 0.53041, 0.67989], + [0.257, 0.53852, 0.68541], + [0.26887, 0.54662, 0.69091], + [0.2808, 0.55472, 0.69643], + [0.29285, 0.56282, 0.70193], + [0.30498, 0.57091, 0.70745], + [0.31717, 0.579, 0.71295], + [0.32944, 0.58706, 0.71845], + 
[0.34173, 0.59512, 0.72393], + [0.35407, 0.60316, 0.72941], + [0.36646, 0.61119, 0.73488], + [0.37886, 0.61919, 0.74032], + [0.39131, 0.62716, 0.74576], + [0.40376, 0.63511, 0.75118], + [0.41623, 0.64305, 0.75658], + [0.42871, 0.65096, 0.76197], + [0.4412, 0.65884, 0.76734], + [0.4537, 0.6667, 0.7727], + [0.4662, 0.67454, 0.77804], + [0.4787, 0.68235, 0.78337], + [0.49121, 0.69014, 0.78868], + [0.50369, 0.69791, 0.79398], + [0.51618, 0.70566, 0.79926], + [0.52868, 0.71339, 0.80452], + [0.54115, 0.72109, 0.80977], + [0.55362, 0.72878, 0.81501], + [0.5661, 0.73644, 0.82023], + [0.57856, 0.74409, 0.82544], + [0.59101, 0.75172, 0.83063], + [0.60347, 0.75931, 0.83579], + [0.61591, 0.7669, 0.84094], + [0.62835, 0.77445, 0.84606], + [0.64078, 0.78199, 0.85115], + [0.6532, 0.78949, 0.85621], + [0.66563, 0.79694, 0.86121], + [0.67805, 0.80437, 0.86617], + [0.69046, 0.81174, 0.87106], + [0.70287, 0.81905, 0.87587], + [0.71526, 0.82629, 0.88057], + [0.72765, 0.83344, 0.88515], + [0.74002, 0.84048, 0.88957], + [0.75235, 0.84738, 0.89381], + [0.76466, 0.85413, 0.89782], + [0.77692, 0.86068, 0.90156], + [0.7891, 0.86699, 0.90499], + [0.80117, 0.87303, 0.90804], + [0.81311, 0.87874, 0.91065], + [0.82487, 0.88406, 0.91276], + [0.8364, 0.88893, 0.9143], + [0.84762, 0.89329, 0.9152], + [0.85847, 0.89707, 0.91539], + [0.86887, 0.90021, 0.91481], + [0.87873, 0.90264, 0.91342], + [0.88797, 0.9043, 0.91116], + [0.8965, 0.90518, 0.90803], + [0.90424, 0.90522, 0.90401], + [0.91115, 0.90442, 0.89913], + [0.91718, 0.9028, 0.89341], + [0.92229, 0.90037, 0.88691], + [0.92648, 0.89717, 0.87969], + [0.92979, 0.89326, 0.87183], + [0.93224, 0.8887, 0.8634], + [0.93388, 0.88355, 0.85448], + [0.93478, 0.87789, 0.84515], + [0.93501, 0.8718, 0.83549], + [0.93464, 0.86531, 0.82556], + [0.93375, 0.85852, 0.81542], + [0.93241, 0.85147, 0.80511], + [0.93068, 0.84421, 0.79468], + [0.92862, 0.83678, 0.78417], + [0.9263, 0.82921, 0.77358], + [0.92375, 0.82154, 0.76296], + [0.92102, 0.8138, 0.75231], + 
[0.91815, 0.806, 0.74166], + [0.91516, 0.79816, 0.73101], + [0.91208, 0.79029, 0.72037], + [0.90893, 0.78242, 0.70975], + [0.90574, 0.77454, 0.69915], + [0.90251, 0.76667, 0.68859], + [0.89925, 0.75881, 0.67805], + [0.89597, 0.75097, 0.66755], + [0.89269, 0.74315, 0.65709], + [0.8894, 0.73535, 0.64666], + [0.88612, 0.72757, 0.63627], + [0.88283, 0.71983, 0.62592], + [0.87956, 0.71211, 0.61562], + [0.87629, 0.70442, 0.60536], + [0.87303, 0.69676, 0.59514], + [0.86978, 0.68914, 0.58497], + [0.86655, 0.68154, 0.57483], + [0.86333, 0.67398, 0.56475], + [0.86012, 0.66645, 0.55471], + [0.85692, 0.65896, 0.54471], + [0.85373, 0.6515, 0.53475], + [0.85056, 0.64406, 0.52484], + [0.8474, 0.63667, 0.51497], + [0.84426, 0.6293, 0.50515], + [0.84113, 0.62196, 0.49537], + [0.838, 0.61465, 0.48563], + [0.83489, 0.60739, 0.47594], + [0.8318, 0.60014, 0.46628], + [0.82872, 0.59294, 0.45668], + [0.82564, 0.58576, 0.44711], + [0.82258, 0.5786, 0.43759], + [0.81953, 0.57148, 0.42811], + [0.8165, 0.56439, 0.41866], + [0.81346, 0.55733, 0.40926], + [0.81045, 0.55028, 0.39989], + [0.80744, 0.54327, 0.39058], + [0.80445, 0.53629, 0.3813], + [0.80145, 0.52933, 0.37204], + [0.79847, 0.52238, 0.36284], + [0.7955, 0.51546, 0.35366], + [0.79253, 0.50857, 0.34452], + [0.78957, 0.50169, 0.33544], + [0.78662, 0.49483, 0.32634], + [0.78366, 0.48798, 0.31731], + [0.7807, 0.48112, 0.3083], + [0.77774, 0.4743, 0.29933], + [0.77476, 0.46746, 0.29035], + [0.77179, 0.46062, 0.28142], + [0.76879, 0.45378, 0.27251], + [0.76578, 0.44693, 0.26364], + [0.76272, 0.44005, 0.25476], + [0.75964, 0.43315, 0.24587], + [0.75651, 0.4262, 0.23705], + [0.75332, 0.41922, 0.22819], + [0.75005, 0.41216, 0.21933], + [0.7467, 0.40503, 0.21047], + [0.74324, 0.39782, 0.20159], + [0.73965, 0.39049, 0.19274], + [0.7359, 0.38306, 0.18385], + [0.73199, 0.37547, 0.17498], + [0.72786, 0.36774, 0.16605], + [0.72352, 0.35985, 0.15713], + [0.71891, 0.35177, 0.14821], + [0.71403, 0.3435, 0.13928], + [0.70884, 0.33505, 0.13046], + 
[0.70332, 0.32635, 0.12154], + [0.69745, 0.3175, 0.11284], + [0.69123, 0.30846, 0.10413], + [0.68465, 0.29926, 0.095633], + [0.67773, 0.28992, 0.08735], + [0.67048, 0.28048, 0.079197], + [0.6629, 0.27102, 0.07151], + [0.65505, 0.26152, 0.064079], + [0.64697, 0.25208, 0.057104], + [0.63869, 0.24271, 0.050618], + [0.63026, 0.23349, 0.04475], + [0.62172, 0.22445, 0.039414], + [0.61313, 0.21566, 0.034829], + [0.60454, 0.20709, 0.031072], + [0.59595, 0.19874, 0.028212], + [0.5874, 0.1907, 0.026019], + [0.57894, 0.18292, 0.024396], + [0.57054, 0.17542, 0.023257], + [0.56227, 0.16817, 0.022523], + [0.55408, 0.1612, 0.02211], + [0.54601, 0.1544, 0.021861], + [0.53804, 0.14785, 0.021737], + [0.53018, 0.14149, 0.021722], + [0.52242, 0.13528, 0.0218], + [0.51478, 0.12921, 0.021957], + [0.50721, 0.12327, 0.022179], + [0.49973, 0.11749, 0.022455], + [0.49235, 0.11182, 0.022775], + [0.48503, 0.10621, 0.02313], + [0.4778, 0.10061, 0.023513], + [0.47064, 0.095156, 0.023916], + [0.46353, 0.089668, 0.024336], + [0.45649, 0.084258, 0.024766], + [0.44952, 0.078741, 0.025203], + [0.4426, 0.073404, 0.025644], + [0.43574, 0.067904, 0.026084], + [0.42892, 0.062415, 0.026522], + [0.42215, 0.056832, 0.026954], + [0.41544, 0.051116, 0.027378], + [0.40877, 0.045352, 0.02779], + [0.40213, 0.039448, 0.028189], + [0.39556, 0.033385, 0.02857], + [0.38902, 0.027844, 0.028932], + [0.3825, 0.022586, 0.029271], + [0.37603, 0.017608, 0.029583], + [0.36958, 0.01289, 0.029866], + [0.36316, 0.0082428, 0.030115], + [0.35679, 0.0040345, 0.030327], + [0.35042, 6.1141e-05, 0.030499]] + +vik_map = LinearSegmentedColormap.from_list('vik', cm_data) +# For use of "viscm view" +test_cm = vik_map + +if __name__ == "__main__": + import matplotlib.pyplot as plt + import numpy as np + + try: + from viscm import viscm + viscm(vik_map) + except ImportError: + print("viscm not found, falling back on simple display") + plt.imshow(np.linspace(0, 100, 256)[None, :], aspect='auto', + cmap=vik_map) + plt.show() diff --git 
a/newt/colormaps/vik/vik.spk b/newt/colormaps/vik/vik.spk new file mode 100644 index 0000000..8f02bff --- /dev/null +++ b/newt/colormaps/vik/vik.spk @@ -0,0 +1,256 @@ + 0.00 0.13 6.98 37.95 + 0.39 0.24 7.65 38.35 + 0.78 0.33 8.31 38.75 + 1.18 0.41 8.96 39.15 + 1.57 0.49 9.59 39.55 + 1.96 0.56 10.23 39.94 + 2.35 0.62 10.85 40.34 + 2.75 0.67 11.47 40.73 + 3.14 0.71 12.08 41.13 + 3.53 0.75 12.70 41.52 + 3.92 0.79 13.31 41.92 + 4.31 0.81 13.91 42.31 + 4.71 0.84 14.52 42.70 + 5.10 0.86 15.11 43.09 + 5.49 0.88 15.71 43.48 + 5.88 0.89 16.32 43.87 + 6.27 0.91 16.91 44.26 + 6.67 0.92 17.51 44.65 + 7.06 0.93 18.11 45.03 + 7.45 0.94 18.71 45.42 + 7.84 0.94 19.30 45.81 + 8.24 0.95 19.90 46.20 + 8.63 0.96 20.50 46.58 + 9.02 0.96 21.10 46.97 + 9.41 0.97 21.70 47.36 + 9.80 0.97 22.31 47.75 + 10.20 0.98 22.91 48.14 + 10.59 0.99 23.52 48.52 + 10.98 1.00 24.13 48.92 + 11.37 1.01 24.74 49.31 + 11.76 1.03 25.35 49.70 + 12.16 1.05 25.97 50.10 + 12.55 1.08 26.59 50.49 + 12.94 1.12 27.20 50.89 + 13.33 1.17 27.83 51.29 + 13.73 1.23 28.46 51.70 + 14.12 1.29 29.09 52.10 + 14.51 1.38 29.72 52.51 + 14.90 1.48 30.36 52.92 + 15.29 1.61 31.00 53.33 + 15.69 1.77 31.65 53.75 + 16.08 1.96 32.30 54.17 + 16.47 2.19 32.96 54.59 + 16.86 2.47 33.61 55.02 + 17.25 2.80 34.28 55.45 + 17.65 3.20 34.95 55.89 + 18.04 3.68 35.63 56.33 + 18.43 4.22 36.32 56.78 + 18.82 4.80 37.01 57.23 + 19.22 5.43 37.71 57.69 + 19.61 6.10 38.41 58.16 + 20.00 6.81 39.13 58.63 + 20.39 7.55 39.85 59.10 + 20.78 8.32 40.57 59.59 + 21.18 9.14 41.31 60.08 + 21.57 9.98 42.05 60.57 + 21.96 10.86 42.80 61.07 + 22.35 11.77 43.56 61.58 + 22.75 12.70 44.32 62.09 + 23.14 13.67 45.09 62.61 + 23.53 14.66 45.86 63.13 + 23.92 15.68 46.65 63.66 + 24.31 16.72 47.43 64.19 + 24.71 17.78 48.22 64.72 + 25.10 18.86 49.02 65.26 + 25.49 19.96 49.82 65.80 + 25.88 21.08 50.62 66.35 + 26.27 22.21 51.43 66.89 + 26.67 23.36 52.23 67.44 + 27.06 24.52 53.04 67.99 + 27.45 25.70 53.85 68.54 + 27.84 26.89 54.66 69.09 + 28.24 28.08 55.47 69.64 + 28.63 29.29 56.28 
70.19 + 29.02 30.50 57.09 70.74 + 29.41 31.72 57.90 71.29 + 29.80 32.94 58.71 71.84 + 30.20 34.17 59.51 72.39 + 30.59 35.41 60.32 72.94 + 30.98 36.65 61.12 73.49 + 31.37 37.89 61.92 74.03 + 31.76 39.13 62.72 74.58 + 32.16 40.38 63.51 75.12 + 32.55 41.62 64.30 75.66 + 32.94 42.87 65.10 76.20 + 33.33 44.12 65.88 76.73 + 33.73 45.37 66.67 77.27 + 34.12 46.62 67.45 77.80 + 34.51 47.87 68.23 78.34 + 34.90 49.12 69.01 78.87 + 35.29 50.37 69.79 79.40 + 35.69 51.62 70.57 79.93 + 36.08 52.87 71.34 80.45 + 36.47 54.11 72.11 80.98 + 36.86 55.36 72.88 81.50 + 37.25 56.61 73.64 82.02 + 37.65 57.86 74.41 82.54 + 38.04 59.10 75.17 83.06 + 38.43 60.35 75.93 83.58 + 38.82 61.59 76.69 84.09 + 39.22 62.84 77.45 84.61 + 39.61 64.08 78.20 85.11 + 40.00 65.32 78.95 85.62 + 40.39 66.56 79.69 86.12 + 40.78 67.81 80.44 86.62 + 41.18 69.05 81.17 87.11 + 41.57 70.29 81.90 87.59 + 41.96 71.53 82.63 88.06 + 42.35 72.76 83.34 88.51 + 42.75 74.00 84.05 88.96 + 43.14 75.24 84.74 89.38 + 43.53 76.47 85.41 89.78 + 43.92 77.69 86.07 90.16 + 44.31 78.91 86.70 90.50 + 44.71 80.12 87.30 90.80 + 45.10 81.31 87.87 91.07 + 45.49 82.49 88.41 91.28 + 45.88 83.64 88.89 91.43 + 46.27 84.76 89.33 91.52 + 46.67 85.85 89.71 91.54 + 47.06 86.89 90.02 91.48 + 47.45 87.87 90.26 91.34 + 47.84 88.80 90.43 91.12 + 48.24 89.65 90.52 90.80 + 48.63 90.42 90.52 90.40 + 49.02 91.12 90.44 89.91 + 49.41 91.72 90.28 89.34 + 49.80 92.23 90.04 88.69 + 50.20 92.65 89.72 87.97 + 50.59 92.98 89.33 87.18 + 50.98 93.22 88.87 86.34 + 51.37 93.39 88.36 85.45 + 51.76 93.48 87.79 84.52 + 52.16 93.50 87.18 83.55 + 52.55 93.46 86.53 82.56 + 52.94 93.38 85.85 81.54 + 53.33 93.24 85.15 80.51 + 53.73 93.07 84.42 79.47 + 54.12 92.86 83.68 78.42 + 54.51 92.63 82.92 77.36 + 54.90 92.38 82.15 76.30 + 55.29 92.10 81.38 75.23 + 55.69 91.81 80.60 74.17 + 56.08 91.52 79.82 73.10 + 56.47 91.21 79.03 72.04 + 56.86 90.89 78.24 70.98 + 57.25 90.57 77.45 69.91 + 57.65 90.25 76.67 68.86 + 58.04 89.92 75.88 67.81 + 58.43 89.60 75.10 66.75 + 58.82 89.27 
74.31 65.71 + 59.22 88.94 73.53 64.67 + 59.61 88.61 72.76 63.63 + 60.00 88.28 71.98 62.59 + 60.39 87.96 71.21 61.56 + 60.78 87.63 70.44 60.54 + 61.18 87.30 69.68 59.51 + 61.57 86.98 68.91 58.50 + 61.96 86.66 68.15 57.48 + 62.35 86.33 67.40 56.47 + 62.75 86.01 66.65 55.47 + 63.14 85.69 65.90 54.47 + 63.53 85.37 65.15 53.48 + 63.92 85.06 64.41 52.48 + 64.31 84.74 63.67 51.50 + 64.71 84.43 62.93 50.51 + 65.10 84.11 62.20 49.54 + 65.49 83.80 61.47 48.56 + 65.88 83.49 60.74 47.59 + 66.27 83.18 60.01 46.63 + 66.67 82.87 59.29 45.67 + 67.06 82.56 58.58 44.71 + 67.45 82.26 57.86 43.76 + 67.84 81.95 57.15 42.81 + 68.24 81.65 56.44 41.87 + 68.63 81.35 55.73 40.93 + 69.02 81.04 55.03 39.99 + 69.41 80.74 54.33 39.06 + 69.80 80.44 53.63 38.13 + 70.20 80.15 52.93 37.20 + 70.59 79.85 52.24 36.28 + 70.98 79.55 51.55 35.37 + 71.37 79.25 50.86 34.45 + 71.76 78.96 50.17 33.54 + 72.16 78.66 49.48 32.63 + 72.55 78.37 48.80 31.73 + 72.94 78.07 48.11 30.83 + 73.33 77.77 47.43 29.93 + 73.73 77.48 46.75 29.04 + 74.12 77.18 46.06 28.14 + 74.51 76.88 45.38 27.25 + 74.90 76.58 44.69 26.36 + 75.29 76.27 44.01 25.48 + 75.69 75.96 43.31 24.59 + 76.08 75.65 42.62 23.70 + 76.47 75.33 41.92 22.82 + 76.86 75.01 41.22 21.93 + 77.25 74.67 40.50 21.05 + 77.65 74.32 39.78 20.16 + 78.04 73.97 39.05 19.27 + 78.43 73.59 38.31 18.39 + 78.82 73.20 37.55 17.50 + 79.22 72.79 36.77 16.60 + 79.61 72.35 35.99 15.71 + 80.00 71.89 35.18 14.82 + 80.39 71.40 34.35 13.93 + 80.78 70.88 33.50 13.05 + 81.18 70.33 32.64 12.15 + 81.57 69.74 31.75 11.28 + 81.96 69.12 30.85 10.41 + 82.35 68.47 29.93 9.56 + 82.75 67.77 28.99 8.73 + 83.14 67.05 28.05 7.92 + 83.53 66.29 27.10 7.15 + 83.92 65.50 26.15 6.41 + 84.31 64.70 25.21 5.71 + 84.71 63.87 24.27 5.06 + 85.10 63.03 23.35 4.47 + 85.49 62.17 22.44 3.94 + 85.88 61.31 21.57 3.48 + 86.27 60.45 20.71 3.11 + 86.67 59.59 19.87 2.82 + 87.06 58.74 19.07 2.60 + 87.45 57.89 18.29 2.44 + 87.84 57.05 17.54 2.33 + 88.24 56.23 16.82 2.25 + 88.63 55.41 16.12 2.21 + 89.02 54.60 15.44 2.19 + 
89.41 53.80 14.79 2.17 + 89.80 53.02 14.15 2.17 + 90.20 52.24 13.53 2.18 + 90.59 51.48 12.92 2.20 + 90.98 50.72 12.33 2.22 + 91.37 49.97 11.75 2.25 + 91.76 49.23 11.18 2.28 + 92.16 48.50 10.62 2.31 + 92.55 47.78 10.06 2.35 + 92.94 47.06 9.52 2.39 + 93.33 46.35 8.97 2.43 + 93.73 45.65 8.43 2.48 + 94.12 44.95 7.87 2.52 + 94.51 44.26 7.34 2.56 + 94.90 43.57 6.79 2.61 + 95.29 42.89 6.24 2.65 + 95.69 42.21 5.68 2.70 + 96.08 41.54 5.11 2.74 + 96.47 40.88 4.54 2.78 + 96.86 40.21 3.94 2.82 + 97.25 39.56 3.34 2.86 + 97.65 38.90 2.78 2.89 + 98.04 38.25 2.26 2.93 + 98.43 37.60 1.76 2.96 + 98.82 36.96 1.29 2.99 + 99.22 36.32 0.82 3.01 + 99.61 35.68 0.40 3.03 +100.00 35.04 0.01 3.05 diff --git a/newt/colormaps/vik/vik.svg b/newt/colormaps/vik/vik.svg new file mode 100644 index 0000000..06f0850 --- /dev/null +++ b/newt/colormaps/vik/vik.svg @@ -0,0 +1,528 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/newt/colormaps/vik/vik.txt b/newt/colormaps/vik/vik.txt new file mode 
100644 index 0000000..c2b8831 --- /dev/null +++ b/newt/colormaps/vik/vik.txt @@ -0,0 +1,256 @@ +0.001328 0.069836 0.379529 +0.002366 0.076475 0.383518 +0.003304 0.083083 0.387487 +0.004146 0.089590 0.391477 +0.004897 0.095948 0.395453 +0.005563 0.102274 0.399409 +0.006151 0.108500 0.403388 +0.006668 0.114686 0.407339 +0.007119 0.120845 0.411288 +0.007512 0.126958 0.415230 +0.007850 0.133068 0.419166 +0.008141 0.139092 0.423079 +0.008391 0.145171 0.427006 +0.008606 0.151144 0.430910 +0.008790 0.157140 0.434809 +0.008947 0.163152 0.438691 +0.009080 0.169142 0.442587 +0.009193 0.175103 0.446459 +0.009290 0.181052 0.450337 +0.009372 0.187051 0.454212 +0.009443 0.193028 0.458077 +0.009506 0.198999 0.461951 +0.009564 0.205011 0.465816 +0.009619 0.211021 0.469707 +0.009675 0.217047 0.473571 +0.009735 0.223084 0.477461 +0.009802 0.229123 0.481352 +0.009881 0.235206 0.485250 +0.009977 0.241277 0.489161 +0.010098 0.247386 0.493080 +0.010254 0.253516 0.497020 +0.010463 0.259675 0.500974 +0.010755 0.265853 0.504938 +0.011176 0.272037 0.508925 +0.011716 0.278296 0.512923 +0.012286 0.284554 0.516953 +0.012934 0.290865 0.520998 +0.013790 0.297214 0.525074 +0.014838 0.303577 0.529184 +0.016131 0.310015 0.533308 +0.017711 0.316474 0.537485 +0.019630 0.322986 0.541677 +0.021948 0.329550 0.545931 +0.024730 0.336144 0.550210 +0.028047 0.342826 0.554538 +0.031980 0.349543 0.558906 +0.036812 0.356332 0.563341 +0.042229 0.363171 0.567811 +0.048008 0.370086 0.572345 +0.054292 0.377080 0.576933 +0.060963 0.384129 0.581571 +0.068081 0.391265 0.586280 +0.075457 0.398460 0.591042 +0.083246 0.405740 0.595868 +0.091425 0.413088 0.600754 +0.099832 0.420499 0.605697 +0.108595 0.428000 0.610711 +0.117694 0.435566 0.615770 +0.127042 0.443194 0.620895 +0.136702 0.450888 0.626062 +0.146607 0.458643 0.631289 +0.156787 0.466457 0.636560 +0.167187 0.474324 0.641866 +0.177807 0.482238 0.647218 +0.188606 0.490191 0.652599 +0.199580 0.498193 0.658021 +0.210783 0.506201 0.663465 +0.222120 0.514263 0.668924 
+0.233602 0.522322 0.674403 +0.245231 0.530414 0.679894 +0.256999 0.538517 0.685405 +0.268867 0.546617 0.690908 +0.280797 0.554717 0.696428 +0.292852 0.562822 0.701935 +0.304985 0.570907 0.707448 +0.317174 0.578997 0.712950 +0.329438 0.587064 0.718447 +0.341729 0.595123 0.723934 +0.354067 0.603164 0.729412 +0.366459 0.611186 0.734877 +0.378862 0.619189 0.740325 +0.391305 0.627159 0.745757 +0.403760 0.635114 0.751183 +0.416227 0.643046 0.756582 +0.428711 0.650956 0.761968 +0.441199 0.658836 0.767341 +0.453697 0.666696 0.772699 +0.466195 0.674537 0.778044 +0.478697 0.682349 0.783369 +0.491208 0.690143 0.788682 +0.503691 0.697910 0.793980 +0.516178 0.705661 0.799260 +0.528677 0.713387 0.804525 +0.541149 0.721090 0.809775 +0.553624 0.728778 0.815010 +0.566096 0.736441 0.820229 +0.578557 0.744089 0.825435 +0.591014 0.751718 0.830626 +0.603468 0.759314 0.835793 +0.615908 0.766896 0.840941 +0.628351 0.774452 0.846058 +0.640779 0.781988 0.851147 +0.653203 0.789485 0.856206 +0.665631 0.796945 0.861214 +0.678051 0.804371 0.866172 +0.690457 0.811742 0.871059 +0.702868 0.819048 0.875866 +0.715265 0.826290 0.880567 +0.727646 0.833439 0.885146 +0.740019 0.840479 0.889570 +0.752354 0.847380 0.893807 +0.764662 0.854125 0.897821 +0.776918 0.860678 0.901565 +0.789096 0.866991 0.904992 +0.801170 0.873031 0.908043 +0.813110 0.878738 0.910653 +0.824870 0.884062 0.912761 +0.836396 0.888934 0.914302 +0.847617 0.893289 0.915195 +0.858470 0.897074 0.915385 +0.868874 0.900206 0.914812 +0.878729 0.902636 0.913418 +0.887965 0.904303 0.911164 +0.896497 0.905178 0.908034 +0.904242 0.905221 0.904013 +0.911151 0.904422 0.899132 +0.917175 0.902800 0.893409 +0.922285 0.900367 0.886911 +0.926482 0.897173 0.879687 +0.929789 0.893256 0.871826 +0.932236 0.888698 0.863396 +0.933880 0.883552 0.854476 +0.934782 0.877893 0.845152 +0.935013 0.871795 0.835493 +0.934644 0.865313 0.825561 +0.933752 0.858522 0.815421 +0.932408 0.851469 0.805112 +0.930682 0.844208 0.794685 +0.928622 0.836778 0.784169 +0.926298 
0.829215 0.773579 +0.923752 0.821545 0.762958 +0.921017 0.813795 0.752313 +0.918147 0.805997 0.741659 +0.915156 0.798157 0.731008 +0.912080 0.790294 0.720370 +0.908933 0.782421 0.709752 +0.905741 0.774540 0.699150 +0.902506 0.766670 0.688588 +0.899249 0.758812 0.678051 +0.895973 0.750973 0.667550 +0.892690 0.743148 0.657086 +0.889402 0.735345 0.646657 +0.886118 0.727569 0.636274 +0.882831 0.719826 0.625923 +0.879556 0.712106 0.615618 +0.876289 0.704419 0.605357 +0.873033 0.696764 0.595141 +0.869784 0.689144 0.584972 +0.866551 0.681541 0.574832 +0.863333 0.673985 0.564746 +0.860121 0.666453 0.554708 +0.856920 0.658957 0.544709 +0.853732 0.651500 0.534753 +0.850562 0.644061 0.524842 +0.847402 0.636670 0.514974 +0.844258 0.629296 0.505146 +0.841125 0.621957 0.495369 +0.838005 0.614653 0.485627 +0.834895 0.607392 0.475941 +0.831802 0.600144 0.466284 +0.828715 0.592938 0.456675 +0.825639 0.585758 0.447109 +0.822582 0.578600 0.437595 +0.819528 0.571478 0.428106 +0.816496 0.564388 0.418657 +0.813463 0.557328 0.409260 +0.810446 0.550285 0.399892 +0.807443 0.543274 0.390575 +0.804446 0.536288 0.381299 +0.801454 0.529329 0.372040 +0.798475 0.522380 0.362835 +0.795500 0.515460 0.353660 +0.792535 0.508575 0.344523 +0.789573 0.501692 0.335435 +0.786617 0.494827 0.326343 +0.783657 0.487977 0.317312 +0.780695 0.481123 0.308300 +0.777737 0.474295 0.299327 +0.774763 0.467464 0.290352 +0.771788 0.460620 0.281424 +0.768787 0.453783 0.272508 +0.765776 0.446929 0.263640 +0.762724 0.440055 0.254764 +0.759638 0.433147 0.245872 +0.756510 0.426200 0.237047 +0.753316 0.419216 0.228190 +0.750051 0.412163 0.219330 +0.746698 0.405028 0.210470 +0.743239 0.397819 0.201593 +0.739651 0.390493 0.192739 +0.735899 0.383060 0.183852 +0.731988 0.375473 0.174977 +0.727865 0.367743 0.166045 +0.723516 0.359852 0.157131 +0.718915 0.351766 0.148211 +0.714028 0.343503 0.139282 +0.708841 0.335048 0.130458 +0.703318 0.326354 0.121545 +0.697448 0.317502 0.112841 +0.691227 0.308462 0.104132 +0.684653 0.299264 
0.095633 +0.677734 0.289916 0.087350 +0.670476 0.280477 0.079197 +0.662904 0.271015 0.071510 +0.655048 0.261520 0.064079 +0.646969 0.252081 0.057104 +0.638686 0.242711 0.050618 +0.630261 0.233488 0.044750 +0.621722 0.224449 0.039414 +0.613135 0.215657 0.034829 +0.604539 0.207086 0.031072 +0.595947 0.198741 0.028212 +0.587403 0.190700 0.026019 +0.578937 0.182918 0.024396 +0.570545 0.175423 0.023257 +0.562268 0.168171 0.022523 +0.554076 0.161202 0.022110 +0.546007 0.154400 0.021861 +0.538043 0.147854 0.021737 +0.530182 0.141491 0.021722 +0.522424 0.135276 0.021800 +0.514776 0.129209 0.021957 +0.507213 0.123272 0.022179 +0.499733 0.117487 0.022455 +0.492348 0.111818 0.022775 +0.485034 0.106209 0.023130 +0.477801 0.100607 0.023513 +0.470639 0.095156 0.023916 +0.463530 0.089668 0.024336 +0.456494 0.084258 0.024766 +0.449521 0.078741 0.025203 +0.442603 0.073404 0.025644 +0.435737 0.067904 0.026084 +0.428918 0.062415 0.026522 +0.422146 0.056832 0.026954 +0.415437 0.051116 0.027378 +0.408768 0.045352 0.027790 +0.402132 0.039448 0.028189 +0.395562 0.033385 0.028570 +0.389015 0.027844 0.028932 +0.382496 0.022586 0.029271 +0.376028 0.017608 0.029583 +0.369578 0.012890 0.029866 +0.363161 0.008243 0.030115 +0.356785 0.004035 0.030327 +0.350423 0.000061 0.030499 diff --git a/newt/colormaps/vik/vik.xcmap b/newt/colormaps/vik/vik.xcmap new file mode 100644 index 0000000..4e859f5 --- /dev/null +++ b/newt/colormaps/vik/vik.xcmap @@ -0,0 +1,268 @@ + + + + + + scientific-vik + linear + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + diff --git a/newt/colormaps/vik/vik_PARAVIEW.xml b/newt/colormaps/vik/vik_PARAVIEW.xml new file mode 100644 index 0000000..f249aba --- /dev/null +++ b/newt/colormaps/vik/vik_PARAVIEW.xml @@ -0,0 +1,260 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/newt/colormaps/vik/vik_QGIS.xml b/newt/colormaps/vik/vik_QGIS.xml new file mode 100644 index 0000000..c203d74 --- /dev/null +++ b/newt/colormaps/vik/vik_QGIS.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/newt/cubature.py b/newt/cubature.py new file mode 100644 index 0000000..ad4aac7 --- /dev/null +++ b/newt/cubature.py @@ -0,0 +1,341 @@ +import objax +from jax import vmap +import jax.numpy as np +from jax.ops import index_add, index +from jax.scipy.linalg import cholesky, cho_factor +from .utils import solve, gaussian_first_derivative_wrt_mean, gaussian_second_derivative_wrt_mean +from numpy.polynomial.hermite import hermgauss +import itertools + + +class Cubature(objax.Module): + + def __call__(self, dim): + raise NotImplementedError + + +class GaussHermite(Cubature): + + def __init__(self, num_cub_points=20): + self.num_cub_points = num_cub_points + + def __call__(self, dim): + return gauss_hermite(dim, self.num_cub_points) + + +class UnscentedThirdOrder(Cubature): + + def __call__(self, dim): + return symmetric_cubature_third_order(dim) + + +class UnscentedFifthOrder(Cubature): + + def __call__(self, dim): + return symmetric_cubature_fifth_order(dim) + + +class 
Unscented(UnscentedFifthOrder): + pass + + +def mvhermgauss(H: int, D: int): + """ + This function is adapted from GPflow: https://github.com/GPflow/GPflow + + Return the evaluation locations 'xn', and weights 'wn' for a multivariate + Gauss-Hermite quadrature. + + The outputs can be used to approximate the following type of integral: + int exp(-x)*f(x) dx ~ sum_i w[i,:]*f(x[i,:]) + + :param H: Number of Gauss-Hermite evaluation points. + :param D: Number of input dimensions. Needs to be known at call-time. + :return: eval_locations 'x' (H**DxD), weights 'w' (H**D) + """ + gh_x, gh_w = hermgauss(H) + x = np.array(list(itertools.product(*(gh_x,) * D))) # H**DxD + w = np.prod(np.array(list(itertools.product(*(gh_w,) * D))), 1) # H**D + return x, w + + +def gauss_hermite(dim=1, num_quad_pts=20): + """ + Return weights and sigma-points for Gauss-Hermite cubature + """ + # sigma_pts, weights = hermgauss(num_quad_pts) # Gauss-Hermite sigma points and weights + sigma_pts, weights = mvhermgauss(num_quad_pts, dim) + sigma_pts = np.sqrt(2) * sigma_pts.T + weights = weights.T * np.pi ** (-0.5 * dim) # scale weights by 1/โˆšฯ€ + return sigma_pts, weights + + +def symmetric_cubature_third_order(dim=1, kappa=None): + """ + Return weights and sigma-points for the symmetric cubature rule of order 5, for + dimension dim with parameter kappa (default 0). 
+ """ + if kappa is None: + # kappa = 1 - dim + kappa = 0 # CKF + if (dim == 1) and (kappa == 0): + weights = np.array([0., 0.5, 0.5]) + sigma_pts = np.array([0., 1., -1.]) + # sigma_pts = np.array([-1., 0., 1.]) + # weights = np.array([0.5, 0., 0.5]) + # u = 1 + elif (dim == 2) and (kappa == 0): + weights = np.array([0., 0.25, 0.25, 0.25, 0.25]) + sigma_pts = np.block([[0., 1.4142, 0., -1.4142, 0.], + [0., 0., 1.4142, 0., -1.4142]]) + # u = 1.4142 + elif (dim == 3) and (kappa == 0): + weights = np.array([0., 0.1667, 0.1667, 0.1667, 0.1667, 0.1667, 0.1667]) + sigma_pts = np.block([[0., 1.7321, 0., 0., -1.7321, 0., 0.], + [0., 0., 1.7321, 0., 0., -1.7321, 0.], + [0., 0., 0., 1.7321, 0., 0., -1.7321]]) + # u = 1.7321 + else: + # weights + weights = np.zeros([1, 2 * dim + 1]) + weights = index_add(weights, index[0, 0], kappa / (dim + kappa)) + for j in range(1, 2 * dim + 1): + wm = 1 / (2 * (dim + kappa)) + weights = index_add(weights, index[0, j], wm) + # Sigma points + sigma_pts = np.block([np.zeros([dim, 1]), np.eye(dim), - np.eye(dim)]) + sigma_pts = np.sqrt(dim + kappa) * sigma_pts + # u = np.sqrt(n + kappa) + return sigma_pts, weights # , u + + +def symmetric_cubature_fifth_order(dim=1): + """ + Return weights and sigma-points for the symmetric cubature rule of order 5 + TODO: implement general form + """ + if dim == 1: + weights = np.array([0.6667, 0.1667, 0.1667]) + sigma_pts = np.array([0., 1.7321, -1.7321]) + elif dim == 2: + weights = np.array([0.4444, 0.1111, 0.1111, 0.1111, 0.1111, 0.0278, 0.0278, 0.0278, 0.0278]) + sigma_pts = np.block([[0., 1.7321, -1.7321, 0., 0., 1.7321, -1.7321, 1.7321, -1.7321], + [0., 0., 0., 1.7321, -1.7321, 1.7321, -1.7321, -1.7321, 1.7321]]) + elif dim == 3: + weights = np.array([0.3333, 0.0556, 0.0556, 0.0556, 0.0556, 0.0556, 0.0556, 0.0278, 0.0278, 0.0278, + 0.0278, 0.0278, 0.0278, 0.0278, 0.0278, 0.0278, 0.0278, 0.0278, 0.0278]) + sigma_pts = np.block([[0., 1.7321, -1.7321, 0., 0., 0., 0., 1.7321, -1.7321, 1.7321, -1.7321, 
1.7321, + -1.7321, 1.7321, -1.7321, 0., 0., 0., 0.], + [0., 0., 0., 1.7321, -1.7321, 0., 0., 1.7321, -1.7321, -1.7321, 1.7321, 0., 0., 0., + 0., 1.7321, -1.7321, 1.7321, -1.7321], + [0., 0., 0., 0., 0., 1.7321, -1.7321, 0., 0., 0., 0., 1.7321, -1.7321, -1.7321, + 1.7321, 1.7321, -1.7321, -1.7321, 1.7321]]) + else: + raise NotImplementedError + return sigma_pts, weights + + +def variational_expectation_cubature(likelihood, y, post_mean, post_cov, cubature=None): + """ + Computes the "variational expectation" via cubature, i.e. the + expected log-likelihood, and its derivatives w.r.t. the posterior mean + E[log p(yโ‚™|fโ‚™)] = โˆซ log p(yโ‚™|fโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + with EP power a. + :param likelihood: the likelihood model + :param y: observed data (yโ‚™) [scalar] + :param post_mean: posterior mean (mโ‚™) [scalar] + :param post_cov: posterior variance (vโ‚™) [scalar] + :param cubature: the function to compute sigma points and weights to use during cubature + :return: + exp_log_lik: the expected log likelihood, E[log p(yโ‚™|fโ‚™)] [scalar] + dE_dm: derivative of E[log p(yโ‚™|fโ‚™)] w.r.t. mโ‚™ [scalar] + d2E_dm2: second derivative of E[log p(yโ‚™|fโ‚™)] w.r.t. mโ‚™ [scalar] + """ + if cubature is None: + x, w = gauss_hermite(post_mean.shape[0], 20) # Gauss-Hermite sigma points and weights + else: + x, w = cubature(post_mean.shape[0]) + # fsigแตข=xแตขโˆš(vโ‚™) + mโ‚™: scale locations according to cavity dist. 
+ sigma_points = cholesky(post_cov) @ np.atleast_2d(x) + post_mean + # pre-compute wแตข log p(yโ‚™|xแตขโˆš(2vโ‚™) + mโ‚™) + weighted_log_likelihood_eval = w * likelihood.evaluate_log_likelihood(y, sigma_points) + # Compute expected log likelihood via cubature: + # E[log p(yโ‚™|fโ‚™)] = โˆซ log p(yโ‚™|fโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข p(yโ‚™|fsigแตข) + exp_log_lik = np.sum( + weighted_log_likelihood_eval + ) + # Compute first derivative via cubature: + # dE[log p(yโ‚™|fโ‚™)]/dmโ‚™ = โˆซ (fโ‚™-mโ‚™) vโ‚™โปยน log p(yโ‚™|fโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข (fโ‚™-mโ‚™) vโ‚™โปยน log p(yโ‚™|fsigแตข) + invv = np.diag(post_cov)[:, None] ** -1 + dE_dm = np.sum( + invv * (sigma_points - post_mean) + * weighted_log_likelihood_eval, axis=-1 + )[:, None] + # Compute second derivative via cubature (deriv. w.r.t. var = 0.5 * 2nd deriv. w.r.t. mean): + # dE[log p(yโ‚™|fโ‚™)]/dvโ‚™ = โˆซ [(fโ‚™-mโ‚™)ยฒ vโ‚™โปยฒ - vโ‚™โปยน]/2 log p(yโ‚™|fโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข [(fโ‚™-mโ‚™)ยฒ vโ‚™โปยฒ - vโ‚™โปยน]/2 log p(yโ‚™|fsigแตข) + dE_dv = np.sum( + (0.5 * (invv ** 2 * (sigma_points - post_mean) ** 2) - 0.5 * invv) + * weighted_log_likelihood_eval, axis=-1 + ) + dE_dv = np.diag(dE_dv) + d2E_dm2 = 2 * dE_dv + return exp_log_lik, dE_dm, d2E_dm2 + + +def log_density_cubature(likelihood, y, mean, cov, cubature=None): + """ + logZโ‚™ = log โˆซ p(yโ‚™|fโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + :param likelihood: the likelihood model + :param y: observed data (yโ‚™) [scalar] + :param mean: cavity mean (mโ‚™) [scalar] + :param cov: cavity covariance (cโ‚™) [scalar] + :param cubature: the function to compute sigma points and weights to use during cubature + :return: + lZ: the log density, logZโ‚™ [scalar] + """ + if cubature is None: + x, w = gauss_hermite(mean.shape[0], 20) # Gauss-Hermite sigma points and weights + else: + x, w = cubature(mean.shape[0]) + cav_cho, low = cho_factor(cov) + # fsigแตข=xแตขโˆšcโ‚™ + mโ‚™: scale locations 
according to cavity dist. + sigma_points = cav_cho @ np.atleast_2d(x) + mean + # pre-compute wแตข p(yโ‚™|xแตขโˆš(2vโ‚™) + mโ‚™) + weighted_likelihood_eval = w * likelihood.evaluate_likelihood(y, sigma_points) + # Compute partition function via cubature: + # Zโ‚™ = โˆซ p(yโ‚™|fโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ โ‰ˆ โˆ‘แตข wแตข p(yโ‚™|fsigแตข) + Z = np.sum( + weighted_likelihood_eval, axis=-1 + ) + lZ = np.log(np.maximum(Z, 1e-8)) + return lZ + + +def moment_match_cubature(likelihood, y, cav_mean, cav_cov, power=1.0, cubature=None): + """ + TODO: N.B. THIS VERSION ALLOWS MULTI-DIMENSIONAL MOMENT MATCHING, BUT CAN BE UNSTABLE + Perform moment matching via cubature. + Moment matching involves computing the log partition function, logZโ‚™, and its derivatives w.r.t. the cavity mean + logZโ‚™ = log โˆซ pแตƒ(yโ‚™|fโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + with EP power a. + :param likelihood: the likelihood model + :param y: observed data (yโ‚™) [scalar] + :param cav_mean: cavity mean (mโ‚™) [scalar] + :param cav_cov: cavity covariance (cโ‚™) [scalar] + :param power: EP power / fraction (a) [scalar] + :param cubature: the function to compute sigma points and weights to use during cubature + :return: + lZ: the log partition function, logZโ‚™ [scalar] + dlZ: first derivative of logZโ‚™ w.r.t. mโ‚™ (if derivatives=True) [scalar] + d2lZ: second derivative of logZโ‚™ w.r.t. mโ‚™ (if derivatives=True) [scalar] + """ + if cubature is None: + x, w = gauss_hermite(cav_mean.shape[0], 20) # Gauss-Hermite sigma points and weights + else: + x, w = cubature(cav_mean.shape[0]) + cav_cho, low = cho_factor(cav_cov) + # fsigแตข=xแตขโˆšcโ‚™ + mโ‚™: scale locations according to cavity dist. 
+ sigma_points = cav_cho @ np.atleast_2d(x) + cav_mean + # pre-compute wแตข pแตƒ(yโ‚™|xแตขโˆš(2vโ‚™) + mโ‚™) + weighted_likelihood_eval = w * likelihood.evaluate_likelihood(y, sigma_points) ** power + + # Compute partition function via cubature: + # Zโ‚™ = โˆซ pแตƒ(yโ‚™|fโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข pแตƒ(yโ‚™|fsigแตข) + Z = np.sum( + weighted_likelihood_eval, axis=-1 + ) + lZ = np.log(np.maximum(Z, 1e-8)) + Zinv = 1.0 / np.maximum(Z, 1e-8) + + # Compute derivative of partition function via cubature: + # dZโ‚™/dmโ‚™ = โˆซ (fโ‚™-mโ‚™) vโ‚™โปยน pแตƒ(yโ‚™|fโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข (fโ‚™-mโ‚™) vโ‚™โปยน pแตƒ(yโ‚™|fsigแตข) + d1 = vmap( + gaussian_first_derivative_wrt_mean, (1, None, None, 1) + )(sigma_points[..., None], cav_mean, cav_cov, weighted_likelihood_eval) + dZ = np.sum(d1, axis=0) + # dlogZโ‚™/dmโ‚™ = (dZโ‚™/dmโ‚™) / Zโ‚™ + dlZ = Zinv * dZ + + # Compute second derivative of partition function via cubature: + # dยฒZโ‚™/dmโ‚™ยฒ = โˆซ [(fโ‚™-mโ‚™)ยฒ vโ‚™โปยฒ - vโ‚™โปยน] pแตƒ(yโ‚™|fโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข [(fโ‚™-mโ‚™)ยฒ vโ‚™โปยฒ - vโ‚™โปยน] pแตƒ(yโ‚™|fsigแตข) + d2 = vmap( + gaussian_second_derivative_wrt_mean, (1, None, None, 1) + )(sigma_points[..., None], cav_mean, cav_cov, weighted_likelihood_eval) + d2Z = np.sum(d2, axis=0) + + # dยฒlogZโ‚™/dmโ‚™ยฒ = d[(dZโ‚™/dmโ‚™) / Zโ‚™]/dmโ‚™ + # = (dยฒZโ‚™/dmโ‚™ยฒ * Zโ‚™ - (dZโ‚™/dmโ‚™)ยฒ) / Zโ‚™ยฒ + # = dยฒZโ‚™/dmโ‚™ยฒ / Zโ‚™ - (dlogZโ‚™/dmโ‚™)ยฒ + d2lZ = -dlZ @ dlZ.T + Zinv * d2Z + return lZ, dlZ, d2lZ + + +def statistical_linear_regression_cubature(likelihood, mean, cov, cubature=None): + """ + Perform statistical linear regression (SLR) using cubature. + We aim to find a likelihood approximation p(yโ‚™|fโ‚™) โ‰ˆ ๐“(yโ‚™|Afโ‚™+b,ฮฉ+Var[yโ‚™|fโ‚™]). 
+ TODO: this currently assumes an additive noise model (ok for our current applications), make more general + """ + if cubature is None: + x, w = gauss_hermite(mean.shape[0], 20) # Gauss-Hermite sigma points and weights + else: + x, w = cubature(mean.shape[0]) + # fsigแตข=xแตขโˆš(vโ‚™) + mโ‚™: scale locations according to cavity dist. + sigma_points = cholesky(cov) @ np.atleast_2d(x) + mean + lik_expectation, lik_covariance = likelihood.conditional_moments(sigma_points) + # Compute zโ‚™ via cubature: + # zโ‚™ = โˆซ E[yโ‚™|fโ‚™] ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข E[yโ‚™|fsigแตข] + mu = np.sum( + w * lik_expectation, axis=-1 + )[:, None] + # Compute variance S via cubature: + # S = โˆซ [(E[yโ‚™|fโ‚™]-zโ‚™) (E[yโ‚™|fโ‚™]-zโ‚™)' + Cov[yโ‚™|fโ‚™]] ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข [(E[yโ‚™|fsigแตข]-zโ‚™) (E[yโ‚™|fsigแตข]-zโ‚™)' + Cov[yโ‚™|fโ‚™]] + # TODO: allow for multi-dim cubature + S = np.sum( + w * ((lik_expectation - mu) * (lik_expectation - mu) + lik_covariance), axis=-1 + )[:, None] + # Compute cross covariance C via cubature: + # C = โˆซ (fโ‚™-mโ‚™) (E[yโ‚™|fโ‚™]-zโ‚™)' ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข (fsigแตข -mโ‚™) (E[yโ‚™|fsigแตข]-zโ‚™)' + C = np.sum( + w * (sigma_points - mean) * (lik_expectation - mu), axis=-1 + )[:, None] + # Compute derivative of z via cubature: + # d_mu = โˆซ E[yโ‚™|fโ‚™] vโ‚™โปยน (fโ‚™-mโ‚™) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข E[yโ‚™|fsigแตข] vโ‚™โปยน (fsigแตข-mโ‚™) + d_mu = np.sum( + w * lik_expectation * (solve(cov, sigma_points - mean)), axis=-1 + )[None, :] + return mu, S, C, d_mu + + +def predict_cubature(likelihood, mean_f, var_f, cubature=None): + """ + predict in data space given predictive mean and var of the latent function + """ + if cubature is None: + x, w = gauss_hermite(mean_f.shape[0], 20) # Gauss-Hermite sigma points and weights + else: + x, w = cubature(mean_f.shape[0]) + chol_f, low = cho_factor(var_f) + # fsigแตข=xแตขโˆšcโ‚™ + mโ‚™: scale locations according to 
latent dist. + sigma_points = chol_f @ np.atleast_2d(x) + mean_f + # Compute moments via cubature: + # E[y] = โˆซ E[yโ‚™|fโ‚™] ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข E[yโ‚™|fโ‚™] + # E[y^2] = โˆซ (Cov[yโ‚™|fโ‚™] + E[yโ‚™|fโ‚™]^2) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข (Cov[yโ‚™|fโ‚™] + E[yโ‚™|fโ‚™]^2) + conditional_expectation, conditional_covariance = likelihood.conditional_moments(sigma_points) + expected_y = np.sum(w * conditional_expectation, axis=-1) + expected_y_squared = np.sum(w * (conditional_covariance + conditional_expectation ** 2), axis=-1) + # Cov[y] = E[y^2] - E[y]^2 + covariance_y = expected_y_squared - expected_y ** 2 + return expected_y, covariance_y diff --git a/newt/data/TRI2TU-data.csv b/newt/data/TRI2TU-data.csv new file mode 100644 index 0000000..d0eb403 --- /dev/null +++ b/newt/data/TRI2TU-data.csv @@ -0,0 +1,12929 @@ +7.8,9.2 +10.7,4.8 +11.3,11 +16.6,6.8 +1.2,24.4 +5.5,30.5 +8.8,23.4 +18.6,49.5 +3.2,63.3 +9.4,72.5 +10.6,74.3 +18.8,77.6 +16.7,63.8 +18.3,84.7 +18.5,94.6 +16.6,116.8 +18.9,131.3 +2.5,122.7 +10.4,136 +19.4,134.1 +2.2,147.7 +2.5,154.3 +13.8,154.9 +16.9,152.1 +18.2,161.1 +3.3,184.8 +3.2,196.4 +7.9,191.8 +8.2,188 +5.6,204.4 +10.2,214.2 +14.6,207 +14.6,209.1 +9.4,209 +14.8,225.6 +7.3,228.8 +8.1,228.5 +3.2,255.4 +6.7,257.3 +10.3,259.5 +14.7,271 +10.2,275.8 +14.4,275.9 +18.1,261.8 +11.4,285.9 +5.2,292 +3.9,290.5 +18.4,295.1 +7.4,280.2 +18.7,288.9 +14.9,305.1 +1.6,301.6 +18.4,312.9 +6.8,319.7 +12.8,309 +1.1,338.8 +0.3,339 +12.9,324.7 +14.6,322.9 +11.1,327.1 +13.2,326.3 +13.3,327.2 +12.1,346.9 +13.8,354.8 +2.1,361.4 +12,373.7 +16.4,360.8 +9.6,398.1 +13.1,399.1 +12,406.2 +0.4,401.9 +16,414.4 +7.3,404.7 +7.4,420.9 +15.3,433.5 +0.5,459.7 +13.9,479.4 +6,487.5 +8.8,481.9 +8.3,482.2 +27.7,19 +25.4,13.2 +25.3,7.2 +29.2,4.2 +34.5,9 +32.9,14.8 +34.8,13.8 +33.8,18.2 +35.2,12.2 +25.2,36.9 +26.3,36.2 +29.5,33.8 +25.2,28.6 +33.8,23.5 +24.2,54.7 +24.7,55.5 +25.2,49.8 +35.7,57 +36.4,59.7 +37.6,49.3 +39.9,49.7 +39,45.2 +38.7,44.5 
+21.6,79.4 +24.7,77 +26.1,70.3 +33.7,66.5 +31.2,74.4 +30.5,78.6 +35.1,74.6 +35.3,61.1 +39.2,64.5 +38.2,60 +36.3,60.4 +37.7,62.4 +27.2,75.6 +30.2,86.1 +24.2,88.1 +20.9,86.1 +23.5,99.7 +28.7,99.7 +25.1,84.6 +34.6,86.3 +30.4,93.6 +32.7,93.9 +34.8,92.2 +30.3,99.1 +36.2,94.9 +37.5,94.3 +39.8,91.1 +35.3,89.6 +39,87.4 +39.3,85.2 +39.2,82.9 +28.3,114.4 +21.2,104.7 +28.4,109.4 +28.3,105.3 +27.6,104.4 +39.7,118.5 +37.8,132.1 +28.8,139.6 +29.8,136.8 +28.4,120.7 +30.8,124.4 +34.4,127.3 +39.2,136.7 +37.3,127.2 +37.9,121.4 +23.7,141.8 +28.9,151.2 +26.3,144.5 +32.4,143.9 +30.6,147.4 +31.8,147.8 +30.3,149.4 +32.6,150.2 +33.3,152.2 +35.2,147.5 +37.4,149.7 +35.5,140.8 +37.6,179.3 +34.4,171.2 +28.3,164 +35,178.9 +34.5,199.1 +27.2,182.8 +32.3,187.6 +32.5,197.4 +35.2,198.1 +35.5,192.3 +22.9,214.6 +21.2,229.4 +20.7,259.8 +29.9,244 +32.6,246.3 +36.6,245.9 +36.9,242.7 +20.1,262.4 +26,278.3 +28.7,276.9 +25.5,260.4 +33.3,266.5 +36.2,270.9 +36.8,274.1 +20.9,291 +22.9,287 +34.7,307.6 +30.4,322.8 +38.2,330.2 +39.2,329.1 +38.6,324.1 +27.2,354.8 +28.1,340.1 +34.1,341.8 +33.8,359.4 +33.5,355.2 +36.8,358.7 +36.8,352.3 +37.4,374.5 +37.2,373.4 +31.9,363.8 +25.2,374.6 +22.8,372.2 +30.6,371.5 +35.2,371.9 +35.4,390.3 +35.9,383.7 +30.5,381.3 +30.4,390.6 +21.6,411.6 +26.5,417.5 +27.9,440.3 +24,476.4 +24.2,486.7 +20.4,492.2 +32.3,486.1 +55.1,0.5 +49.8,7.8 +55.4,12.5 +49.8,17.5 +52.7,9.5 +51.1,34.1 +58.5,37 +58.5,30.4 +42.5,47.5 +42,52.6 +42,54.6 +43.7,54.7 +45.1,55.3 +48,56.5 +47.3,54.5 +47.6,42.2 +53.3,40.7 +59.1,55.6 +41.4,63.8 +40.2,69.2 +43.6,68.7 +42.9,74.8 +44.4,71.7 +46.9,75.8 +46.4,74.1 +47.6,60.8 +46.8,61.8 +52.4,71.8 +40.8,84.7 +41.6,83.9 +41.1,86.8 +40.7,89.8 +42.3,84.9 +46,95.9 +46.2,97.1 +58.9,96.6 +56.9,95.9 +56.2,94.2 +40.5,101.7 +44.1,119.8 +48.4,113.8 +49.1,114 +49.5,112.5 +45.7,106.1 +46.4,104.2 +52.4,102.7 +50,107.2 +54,109.1 +53.5,105.8 +50.7,111.1 +51.2,112.8 +53.6,114.8 +53.2,116.7 +58.2,113.8 +58.5,110.7 +57.8,109.1 +59.2,108.8 +56,101.5 +40.3,136.3 +46.1,128 +54.4,123.3 +52,120.7 
+52,128.4 +51.2,131 +50.8,134.3 +56,125.2 +42.8,144.9 +54.8,159.6 +46.5,161.5 +45.5,164.7 +41.8,177.5 +45.9,171.1 +54.6,191.6 +55.7,196.8 +58.7,199.4 +44,184 +47.2,194 +53,185.8 +58.9,187.9 +40.6,212.2 +54.1,204.4 +58,205.1 +59.8,202.9 +49.2,235.1 +41.4,235.9 +48.4,238.8 +48.9,248.8 +47.8,244 +52.8,240.6 +54.5,247.5 +44.9,260.2 +45.6,277.9 +54.7,266.8 +57.5,267.2 +41.9,293.3 +54.6,291.2 +44.4,338.8 +40.8,333.8 +54.4,324.7 +53.8,336.1 +53.1,336.6 +55.2,322.2 +56.6,321.8 +45.4,346.9 +46,340.7 +52.8,350.7 +59.7,358.8 +42.4,370.3 +41.6,372.7 +46.1,380.9 +43,381.3 +43.2,386.2 +43.6,397 +43,397.2 +47.5,384.1 +42.4,385.4 +44.8,399.5 +43.7,401.3 +56.2,449.3 +48.3,452.1 +43,458 +68.7,33.6 +60.7,30.8 +61.5,37.6 +65.9,36.1 +68.1,37.6 +76.6,29.1 +79.6,29.1 +77.8,22.3 +75.8,58.8 +60.1,40.1 +74.5,68.6 +67.5,64.9 +60.9,68.1 +67.1,75.1 +69.9,70.2 +66.2,72.9 +70.6,74.8 +72.6,78.1 +77.8,76.4 +61.6,91 +64.8,99.2 +65.2,98.2 +72.7,82.6 +72.1,91.9 +71.8,98.2 +75.3,83.7 +77.7,80.8 +63.8,109.9 +60.1,111.2 +60.9,111.8 +63.1,110.3 +62.4,111.4 +69.1,108.8 +70.6,103.8 +72.2,103.4 +73.6,103.2 +71.2,102.2 +70.3,105.5 +71.2,110.3 +71.4,116 +78.8,116.4 +75.1,114 +79.4,113.5 +79.2,112.4 +65.2,138.9 +68.2,139.1 +71.1,124.2 +74.7,122.8 +73.4,132.4 +72.8,139.7 +74.1,139.4 +74.9,139.6 +74.5,139.3 +74.6,139 +74,138.1 +74.6,137.3 +72.1,136.4 +72.2,136.8 +75.8,136.8 +78.4,138.1 +76.2,136 +76.1,138.2 +76.3,123.5 +77.7,120.6 +77.5,122.4 +62.8,143.6 +64.6,148.2 +64.1,145.8 +63.9,154.9 +67.1,158.5 +66,151.1 +66.8,154.2 +67.8,149.2 +68.7,149.6 +66.7,144.9 +72.3,141.5 +70.4,147.1 +70.1,147.9 +73.6,149.2 +74.4,147.2 +70.5,153.2 +76.4,147.7 +77.4,147.5 +76.9,145.7 +75.4,142.9 +75.7,144.4 +79.3,140.2 +78.2,168.5 +64.2,189.3 +74.5,193.6 +61.6,185.5 +60.6,194.5 +70.2,198.7 +74.6,198 +74.5,197.1 +72.5,197.8 +60.5,208 +65.4,214.7 +66.4,210.2 +71.3,201.3 +76.8,210.2 +75.5,200.9 +76.9,202.8 +72.8,239.1 +77.5,230.7 +66.1,258.9 +64.3,241.9 +69.4,251.3 +68.5,251.4 +69.2,240.6 +70.3,253 +74.4,251.9 +79.3,254.6 +63.4,268.3 
+62.5,270.2 +63.6,272 +69,274.2 +67.4,270.9 +65.8,265.1 +73.7,260.8 +79.4,290.1 +60.5,299.7 +71.3,284.5 +64.5,288.1 +64.1,296.38 +66.4,294.9 +68,289.8 +66.4,287.4 +70.9,294.9 +74.2,293 +76.4,290.8 +78.8,281.8 +65.9,305.2 +64.8,303.3 +67.2,313.7 +66.5,302.8 +73,307 +76.4,315.3 +61.8,329.6 +60.7,338.9 +65.8,335.8 +76.4,324.7 +78.4,351.8 +60.2,342.4 +60.8,344.4 +67.2,357.5 +65.2,354.1 +68.4,348 +72.9,354.1 +72.3,352.7 +75.6,341.4 +62.9,360.7 +62,361.9 +68.9,376.8 +65.9,365.6 +78.1,375.4 +77.6,371.8 +70.6,386.2 +71.6,388.6 +65.2,410.2 +66.7,428.6 +71,430.2 +65.1,492.1 +67.1,492.5 +66.5,485.9 +66.4,486.7 +65.9,488.2 +66.8,487.6 +77.9,495.6 +76.6,492.7 +79.8,481.8 +78.2,480.6 +94.3,8.1 +93.5,5.2 +92.1,14.6 +93.2,19.7 +95.7,10.1 +83.4,26.7 +82.1,30.7 +84.7,38.8 +84.7,37.5 +83.5,35.4 +89.7,38 +86.1,38.3 +89.5,30.8 +85.8,33.2 +85.1,29.4 +92.8,29.4 +91.6,26.6 +90.4,32.5 +91.6,37 +81.3,44.9 +85.8,59.7 +98.6,59.6 +89.4,58.1 +94.1,40.6 +93.5,53 +91.3,56.8 +92.3,57.7 +98.9,57.8 +96.1,57 +96.7,49.4 +86,76.9 +90.2,79.8 +93.5,79.7 +96.2,79.2 +99,76.7 +96.4,74.6 +82.7,99.1 +82.2,96.1 +89.2,98.8 +89.1,90.2 +91,82.9 +93.3,81.8 +97.7,82.9 +87.5,117 +82.4,101.9 +82.5,109.1 +81.9,114.2 +84,115.1 +81.4,112.3 +82.5,118.4 +89.3,118.8 +89.8,118 +94.5,115.9 +95.8,108.8 +96.2,103 +96.4,100.4 +82.8,127.1 +87.1,136.5 +81.1,123.7 +82.8,126.7 +88.1,121.2 +94.8,137.4 +98.3,125.7 +99.8,122.9 +84.5,144.6 +81.2,140.1 +80.3,148.3 +84.3,146.2 +84.4,150.4 +82.5,158.7 +86.4,156.6 +87.6,149.4 +94.5,141.7 +91.4,145.3 +90.5,146.9 +92.5,147.4 +94.7,151.7 +96.9,159.6 +94.9,148.1 +98.2,143.6 +92.8,193.1 +86.8,198.7 +84.6,181 +83.4,193.2 +81.9,191 +82.7,199.8 +83.8,199.2 +96,199.9 +84.8,202.8 +82.1,209.3 +92.8,202.2 +99.5,210.3 +96.7,201.8 +81.2,241.1 +87.3,248.3 +94.6,253.5 +80.9,255.8 +88.5,256.6 +87.1,253.5 +99.4,251.9 +96.4,267.2 +92.5,277.5 +94.3,263.9 +88,285.4 +94,283.3 +90.2,286.6 +99.8,287.1 +99.5,301.3 +86,306.4 +86.7,319.5 +86.3,309.5 +94.6,313.2 +92,318.6 +95.7,310.9 +80.4,330.4 +91.8,332.8 
+97.4,359.5 +98.9,352.8 +83.5,365.9 +80.7,377.2 +89.4,360.9 +92.3,367.6 +99.6,378.1 +98.6,374.8 +97.2,361.2 +84.7,389.6 +86.2,394 +91.5,383.8 +90.4,388.2 +93.1,391.1 +97,392.7 +98.3,415.6 +94,418.1 +90.6,401.8 +98.1,401.7 +93.8,421 +89.8,434.8 +91.2,422.9 +96.3,426 +87.4,455.1 +86,442.2 +92.6,457 +98.1,445.4 +98.3,460.4 +86.8,474.6 +95.1,490.8 +80.6,482.1 +108.8,11.4 +106.4,5.3 +119.6,2.3 +113.2,33.2 +114.8,36.2 +115.3,32.2 +114.1,54.2 +102.5,59.2 +116.7,40.7 +104.1,60.7 +118.1,70.4 +104.7,83.9 +104.6,89.1 +105.2,96.4 +105.7,80.1 +113,97.4 +114.3,96.2 +116.2,96 +115.9,94.7 +115.2,88.8 +103.4,115.6 +106.5,114.7 +114.1,103.8 +111.5,115.1 +116,109.1 +118.6,107 +118.6,102.3 +108.4,114.4 +107.1,136.6 +104.9,126.5 +115.1,129.5 +100.3,126.2 +100.6,135.1 +106.6,132.1 +107.8,133.9 +105.6,123.6 +111.1,124 +114.6,121.8 +110.7,125.7 +114.1,125.9 +113.4,132.9 +114.6,135.9 +119,125.4 +109.6,141.2 +104.2,146.2 +100.9,150.8 +104.7,154.2 +104.8,153.4 +101.7,157 +108.9,149.4 +108.5,143.8 +108.1,141.2 +107.2,140.4 +112.4,153.2 +115.8,158.4 +115.8,159.6 +119.7,158.2 +117.7,158 +115.3,151.2 +102.7,163.2 +103.5,174.4 +101.4,179.3 +104.2,177.5 +105.4,176.3 +105.1,177.1 +106.2,179.3 +106.2,178.4 +106.3,177.6 +108.7,177.7 +106.7,176.7 +113.9,174.6 +112.3,177.6 +117,178.1 +116,160.5 +115.1,161.2 +109.6,184.4 +113,183.1 +119.9,186.8 +110.2,203.4 +112.3,214.5 +108.5,255.3 +103.9,258.9 +101.9,259.4 +111.7,249.1 +101,246.1 +108.5,246.4 +111.6,258.4 +118.1,257.1 +107,269.1 +100.7,269.8 +102.9,273.3 +103.4,275.3 +108.8,275.4 +107,263.2 +109.8,262.1 +118.6,263.8 +119.1,261.4 +102.8,289.5 +108.1,287.7 +108.8,280.9 +114.5,292.1 +117.8,281 +113,311.7 +101.5,316.1 +105.8,328.8 +119.4,330.4 +116.4,327.2 +119.7,328.2 +104.1,345.3 +103.6,345.5 +114.3,358.5 +117.6,367.4 +108.4,362.8 +104.1,362.3 +103.9,369.2 +106.9,372.6 +116.9,364.3 +118.7,361.6 +101.1,391.2 +102.6,390.5 +100.6,395.4 +104.8,413.4 +104.4,410.5 +119.3,419.6 +115.3,405.3 +118.6,403.6 +106.6,432 +104.3,420.9 +109.1,425.8 +109,424.1 
+109.6,422.4 +119.3,430.6 +118.3,441.6 +101.5,446.4 +106.5,445.3 +112.3,449.5 +104.7,463.8 +110.1,461.2 +103.3,479.2 +107.5,479 +108.4,479.7 +105.3,471.5 +108.5,471.9 +108.5,471.1 +117.2,469.7 +106.5,499 +109.9,480.7 +138.7,16 +139.5,12.2 +137.1,0.7 +129.3,37.5 +134.5,50.9 +121.2,53.3 +124.2,57.2 +139,46.8 +124.6,63.2 +133.5,70.2 +130.3,98.4 +120.8,102.2 +126.2,110.4 +120.5,122.4 +120.1,147.8 +123.7,154.1 +121,157 +126.3,158.8 +129.7,155.5 +130.4,152.4 +134.2,151.1 +136.4,142.9 +120.3,162.4 +121.7,162 +121.5,165.4 +129.5,177.1 +128.3,174.9 +130.5,176.2 +137.1,179.9 +138.9,178.4 +139,175.9 +139.8,175.8 +135.7,173.5 +139,168.5 +139.4,199.7 +128.5,196.5 +134.7,190.6 +132.9,195.7 +137.9,195.8 +139.9,180.7 +139.3,203.5 +139.5,211.5 +121.9,216.8 +131.2,214.2 +137.2,215.9 +135.3,210.1 +137.3,207.2 +127.2,223.3 +122.8,228.2 +120.9,230.1 +131.1,223.1 +132.4,224.5 +138.1,220.6 +123.1,257.7 +133.8,259.6 +129.4,276.8 +123.2,267.1 +123.7,284.6 +124.2,295.6 +128.4,295.5 +125.8,291.1 +128.9,283 +134.3,296.8 +134.7,317.2 +138.5,317.4 +126.2,319.3 +122.7,306.9 +127.1,303.6 +130.6,304.4 +124.3,322.6 +129,333.6 +135.3,337.5 +133.2,324.7 +135.3,321.4 +125.9,355 +122.9,352.3 +123.3,346.9 +130.9,354.6 +133,368.3 +135.2,363.9 +128.5,379.8 +126.3,362.8 +133.8,370.7 +131.8,397.8 +121.3,388.1 +134.8,392.9 +134.9,392.5 +134.9,397.5 +126.6,403.4 +126.8,412 +123.1,419.6 +132.1,402.2 +134.5,417 +128.6,434.8 +137.9,435.4 +125.3,439.5 +123.8,421 +127.3,436.7 +125.6,430.3 +134.5,426.8 +128.6,441.7 +132.3,445.5 +134.8,440.2 +138.5,451.2 +124.9,452.8 +122.7,444.5 +123.2,449.5 +127.6,450.2 +139.5,441.3 +131.4,467.2 +120.7,467.9 +126.7,474.3 +128.1,469 +133,463.7 +139.2,476.8 +128.9,482.1 +135.7,490.4 +151,15.9 +151,17.7 +145.4,36.5 +150.6,31.4 +159.5,29.1 +156.3,36.5 +143,28.1 +143.3,31.1 +153.2,31 +158.9,32.2 +158.5,58.8 +159.6,59.2 +156.9,57.6 +155.9,52.4 +159.8,54.8 +154,75.2 +156.8,65.4 +151.2,81.7 +155.1,109.1 +140.6,117.3 +145.1,118 +155.2,139 +155.8,130.1 +150.4,134.6 +159.7,133.5 +140.8,137.2 
+159.7,127.6 +158.4,127.8 +155.1,123.7 +151.6,151.1 +158,168.9 +140.9,176.2 +140.8,178.3 +141.6,184.1 +144.1,184.4 +140.9,189.1 +145.3,184.3 +147.5,184.8 +154.5,185.9 +155.2,196.9 +157.9,197.2 +156.2,191 +143.8,214.6 +149.4,212.8 +150.2,204.8 +155.4,219.4 +159.6,212.2 +154.1,230.9 +140.1,238.1 +155.9,246 +159.6,248.3 +156.1,243.5 +151.1,295.1 +145.5,297.9 +142.8,299.4 +147.7,286.3 +156.5,299.2 +159.7,296.1 +155.4,286 +159.6,288.2 +148.8,317 +152.6,303.3 +141,303.9 +141.5,311.9 +149.6,311.6 +146.5,306 +145.4,300.5 +149.9,304.1 +149,301.6 +151.7,311.2 +155.2,312 +159.3,313.6 +156.7,308.9 +149.5,335.6 +148.7,333.7 +154.6,333.9 +147.7,367.5 +151.3,379.1 +159.3,379.2 +159.7,362.2 +144.3,393.9 +140.5,391.6 +144.8,389.8 +143.2,382.7 +154.1,388 +140.6,394.9 +149.7,396 +148.2,393.4 +142.4,412.8 +140.9,416.9 +144.9,416.9 +147.9,416 +145.6,412.6 +147.9,414.7 +152.5,403.5 +153.4,419.8 +158.3,414 +146.2,423.2 +140.7,429.6 +146.4,420.8 +150.1,423 +154.1,439.3 +159.7,436.7 +156.6,433.6 +157.1,428 +158.2,422.4 +150.1,441.9 +153.7,444.6 +152.2,440.4 +151.2,447.2 +154.2,448.5 +152.8,462.9 +152.2,471.9 +158.5,472.6 +155.4,466.8 +157.5,465.5 +149.2,485.5 +170.4,2 +163.2,8.1 +168.2,4.9 +173.3,11.5 +173.5,17.9 +163.7,30.1 +167.9,29.3 +173.8,38.6 +164.2,40.9 +160.8,50.2 +164.1,54 +161.6,57.8 +167.3,58.8 +170.8,57.3 +173.1,58.4 +172.8,58 +178.1,54.3 +167.8,75.4 +163.7,68 +178.9,65.9 +170.4,92.6 +163.3,118.3 +166.5,101.3 +172.3,105.1 +179.2,106.9 +173.1,128.9 +168.1,129.1 +167.4,128.8 +166.9,138.5 +161,131.1 +171,133.3 +167.1,162.5 +168.4,167.4 +164.1,170.4 +179.9,160.3 +160.4,190.2 +162.9,181.2 +160.3,196.4 +169.9,198.1 +169,195.1 +169,196 +173.1,184.1 +172.9,188.9 +172.3,192.3 +178.7,195.5 +176.9,196.4 +176.2,184.1 +179.5,183.5 +178.6,182.5 +169.2,196.4 +163.1,212.9 +166.5,216.8 +167.7,202.8 +173.6,205.1 +173.4,205.2 +174.3,215.2 +179.9,217.6 +176,200.9 +166.5,220.6 +167.7,238.2 +169.3,231 +166.2,220.7 +171,238.5 +162.4,241.2 +162,240.4 +160.4,247.4 +164.9,247.9 +163.7,246.9 +160.8,255.5 
+162.1,255.2 +168.8,249.5 +170.3,244.6 +164.4,253.2 +163.2,277.4 +171,279.8 +176.7,279.8 +168.8,298.2 +174.6,293 +173,299.7 +173.2,295.9 +175.9,292.8 +177.4,292 +163.6,309.5 +169.6,313 +160.8,317.4 +161.8,317.7 +167.2,310.2 +171.9,302.4 +171.8,304 +174.6,303.1 +173.6,303.1 +171.9,308.5 +171.9,309.7 +174,309.3 +173.2,305.7 +172.7,310.2 +177.4,314.8 +176.2,303.4 +178.2,300.7 +177.4,303.1 +168.8,337 +172.8,324.2 +174.9,326.9 +170.4,331 +174.8,334.7 +175.2,327.4 +177.7,323.7 +179.9,324.6 +173.5,340.2 +170.4,343.2 +172.9,348.3 +173.6,353.6 +178.3,371.9 +162.1,374.2 +164,378.2 +167.4,375.1 +169.8,368 +169.8,367.2 +164,396.5 +161.9,395.2 +165.8,383.3 +170.7,383.6 +175.9,381.4 +179.6,399.8 +163,409.9 +161.8,414.8 +163.6,414.5 +164.7,411 +163,415.8 +165.3,417.8 +168.4,416.1 +178.7,409.8 +178.2,405.3 +168,425.7 +163.1,434.1 +171.8,443.5 +177.4,444.5 +178.6,452.8 +163.7,452.7 +161.7,451.9 +160.1,458.5 +164.6,457.1 +169.8,447.2 +165.3,440.2 +169.7,443 +170.6,454.2 +173.9,459.8 +172.9,466 +177.4,469.5 +162.8,462.9 +176.1,475.9 +177.8,461.7 +166,480.4 +160.8,494.2 +172.4,489.5 +175.3,482.3 +194.9,17.5 +197.9,9 +181.5,15.9 +181,18.3 +185.1,15.1 +189,15.1 +198.1,16.2 +196.2,13.9 +191.9,37 +192.6,34 +190.4,20.5 +183.7,23.2 +180.4,35.3 +188,34.2 +188.4,29.6 +191.1,27.2 +193.2,26.6 +195.6,38.5 +196.5,37.5 +180.1,55.1 +183.1,58.8 +186.9,55.9 +187.9,58.3 +198.2,41.1 +181.7,61 +185.2,78.7 +186.6,76.7 +187.7,64.6 +193.9,62.1 +198.5,60.3 +193.9,91.2 +181.9,97.5 +188.9,90.2 +186.8,80.2 +188.6,118.9 +199,116.1 +199,118 +191.6,121.7 +189.7,139.9 +189.8,137.6 +189.4,136.7 +193.5,134.2 +186.4,158.5 +184,150.6 +193.2,141.9 +191.1,158.9 +196.1,173.1 +184.8,163.1 +192.5,185.7 +196.2,182.3 +181.2,199.1 +196.8,195.7 +197,198.2 +199.5,189.1 +189.2,201.8 +184.3,209.9 +180.8,218 +181.9,218.9 +193.2,219.6 +198.9,219.8 +195.2,212.9 +182.1,222.1 +181.6,227.9 +194.1,232.9 +195.7,222 +199.6,244.4 +191.8,245.8 +190.1,255.9 +188.6,266.9 +187.5,273.6 +180.8,279.6 +184.3,278.2 +183.9,276.7 +195.5,268.8 
+196.4,299.3 +189.1,280.7 +195.7,281.3 +199.5,282.7 +184.3,297.2 +180.5,285.6 +184.8,285.1 +181.7,287.2 +180.2,291.9 +184.6,291.6 +181.2,296.1 +182,295.5 +180.5,299.3 +185.1,296.2 +189,292 +189.4,281 +191.3,289.5 +189.5,308.4 +189.8,305.9 +193.9,301 +180.1,300.6 +180.4,304.8 +181.7,304.3 +183.7,304.2 +182.4,302.3 +181.7,305.8 +183.1,310.5 +187,319.6 +189.2,310 +187,309.5 +189.6,303.5 +192.1,307.6 +198.5,319.8 +199.1,310.8 +199.7,304.4 +199,301.3 +191,339.6 +181.2,322 +186.3,324.4 +190.5,325.5 +194.4,336 +184.5,347 +182.1,349.2 +181.1,351.2 +184.2,354.2 +181.8,357.9 +189.9,349.7 +185.7,340.3 +199.2,374.1 +196.5,361.9 +191.9,364.9 +194,364 +193.1,366.6 +192.1,374.6 +197.6,378.8 +195.4,367 +190.6,387.7 +181.9,396.7 +183.5,397.1 +194.9,404 +182,408 +180.1,412.8 +189.9,416.1 +187.8,418.6 +191.2,409.8 +188.4,436.7 +184,433 +197.3,428.1 +188.4,429.4 +189.1,432.2 +190.7,423 +199.5,452.6 +188.6,456.6 +182.9,448.6 +181.9,440.6 +187.4,440.5 +180.8,456.9 +181.9,458.7 +191.3,444.5 +187.4,477.7 +189.5,471.2 +198.7,468.9 +180.6,461.5 +181.8,468.1 +188.1,465.1 +182.4,480.1 +195.6,497.9 +204.9,15.7 +204.5,11.6 +201.2,13.6 +211.5,5.7 +213.7,15.8 +216.4,8.7 +207.1,38.3 +207.2,22.1 +209.4,22.9 +210.9,21.1 +211.5,21.1 +212.5,30.3 +214.1,28.1 +200.4,43.5 +203,47.2 +200.9,50.9 +202.1,58 +206.3,44.3 +208.9,42.9 +212.4,42.6 +211.1,45.8 +217.1,54.1 +217.9,48.9 +206.1,77.4 +204.7,96.3 +201.2,89.4 +200.6,88.2 +200.3,97.3 +205.2,93.7 +209.6,93.8 +209.5,90.2 +214.1,80.8 +211.1,87.5 +214.4,90.4 +212.9,93.8 +214.5,98.9 +217.5,97.4 +216.2,94.2 +218.4,91.7 +215.3,83 +203.3,116.7 +210.9,105.9 +200.7,108.2 +202.7,109.8 +203,108.8 +209,110.8 +205.8,107.9 +206.1,109.1 +209.4,103.6 +219.8,101.3 +203.7,138.5 +202,151 +212.5,155.4 +201.1,144.9 +208.2,152.4 +205.4,144.1 +210.8,144.7 +216.4,142.4 +204.1,173.2 +206.4,171.5 +208.6,166.4 +213.7,170.2 +211.1,178 +219.1,178.9 +216.7,166 +206.3,189.7 +212,190.9 +216.2,189.4 +207.7,195.4 +208.8,181.6 +213.4,180.7 +215.8,191.9 +203.3,208.3 +201.3,219.3 +209.2,218.1 
+208.1,216.9 +209.9,213.6 +214.9,212.4 +215.3,218 +211.8,215.8 +219.9,217.4 +215.3,213.9 +200.7,224.6 +201.4,221.9 +201.5,227.9 +204.4,227.3 +205.7,228.3 +206.7,226.8 +205.8,221.2 +210.9,232.4 +213.7,236.6 +214,236.1 +215.7,236.4 +209.6,256 +203.7,244.1 +202.4,255.4 +219.1,246.4 +207.5,279.2 +207,272.1 +206.7,266.3 +215.3,276.1 +203.6,285.2 +208.1,282.2 +204,288.1 +211.4,298.5 +219.4,293.6 +213.8,301 +210.9,306.3 +204.1,311.5 +201.1,313.1 +200.3,307.7 +202,311.5 +207.9,312.5 +205.4,308.6 +214.7,303.5 +212.3,304.4 +213,307.8 +217.8,311.4 +218.3,311 +215.4,306 +216.8,305 +216.5,306.8 +217.1,306.8 +216.3,303.8 +219.1,302.4 +219.4,301.9 +203.6,327.5 +200.5,332.4 +208.9,330.2 +206.6,329.8 +200.2,357.6 +200.9,341.6 +201.1,352.3 +219.9,345 +202.4,362.9 +208.6,362.4 +215.7,369.6 +205.5,385.8 +205.4,383 +209.2,413.1 +214,410.6 +215.2,414.3 +215.2,406.4 +216.6,402.8 +210.5,423 +213.4,426 +215.6,430.7 +214.7,438.8 +204.2,436.5 +204.4,430.9 +209.8,435.4 +214.5,439.7 +215.2,436.3 +217.3,430.2 +201.9,457.1 +215.9,454.5 +218.2,445.8 +204.7,462.2 +209.6,476 +213.7,468.8 +205.7,494.4 +213,481.3 +212.4,499.8 +215.5,493 +217.7,490.5 +216.2,493 +233.3,15.5 +237.2,17.9 +220.1,19.4 +233,4.9 +235.2,4.7 +234.8,36.3 +237.8,30.1 +232.4,24.5 +225.8,28.9 +221.6,21.4 +224.1,24.1 +220.4,26.8 +220.5,29.8 +221.2,36.1 +237.5,34.4 +238,36.1 +231.5,52.2 +235.4,51.3 +223.3,40.8 +239.5,48 +224.3,53.8 +226.5,47.4 +235.4,87.6 +231.7,83.8 +228.2,93.1 +227.5,88.1 +227.7,80.8 +232.7,94.2 +230.8,98 +234,99.2 +236.6,99.7 +239.9,99.3 +239.5,98.8 +239.9,96.7 +235.9,97.4 +238.5,90.8 +235.3,92.7 +234.6,100.9 +226.2,112.1 +228.4,108.5 +228.9,102.5 +230.4,104 +233.4,104.4 +233.8,101.9 +232.3,100.2 +234.3,105.8 +234.6,111.7 +238.7,105.4 +235.2,106.2 +236.5,108.9 +238.3,108.2 +238.4,100.7 +237.5,101 +236.3,104.7 +225.2,113.2 +238.9,103.5 +221,138.6 +228,129.6 +228.4,125.4 +235.5,154.2 +231.2,146.8 +223.1,143.6 +221.2,147.8 +233.9,148.9 +238.3,144 +228.4,160.4 +225,166.5 +230.9,167.9 +222.7,183.8 +224.3,180.8 
+221.4,186.2 +233.2,196.2 +235.8,185.7 +223.5,213 +221.1,218.5 +228.8,204.8 +233.3,210.9 +238.3,203.8 +223.7,229.6 +224.4,231.5 +229.1,228.2 +230.4,221.2 +230.2,225.1 +234.9,233.2 +238.5,226.6 +237.6,251.8 +236.4,254 +223.4,265.2 +221.9,299.1 +220.2,293.8 +220.7,294.8 +221.6,293.9 +225.1,298.1 +227.3,289.9 +228.2,288.2 +225.5,282.8 +230.4,297.9 +237.3,285.7 +220.5,303.9 +224.8,304.2 +224.8,306.5 +222.1,314.5 +229,311.1 +226.7,301 +232.2,314.2 +233.2,318.7 +232.6,321.3 +235,325.3 +238.3,357.2 +236.6,352.9 +228.7,354.3 +232.7,356.4 +236.7,347.4 +235.1,348.4 +233.7,371.6 +239.3,374.2 +239.3,368.1 +220.7,372.1 +227.3,378.5 +232.1,380.2 +223.3,396.4 +236.7,393 +221,409.5 +233.8,406.2 +231.2,414.8 +237.2,436.1 +234.6,429.2 +226,428.3 +233.6,425 +235.5,431.3 +221,448.3 +236.9,456 +238.6,452.2 +237.9,445.4 +238.2,444.8 +234.7,479.8 +228.7,471.1 +220.3,477.2 +234,492.3 +233.5,498.8 +237.1,493.5 +223,495.3 +240.3,0.5 +243.6,1.3 +242.4,15.4 +247.9,11 +258.9,18 +244,44.1 +244,49.6 +240.5,50.2 +241.4,51.4 +255.9,46.2 +258.6,43.3 +258.3,41 +249.1,76.5 +252.2,79.4 +254.1,85.5 +250.1,85.8 +244.4,82 +244.4,88.6 +246.4,97.3 +255.4,90.5 +254.6,98.5 +241.9,93.4 +243,92 +240.5,95.5 +240.7,97.7 +241.7,99.5 +245.1,88.6 +247,89 +250.5,84.8 +253.6,84 +250.1,89.8 +252.9,93.5 +253.9,95.7 +259.1,99.1 +259.9,97.5 +256.5,94.6 +256,81.7 +240.9,104 +242.9,104.1 +241.4,103 +243.6,110.5 +249.5,102.5 +248.4,101.1 +247.6,101.1 +253,104.7 +257.4,110.2 +257.3,110.1 +244.1,131.2 +252.5,145.4 +252.2,147.2 +240.8,141.8 +248.8,154.6 +245.8,149.8 +246.9,149.4 +250.8,152.9 +252.8,154.2 +258.4,155.2 +259.5,152.5 +258.3,150.9 +255.3,165.3 +249,172.9 +246.4,164.6 +258.6,170.8 +248.2,199.2 +247.9,197.4 +249.7,193.1 +245.4,186.3 +245.4,188.8 +250.4,194.8 +252.6,191.3 +253.6,193.4 +259.7,197.6 +257.1,195.4 +242.1,202.1 +247,209.4 +245.1,215.5 +243.1,218.7 +242.5,210.4 +249.3,200.8 +253.7,202.2 +251,214.4 +259,205.9 +257.3,200.3 +251.8,220.4 +249,223.1 +245.2,224.1 +241.4,228.9 +240.3,238.1 +246,238.8 +250,229.4 
+251.1,229.4 +253.7,228.7 +245.5,252.4 +252.7,249.4 +254,250.7 +259.2,258.5 +256,253.2 +258.9,250.9 +256.9,250.6 +257.2,251 +259.6,246.8 +257.7,245 +255.4,243.1 +253.8,272.4 +251.6,272.7 +241.4,289.6 +242.1,287.1 +244.9,292.2 +248.1,299.3 +246.7,290.6 +246.1,292.5 +240.2,315.9 +242,315.4 +245,316.5 +249.3,308 +249.2,301.6 +252.6,303.9 +253.1,308.3 +257.5,318.5 +258.6,329.2 +259.1,331.6 +244.9,322.1 +249.4,336 +250.6,338.1 +249.5,346.6 +252.7,351.1 +247.4,351.1 +242.1,359.8 +241.9,344.2 +244.8,340.1 +243.8,359.5 +245.1,353.8 +248.1,346 +254.5,352.1 +251.3,357.2 +257,355.2 +255.6,353.4 +259.4,340.7 +255.7,366.6 +251.5,366.7 +245.7,364.1 +242.4,373.8 +242.5,370.6 +246.2,379.7 +248.8,376.3 +248.7,361 +253.7,377 +248.1,394.6 +247.6,394.1 +254.7,414.3 +242.4,406 +249.3,412 +256.3,418.5 +259.7,410 +247.1,422.9 +240.7,425.5 +242.9,435.6 +251.4,426.7 +259.3,435.9 +250.9,439.3 +254.5,438 +247.3,423.1 +252.8,431.4 +255.2,434.8 +257.4,433.6 +247.5,440.6 +242.1,441.1 +255.6,443.2 +242.3,449.9 +255.4,451.4 +258.18,450.82 +251.1,463.2 +251.8,479.6 +240.4,499.5 +249.3,499.5 +245.4,494.2 +257.3,483.7 +278.8,9.1 +274.2,4.6 +264.6,3.1 +268.7,3.6 +270.9,3.9 +271.5,8.1 +274.8,9.1 +275.8,18.7 +277.9,16 +277.6,3.8 +273.5,32.8 +261.5,29.5 +264.6,34.7 +266.5,29.9 +274.7,25.3 +272.1,38 +271.5,39.4 +279.2,37.7 +279.6,36.3 +278.1,30.9 +277.1,26.8 +278.4,25.6 +276,24.2 +268,49 +263.8,53.7 +261.4,55.9 +269,59.4 +273.1,54.4 +270.4,58.2 +275.4,45.1 +271.3,79.1 +275.5,70.3 +271.5,69.8 +273.7,66.2 +260,76.2 +261.5,79.8 +268.8,71.4 +269,62.7 +279.2,64.7 +264.7,93.7 +273.8,91 +267.2,82 +260.9,82.5 +261.7,88.3 +277.3,96.7 +272.2,106.4 +264.9,100.7 +263.2,106.2 +260.2,117.4 +264.5,115.8 +274.9,106.4 +270.2,117.2 +279.8,101.7 +262.2,131.2 +266.7,124.2 +270.9,123.5 +276.8,134 +261.3,144.8 +260.4,152.8 +264.8,151.7 +260.4,157.6 +261.8,159.8 +262.5,155.3 +266.1,157.4 +265.2,158.9 +267.1,155.3 +265.5,150.8 +266.2,151.3 +265.1,153.2 +266.3,145.8 +265.8,148.9 +270.5,151.2 +274.9,154.8 +274.8,151 +271.8,151.6 
+271.3,153 +272.7,153.2 +270.5,158.9 +274.6,158.2 +279.6,154.9 +275.3,149.6 +273.7,164.1 +273.8,161.6 +269.4,164 +268.3,166 +266.3,175.8 +262.6,174.8 +273.2,164.7 +271,166.2 +273.8,169.6 +272.7,172.8 +270.3,179.3 +274.4,176.7 +277.4,176.1 +277.8,177.9 +278.2,165.6 +277.4,166.7 +262.2,183.4 +262.8,181.8 +268.9,192 +265.6,186.1 +265.9,186.5 +269.3,186.8 +270.1,183.8 +273.5,183 +273.3,187.6 +271.3,192.3 +273.2,197.2 +278.3,187.6 +276.4,184.3 +260.6,201.6 +265.3,215.3 +265.9,219.4 +269.9,214.5 +276.2,209.3 +261.4,231 +273.1,229.4 +273.2,230.2 +273.3,233.6 +275.3,238.7 +276.3,233.1 +278.9,233.5 +274.5,257.8 +263.8,253.5 +261.8,256.1 +270.2,266.8 +269.3,277.9 +268.4,264.1 +278.5,260.9 +279.7,296.6 +266.4,309.7 +260.5,339.4 +277.9,339.5 +275.7,352.2 +264,347.1 +260,352.3 +264.2,358.9 +263,355.3 +266.8,352.1 +275.8,353.9 +260.6,369 +264.2,362.5 +261.7,368.9 +268.2,395.8 +263.8,382 +266.8,390 +266.9,419.5 +266.5,412 +262.2,408.6 +263,404 +269.2,413.8 +279.5,437 +279.3,422.7 +272.6,424.7 +269.5,434.4 +272.4,438.5 +273.9,439.1 +275.1,422.1 +264.2,443.7 +261.7,442.8 +268.6,441.7 +263.1,474.4 +265.2,478.6 +266,477 +272.4,477.9 +273,476.9 +263.7,481.7 +262.4,481.9 +267.6,494.7 +269.4,492.5 +268.8,493.1 +266.5,489.3 +266.2,489.9 +265.3,480.3 +266.1,481.1 +267.5,483.5 +267.4,480.3 +270.4,484.1 +271.7,485.8 +273,496.2 +269.2,487.2 +289.6,4.8 +282.4,6.8 +283.6,18.4 +286.2,2.2 +285.5,4.2 +292.6,18.4 +293.1,16.2 +281,30.6 +280.2,38.3 +281.8,38.4 +288.1,23.2 +285.3,32.1 +290.6,20.6 +294.1,26.1 +293.9,25.4 +298.7,33.9 +297.2,31.3 +297.6,29.4 +289.9,53.4 +287.2,52.3 +288.4,46.4 +287.1,43.3 +293,49.4 +297.2,40.8 +284.6,40.6 +284.4,65.2 +289.4,61.2 +294.5,61.1 +291.2,79.8 +296.7,70.8 +296.3,69.1 +298.1,67.7 +295.3,62.3 +296.2,64.8 +298.8,87.5 +293.2,80.5 +280.6,97.8 +281.6,87.2 +282.9,87.8 +284.3,85.8 +281.5,97.6 +282.6,98.8 +284.9,97.8 +285.1,90.3 +285.8,87.3 +287.5,89.3 +289.2,89.6 +289.6,87.1 +285.1,85.1 +297.5,90.2 +283.7,100.1 +282.8,113.4 +289.3,117.5 +286.2,105.3 +294.7,101.8 
+291.6,112.9 +297.8,119.3 +296.3,114.5 +299.5,114.3 +295.4,107.1 +282.4,133.4 +286.2,135.5 +289.9,138.4 +291.9,127.1 +283.7,141.3 +282.8,153.3 +283.3,159.7 +288.6,149.2 +292.6,147.3 +292.9,156.7 +294.5,172.2 +294.1,175.1 +284.4,162.5 +281.8,168.2 +282.3,166.1 +283,171.3 +287.8,172.1 +285.8,169.4 +287.8,168 +286.9,164.5 +290.2,179.1 +298.3,173.2 +295.8,161.8 +293.9,184.5 +298.5,183.6 +291.6,186.1 +284.4,182.1 +284.6,189.7 +283.9,186.3 +280.3,190.3 +280.9,190.8 +284.8,193.8 +286.7,192.1 +287.4,191.3 +286,185.2 +294.1,194.6 +296.5,199 +283.3,213.9 +288.1,218.7 +288.1,215.1 +289.1,209.6 +287.9,208 +294,212.1 +298.1,218.9 +298.7,219.3 +285.9,221.4 +280.2,229.8 +281,220.4 +284.3,233.1 +280.5,236.9 +281.5,239 +283,238.6 +282.8,239.5 +284.5,237.9 +285.1,236.7 +285.2,237.8 +286.2,235.2 +293.1,235 +293.3,231.3 +294.5,235.5 +295.2,235.2 +298.7,226.4 +297.3,221.8 +286.7,246.2 +285.4,251.1 +289,243.9 +288.1,242.9 +290.3,240.4 +291.3,244 +291.5,244.1 +290.2,243.9 +294.9,249.6 +292.4,250.3 +292.1,255.1 +296.6,240.7 +282.2,272 +289,271.5 +290.6,269.7 +298,266.1 +283.3,297.4 +283.9,296.5 +288.1,295.2 +294.6,292.3 +283.1,312.6 +285.5,301 +285.5,302.2 +291.3,304.9 +298.6,301.8 +284,324.1 +286.1,321.5 +284,355.4 +281,348.1 +294.5,346 +299.6,340.2 +286.7,366.9 +280.6,375 +298.2,372.4 +289,389.1 +285.1,408.6 +289.8,414.7 +299.5,412.3 +298.4,411.5 +282.9,423.1 +290.5,422.7 +298.8,426.6 +288.3,453.1 +298.7,451.5 +299.2,450.4 +289.3,478.4 +285.4,465.1 +298.9,475 +298.2,483.7 +285.1,494.7 +291.6,497.2 +304.7,10.8 +309.7,2.9 +309.9,0.5 +311,3.5 +314.6,4.2 +300.1,28.5 +303.1,25.4 +303.6,26.2 +301.8,27.6 +304.6,38.4 +302.4,35.2 +305.2,39.2 +307.1,34 +309.3,27.5 +307.5,25.9 +314.9,25.1 +311.4,31.3 +315.6,31.2 +318.6,27.1 +300.9,41.1 +303.7,49.3 +304.8,50.7 +300.5,56.5 +302.8,59.2 +303.7,56.5 +305.9,57.5 +305.8,58.1 +305.2,57.8 +305.2,58.8 +306,59.6 +308.8,59 +305.5,40.9 +307,44.5 +311.5,59.5 +319.2,55.4 +315.5,51.6 +319.5,54.7 +319.7,48.4 +304.2,64.2 +304.4,62.5 +302.2,61.4 +302,69 +303.7,68.2 
+301,71.7 +303.5,71.2 +305.2,78.5 +308,73.7 +309.3,74.1 +308.5,72.5 +306.6,70.5 +305.4,69.3 +306.3,67.7 +307.1,60.5 +308.3,61.5 +310.2,64.6 +312.1,69.5 +310.1,73 +310.8,74.9 +312.4,74.1 +313.1,72.8 +311.2,77.6 +311.8,79.2 +314.5,79.5 +317.3,79.5 +318.6,71.2 +318.6,69.2 +317.7,66 +316.7,63.3 +317.7,62.3 +300.8,83.4 +302.6,89.5 +304.3,88.3 +304.8,85.4 +304.7,93.7 +302.5,91.8 +301.5,98.9 +303.5,97 +302.8,97.1 +306.2,90.3 +305.5,92.7 +306,86.7 +308,87.5 +308.2,87.8 +305.4,80.8 +305.6,83 +306,83.5 +312.4,81.6 +313.6,87.2 +317.9,85.4 +317.3,80.5 +301.6,119.1 +318.3,118.9 +318.7,131.6 +301,133.6 +309.1,123.7 +309,120.3 +308.4,121.9 +312.4,120.8 +310.8,125.4 +314,156 +313.1,147.9 +316.1,144.2 +314.7,169.1 +300.6,160.9 +314.7,172 +317.9,176.8 +315.8,174.2 +315.1,167.3 +313.8,188.7 +306.6,191.5 +304.6,192.2 +309.3,181.9 +317.6,208.1 +317.8,200.6 +302.3,204.8 +301.7,215.5 +303.4,219.6 +302.6,218.4 +305.2,218.3 +307.4,216.3 +306.1,216.9 +305.5,213 +305.7,214.7 +307.4,212.6 +309.9,209.8 +312.6,213 +310.8,215.1 +312.9,217.1 +319.5,210.7 +319.7,208.7 +301.9,222 +303,222.8 +303.4,225.4 +315.9,239.8 +311.4,247.3 +316.3,241.4 +301.2,267.9 +314,265.9 +302.2,328.8 +300.7,337.4 +304.9,339.9 +304.4,337 +301.9,339 +309.6,330.2 +306.6,327.4 +316.3,325.2 +317.9,328.1 +309.5,359 +313.8,344.7 +310.3,352.2 +311.5,357.4 +315,357.7 +310.7,375.7 +306.2,370.5 +306.5,364.5 +309.2,364.9 +318.9,366.9 +319,360.6 +304.5,398.9 +309,395.4 +307.2,394.6 +319.1,396 +317.6,381 +301.2,402.8 +302.2,409.7 +303.3,405.5 +300.8,416 +310.7,413.6 +310.5,439.4 +302.2,456.9 +308.5,455.2 +312,454.7 +315.9,457.4 +318.3,455.4 +309.6,499.7 +309.2,492 +313.2,488.1 +301,488.1 +304.9,491.6 +301.3,490.3 +308.2,490.9 +305.4,489 +308.7,488.1 +328.5,4.4 +329.9,1.5 +332.9,5.2 +332.5,5.3 +331.1,6.7 +339,3.7 +336.1,4.2 +320.7,33.8 +337.4,27.2 +320.3,39.3 +333.2,32.2 +336.7,36 +336.4,30.8 +338.4,26.7 +320.6,43.5 +320,47.9 +321.7,49.3 +323.2,49.1 +323.3,50.1 +324,47.4 +320.6,50.7 +324.7,54.9 +322.7,53.2 +320.9,55.2 +322.3,59.3 
+322.9,59.7 +323.6,55.7 +324.1,54.9 +323.1,55.3 +326.7,56.5 +328.2,53.4 +325.9,54.8 +326.2,54.8 +324.9,48.5 +336.8,53.1 +321.9,59.8 +320.5,60.2 +321.8,60.5 +321.2,60.8 +321.2,62.6 +324.9,60.2 +321,69.2 +332.4,66.8 +329.4,71.1 +331,77.9 +332.7,79.2 +335.2,78.7 +336.2,77.1 +338.4,74 +338.4,67.8 +321.6,89 +321.2,90.2 +326,84.8 +324.9,118.8 +324.1,127.1 +332.1,127.3 +333.3,136.4 +323.8,135.1 +323,137.8 +327.6,136.9 +338.4,125.6 +320.5,146.6 +324.8,154.1 +323.8,152.8 +326.6,155.2 +327.4,157.6 +330.2,153.4 +330.8,159.1 +334.9,159.1 +325.3,177.5 +320.9,178.8 +331.2,169 +334.7,171.1 +335.3,178.8 +338,172.8 +339.1,169 +339.2,165.1 +335.2,168.1 +323.5,183.1 +337.9,180.7 +321.2,194.8 +324.6,195.1 +322.7,192 +323.3,199.9 +324.2,197.1 +329,198.2 +328.1,196.9 +325.7,197.3 +325.5,194 +329,186.6 +327.1,187.2 +330.8,180.3 +334.8,194.2 +330.7,198.7 +333.5,199.8 +335.7,190.1 +339.8,184.4 +338.1,182.9 +331.4,218.8 +321,208.6 +321.8,208.6 +324.1,213.4 +324.2,209.9 +321.4,210.3 +323.7,217 +326.1,201.7 +330.2,201.1 +330.8,202.1 +332.4,201.8 +332.1,211.2 +336.8,203.1 +336.6,201.6 +324.9,220.9 +320.7,235.1 +326.8,231.2 +337,237.3 +321.8,259.9 +327.8,254.3 +336.4,257.3 +337,246.3 +327.4,276.5 +337.2,314 +338.7,317.8 +328.1,325.6 +331.9,345.1 +324.5,359 +333.3,342.5 +334.8,350.6 +339.6,342.8 +321.6,378.4 +322,371.9 +328.3,376.1 +326.4,377.3 +325.9,363.4 +329.8,364.1 +331.7,366.8 +323.5,399 +337.3,399.3 +335.5,385.6 +331,385.5 +330.2,387.9 +339,397.5 +338,393.3 +336.7,392.3 +337.1,389.3 +337.5,382 +328.1,401.9 +337.2,408.2 +333.8,412.7 +325.6,410.7 +327,409.9 +336.4,410.2 +336.1,403 +339.1,403.4 +328.5,436.1 +334.1,436.8 +332.3,434.1 +337.6,438.5 +337.1,453.1 +322.2,443.6 +327.3,458.3 +328.7,445.4 +334,458.6 +338,451.2 +337.8,448.1 +336.5,442 +322.4,464.6 +337.4,464.5 +326.5,484.5 +325.8,491 +329.7,482.3 +338.9,494.3 +340.1,6.5 +348.7,18 +347,10.4 +345.3,9.3 +354.6,0.6 +358.7,1.9 +352,32.7 +355.6,59.5 +353.5,56 +352.8,54.5 +346.9,50.6 +341.3,40.8 +343.3,42.7 +343.1,45.4 +344.9,59 +349.2,41.1 
+355.1,45.5 +344.2,69.3 +348.7,66.3 +351.2,61.5 +342.7,61 +342.7,77.4 +347.1,71.5 +351,67.4 +351.9,74.1 +357.1,70.5 +356.5,74.4 +356.7,65.2 +341.5,83.8 +343,88.2 +349.4,91.7 +349.8,91.1 +347.8,85.4 +352.9,84.7 +354,81.2 +346.3,101.2 +340.2,103.3 +343.3,103.1 +344.3,101.7 +341,107.8 +341.5,108.6 +354.9,116.7 +355.4,105.9 +340.1,124.4 +340.6,129.9 +341.2,135.8 +350.7,128.5 +357.1,131.9 +359.9,140.1 +340.6,156 +354.1,159 +359.2,155.6 +356.1,150.9 +357.4,153.5 +343,179.5 +346.1,179 +349.6,178.5 +349.1,175.1 +348,173.2 +347.2,174.1 +352.4,174 +351,176.2 +352,177.8 +353.3,179.5 +356,178.7 +356.6,172.1 +356.4,166.9 +355.1,169.6 +344.9,181.6 +342.5,180.8 +341.2,189.6 +347.6,182.8 +348.3,180.9 +354.2,183.2 +354.5,182.7 +359,195.2 +359.8,180.4 +356.8,180.1 +349.6,208.2 +342.7,219.2 +348.6,214.2 +347.4,214.1 +350.4,218.5 +350.5,219.5 +359.9,217 +356,208.9 +356.7,207.8 +359,206.9 +355.8,204.4 +357.6,201.7 +355.7,230.2 +357.7,239.8 +344.3,222.1 +344,232 +349.9,220.6 +346.8,221.6 +347.1,224.6 +354.6,220.8 +350.8,220.9 +350.3,221.7 +352.2,221.2 +356.2,237.2 +358.7,224.6 +356.4,221.1 +356,221.8 +349.8,240.1 +343.2,250.8 +347,249.7 +340.5,279.7 +352.68,275.68 +341.2,285.9 +353.3,281.5 +359.5,292.2 +359.2,289.3 +340.5,310.3 +344.5,310.6 +345.1,319.5 +349.5,313.4 +345.6,312 +345.4,309.9 +343.5,339.9 +343.9,332.9 +342.53,339.11 +359.2,330.3 +354.2,355.6 +357.7,350.5 +352.3,358.4 +352.3,369.5 +343.5,362.4 +345.6,374.9 +349.3,369.1 +346.4,360.6 +351.1,363 +352.7,368.3 +353.7,373 +353.6,375.8 +358.2,370.2 +355.6,371.2 +347.5,395.1 +348.9,391.3 +349.2,388.6 +352.1,380.4 +355,397.3 +344.1,406.4 +344.8,403.6 +341.8,406.1 +358,411.8 +359.2,410.2 +357.9,405.9 +356.9,405.3 +345.2,434.9 +349.1,439.3 +356.4,434.8 +343.9,433.5 +341.5,423.3 +350.7,423.2 +358.5,438.6 +357.7,437.1 +359.1,443.6 +344.2,453 +349.7,455.2 +345.1,449.4 +356.8,440.3 +355.3,467.7 +350.7,467.8 +340.8,471 +359.3,474 +354.6,496 +343.9,497 +345.4,481.1 +375.9,17.3 +376.2,32.6 +366.6,38.8 +379.8,24.1 +377.5,20.7 +376,20.1 
+363.9,54.3 +363.1,52.6 +363.2,56.7 +362.7,56.3 +365.8,53.3 +374.1,41.7 +374.6,53.7 +377.4,55.5 +376,60.9 +360.3,62.4 +361,65.2 +362.1,69.6 +362.8,69.8 +364.2,67.6 +363.9,67.1 +361.1,72.9 +364.7,70.4 +363.7,70.2 +363.8,76.8 +366.1,73.9 +365.8,69.1 +366.8,66.7 +368.5,67.3 +374,74.3 +374.8,78.6 +375.9,78.5 +379.6,73.9 +365.2,74 +360.8,99.9 +360.6,80.2 +368,99.3 +370.1,98.2 +374.1,84.1 +372.4,96.6 +373.2,99.1 +376.2,99.6 +378.2,94 +379,90.2 +379.3,88.5 +379.9,89 +378.4,86.3 +377.9,85.6 +366,91.4 +364.5,106.9 +360.2,116 +366.5,115.5 +367,115.1 +368.7,113.1 +365.9,102.7 +368.3,104.7 +370.3,101.8 +370.5,101.9 +373.6,100.4 +372.9,101.7 +377.5,100.1 +367,134.2 +368.8,131.4 +371.4,135.1 +375.1,137 +378.5,138.8 +378.2,121.8 +378.2,122.1 +363.9,153.2 +362.4,150.4 +364.2,152.3 +361.1,155.1 +361,157.7 +362.1,159.9 +364.6,157.2 +364,156.1 +362,154.6 +366.5,154.9 +366.1,156.3 +369.9,156.4 +367.5,150.4 +365.6,141.2 +369,142.7 +371.2,144 +371.9,144 +374.4,142 +370.3,147.7 +371,147.2 +373.4,149.9 +370.5,150.1 +374.4,154.8 +372.3,155.7 +370.8,155.6 +377,159.6 +379.5,156.9 +379.2,152.9 +378.2,149.7 +375.3,146.9 +376.6,142.4 +379.5,144.6 +379.7,141.2 +368.7,153.8 +369.6,142.2 +371.7,143.8 +375.6,148.7 +377,152.9 +376.2,166.5 +363.2,170.2 +379,175.5 +369.8,169.1 +366.8,166.8 +366,163.3 +373.6,161.2 +372.3,173.5 +375.5,179 +379.4,178.9 +375.8,174.6 +377,166.2 +363.5,177.8 +363.9,176.9 +374,190.2 +360.4,188.4 +363.9,187.4 +360.1,190.2 +361.2,193.8 +364,190.5 +364.4,190.1 +364.8,198.9 +365.5,195.7 +367.1,194.4 +367.6,191.5 +365.9,192 +365.7,183.4 +377,191.5 +379,188.8 +362.2,180.1 +362.6,193.5 +364.2,217.3 +363.6,218.5 +362.3,200.2 +364,208.6 +364.5,207.2 +362.5,208.2 +366.6,211 +365.5,208.1 +366.7,208.2 +367.5,206.9 +368.8,204.1 +374.6,206.1 +373.1,210.6 +374.8,214.1 +374.5,213 +373,211.7 +370.2,219 +374.1,218.5 +375.2,215.9 +372,215.3 +371,216.1 +375.5,214.6 +364.9,230.2 +372.9,223.1 +376,234 +361.9,224.1 +363,228.2 +363,229.8 +366.5,224.3 +365.8,224 +368.4,221.9 +371.7,224.9 
+378.7,221.2 +378.8,250.1 +366.1,255.3 +370.9,254.5 +373.8,253.9 +376.9,255.1 +367.6,241.7 +379.6,286.6 +360.4,291.5 +363.4,294.2 +375.2,317.3 +372.5,303.7 +362.6,330.7 +371.7,337.8 +363.7,335.9 +369.9,339.8 +368.8,324.6 +373.1,330 +370.7,330.8 +372.8,331.5 +374.5,339 +374.5,336.7 +377.2,335.1 +377.5,334.6 +376.3,341.1 +371.9,342 +375.5,344.5 +371.7,346.8 +367.3,343.5 +363.1,343 +368.2,358.9 +361.4,345.1 +360.4,345.9 +363.7,345 +368.4,348.1 +368.7,345 +369.9,343.9 +369.5,341.8 +367.3,343 +374.9,344.6 +374.8,342.8 +374.6,357 +376.7,356.4 +375.5,347.7 +367.4,365 +360.1,365.2 +377.1,367.4 +370.2,378.7 +363.2,363.3 +361.4,373.3 +368.1,374.6 +366.2,365.7 +367.7,363.3 +370.8,368.8 +370.8,376.1 +375.2,378.9 +377.4,379.8 +375,375 +376.1,366 +375.7,364.4 +366.2,399.4 +364.8,398.6 +363.9,394.8 +367.6,394.6 +368.3,399.9 +368.9,394.9 +366.7,383.2 +378.2,386.8 +376,383 +379.1,405.1 +361.3,407.5 +362,407.5 +361.4,409.4 +363.7,419.9 +374.3,416.1 +375.9,418.3 +377.8,416 +373.3,432.8 +360.3,431.2 +367.9,436.3 +366.2,437.9 +366.8,434.9 +367.3,434.5 +369.8,430.4 +366.3,427.4 +372.5,420 +373,422.2 +370.9,428.2 +371.2,428.8 +374.6,426.2 +371.4,434.8 +372.8,434.3 +374.4,431.6 +379.8,432.1 +375.3,431.3 +375.3,432.8 +379.9,423.8 +376.2,429.6 +378.7,420.7 +378,420.4 +378.2,421.8 +375.9,423.4 +365.5,447.9 +371.2,441.2 +372.1,458.1 +370.2,456.1 +369.7,475 +370.1,471.5 +370,472.1 +372.1,473.7 +372.7,472.5 +374.5,474.2 +372.9,475.5 +370.9,476 +366.5,463.7 +376.8,476.2 +379.3,477.2 +369.7,484.5 +371.2,487.6 +363.5,494.8 +364.7,494.3 +363.6,498.6 +370.5,494 +376.7,485.5 +391.5,6.3 +383.2,14 +384.6,11.7 +380.2,18.4 +387.4,14.2 +393.9,14.9 +394.4,18 +397.2,18.1 +398.2,17.3 +395.6,16.1 +396,12.4 +385,31.3 +380.1,20.5 +389.6,38 +389.4,23.8 +390.4,20.3 +391.6,23.9 +394.7,33.5 +390.1,35.6 +395.2,22.1 +381.3,40.8 +382.5,58.2 +384.9,55.1 +385.8,40.9 +392.5,58.5 +393.6,58.2 +394.5,59.4 +391.1,73.4 +388.1,62.7 +389.7,62.4 +389.1,63.9 +390.9,62.4 +394.8,75.1 +398.3,75.6 +398.9,75.8 +396.2,66.4 +397.1,65.5 
+380.3,82.1 +382.4,81.6 +380.9,87.3 +386.8,96.6 +389.6,98.7 +387.8,90.3 +386.8,89.2 +399.2,89.2 +397.8,86.1 +396.2,87.3 +387.1,81.2 +398.5,87.5 +389.2,95.2 +381.6,108.4 +383.7,114.3 +387.1,105.3 +394.3,114.8 +391.6,116.7 +399.2,109.4 +398.8,112.6 +382.3,125 +383.8,139.3 +386.2,139.4 +396.4,139 +399.4,139.3 +398.2,121.2 +397.7,120.7 +386.9,156.8 +399,158.2 +394.9,150.3 +382.1,148.7 +380.2,143.7 +384.8,145 +381.5,149.7 +383.5,151.8 +382.6,152.8 +383.2,153.9 +382.6,156.4 +385.6,159.9 +385.4,156.7 +387.3,157.6 +387.9,157.6 +388.5,157.2 +389,151.8 +387.1,150.3 +388,149.7 +385.8,149.2 +388.5,146.9 +389.1,148.2 +389.4,145.1 +385.1,143.1 +386.7,141.2 +387.1,144.7 +389,144.1 +390.1,141.2 +393.3,141.5 +391.6,142.1 +394.4,142.7 +392.7,142.7 +394.7,144.9 +393.9,145.2 +393.9,149.2 +391.9,150 +390.5,152.3 +394.4,155.6 +392.6,158.4 +390.2,159.4 +397.5,159.4 +395.2,145.6 +397.3,145 +399,148.8 +397.8,141.7 +387.5,178.6 +398.5,177.2 +386.3,171.2 +381.2,162 +383.8,169.2 +380.8,172 +387.1,165.1 +389.3,161 +398.9,178.3 +397.2,169.7 +389.3,174.4 +380.6,173.1 +383.2,177.6 +382.6,177 +392.4,182.5 +393.5,185.4 +385.8,183.1 +382.8,182.1 +384.5,189.1 +384.1,190.9 +385.7,194.2 +387.3,194.3 +387.4,193.5 +387.9,191.8 +389.5,192.1 +389.8,190.9 +389.5,189.5 +387.5,186.4 +391.9,189.3 +392.1,190.4 +394.5,198.7 +396.5,194.3 +385.8,192.7 +381.3,214.6 +384.5,218.5 +387.8,205.4 +386.4,201.2 +391.9,210 +394.5,209.5 +394.5,207.9 +390.6,212 +399.4,213.4 +396.4,210.5 +395.4,207 +389.9,259.8 +388.5,294.7 +380.2,290.3 +385.3,287.1 +381.6,281.8 +389.3,290.9 +390.6,280.8 +380.2,287.1 +395.8,315.7 +398,317 +395.6,310.3 +397.7,303.3 +392,330.6 +382.2,329 +381.7,338.3 +382.8,338.6 +389,337.7 +393.4,329.2 +383.4,341.9 +381.3,359.6 +384.4,356.2 +388.2,358.5 +387.6,357.1 +385.9,354 +391.4,350.4 +390.6,359.1 +389.5,378.6 +387.3,363.8 +386.2,361 +391.2,364 +394.7,367.8 +392.1,365.8 +393.4,376.9 +398.9,368.7 +390.2,397.7 +384.2,397.1 +396.4,383.5 +397.8,394.6 +384.1,382.5 +381.3,388.6 +383.6,395.5 +382.4,395.6 
+386.5,398.4 +386.6,390.2 +393.9,381.5 +394.8,398.6 +398.1,397.2 +395.2,391 +395.8,386.2 +396.3,385.1 +399.3,383.7 +380.5,417.1 +384.8,416.8 +386,419.7 +388.5,419.1 +389.7,418.6 +388.9,412.4 +391.1,406.1 +390.6,411.3 +392.7,414.7 +393,413.2 +392.5,417 +395.9,439.4 +384.6,421.9 +385,421.5 +381.3,425.2 +380.1,430 +382.5,428.7 +383.8,426.9 +384.9,427 +380.5,433.2 +384.8,430.9 +383,430.3 +380.1,437 +381.1,437.2 +386,431 +386.3,433.9 +386,434.5 +387.5,429.9 +386,427 +386.1,429 +388.4,429.3 +389.7,429.3 +388.6,424.9 +386.7,426.7 +388.4,422.8 +390.2,420.8 +392.7,421.2 +391.5,433.6 +390.7,436.8 +398.9,436 +395.7,434.5 +397.2,433.4 +399.4,431.2 +398.8,430.7 +396.3,430.9 +398.7,432.8 +396.4,426.1 +396.2,423.7 +394.5,445.5 +384,462.8 +389.5,474.8 +391.7,477.9 +398.2,477.4 +399.4,476.8 +385.2,492.9 +381.4,485.3 +381.5,488.8 +384.6,486 +382.5,490.7 +382.8,495.1 +388.9,499.1 +389.3,481 +389.3,482.5 +389.6,484 +388.2,484.6 +391.7,484.7 +393.7,488.1 +395,486.8 +392.4,491.4 +396.3,494.7 +399.1,494.2 +398.6,492.1 +399.8,491.2 +399.2,490.1 +398.1,492.2 +396.4,489 +398.6,487.6 +396.5,486.1 +410.1,0.9 +414.5,10.9 +410.6,28.8 +418.8,28.2 +418.2,24.7 +413.2,49.1 +413.1,54.5 +414.3,58.1 +416.8,58.4 +416.2,55 +405.4,57.4 +409.8,56.4 +408.4,46.9 +409.5,44.1 +411.2,41 +408.5,62.6 +411,66 +401.8,62.5 +401.5,67 +400.5,67.2 +400.5,69.3 +400.7,69.7 +401.6,69.1 +402.4,69.6 +402.1,72.5 +402.8,73.9 +400.4,70.7 +400.5,79 +407.8,67.3 +408.9,66.9 +407.2,64.8 +410.4,69.5 +411,70.3 +410.2,74.8 +412.6,79.5 +415.9,73.3 +403.8,81 +408.8,99.7 +408.7,86.1 +407.2,87.7 +408.7,83.4 +412.5,85.9 +413.7,91.3 +416.3,98.9 +418.8,96.8 +404.2,108.5 +407.8,104.6 +410.5,101.8 +410.6,113.8 +413.3,114.4 +410.8,115.3 +418.3,102.4 +411.3,133.8 +402.8,124.7 +407.4,135.1 +406.1,139 +407.1,122.8 +411.7,120.1 +414.4,125.8 +411.3,138.2 +418.8,123.7 +415.9,120.5 +419.3,143.8 +410.5,148.8 +401.9,154.8 +400.1,148.9 +402.4,154.5 +404.2,159.7 +404.6,156.2 +408.5,141.2 +415.8,159.6 +416.6,157.4 +407.9,174.1 +413.3,174.1 +408,177.8 
+414.6,169.5 +413.2,165.8 +410.5,174.7 +413.4,176.1 +407.6,180.4 +412.4,183.2 +419.8,191.1 +409,206.2 +413.8,218.1 +419.8,225.3 +409,222.5 +411.2,223.4 +415.3,223.7 +416.8,256.7 +401.2,268.7 +400.5,271.5 +418.4,266.2 +413.5,269.4 +416.3,275.6 +404.2,298.4 +416.8,312.7 +400.6,316.7 +402.5,315.3 +408.4,302.5 +411.2,319.7 +416.1,320.5 +404.2,326.5 +402.8,325.5 +408.9,339.8 +405.9,325.7 +414.6,323.1 +414.4,332.6 +410.7,338.1 +415.3,335.2 +415.2,334.5 +404.9,357 +409.4,348.2 +406.3,349.8 +410.3,341.4 +411.4,344.4 +412.8,344.8 +416.5,356 +417.5,367.9 +409.5,366.4 +405.7,366.1 +403.3,374.3 +406.4,375 +403.7,375.6 +406.2,374.3 +405.4,370.7 +409.5,367.5 +414.1,378.7 +403.3,388.5 +405.7,389.4 +403,395.6 +406.7,396.6 +405.7,396.3 +405.9,394.5 +409.6,386.3 +413.9,383.4 +414.7,386.2 +411.6,399.8 +416.3,385.1 +416.3,383.2 +415.5,384.1 +409.9,387.2 +400.7,401.3 +416.7,404.5 +402.2,407.3 +419.7,405.1 +400.9,424.3 +400.7,431.4 +400.5,432.4 +400.6,434.3 +400.9,433 +410.6,424 +414,428.8 +411.7,426.8 +416.2,439.2 +418.1,439.6 +418.5,438.9 +419.8,435.1 +418.1,426.2 +417.8,422.9 +404.3,430.2 +413.3,444.1 +414.2,441.4 +419.2,455.2 +419.4,453.3 +419.6,449.6 +418.5,443.2 +419.7,441 +401.9,472.3 +402.9,473.9 +400.1,477.6 +407.6,475.8 +409.5,468.4 +407.7,464.2 +413,460.1 +413.2,464 +410.5,465.5 +410.2,467.3 +411.4,465.5 +414.5,479.7 +416.5,475.7 +419.2,479.5 +418.3,478.7 +418.8,475.8 +419.8,474.9 +419.4,473.5 +415.9,466.3 +418.3,469.9 +419.6,462.7 +418.3,460.3 +413.8,483 +400.7,483.6 +400.1,492.8 +400.5,493.3 +403.2,498.4 +422.4,9.8 +420.6,39 +431.8,20.7 +437.5,27.3 +436.2,29 +434,44.2 +433.9,41.7 +421.6,42.3 +420.9,59.1 +421.9,58.8 +429.2,58.4 +426.6,58 +433.5,54.7 +434,52.6 +431.8,51.5 +435.9,58.4 +436.7,50.6 +439.8,40.2 +423,62 +422.9,67.3 +420.7,79.9 +426.3,69.1 +426.7,60.8 +430.6,60.7 +432.2,64.9 +431.7,62.1 +435.5,65.7 +424.2,82.8 +422.8,99 +436.3,93.2 +423.4,105.2 +426,138 +424.5,133.8 +436.3,134.6 +429.2,132.1 +428,129.1 +431.4,128.1 +430.2,130.6 +434.8,132 +436,129.7 +437.8,129.6 
+438,155.8 +435.3,153.8 +430.6,153.5 +434.4,146.1 +438.1,143.9 +432.2,142 +430.7,142.2 +435.6,140.7 +438.1,171.4 +431.2,178 +434,184.4 +432.7,180.7 +436.4,207.9 +438.7,208 +439.8,203.6 +432.9,228.7 +428.9,253.6 +424,251.7 +426.4,250.4 +434.3,250.5 +439.8,253.8 +420.4,259.7 +436.5,258.1 +427.4,276.2 +422.5,263.2 +423,260.5 +423.8,261.1 +430.5,266 +430.8,267.1 +437,279.3 +420.5,282.3 +429.8,295.1 +429.7,288 +435.9,298.1 +438.7,299.5 +435.6,293.6 +437.1,292.6 +428.5,317.7 +433.8,310.6 +425.9,339.4 +420.6,321.9 +426.3,338.1 +426.8,339.7 +431,334 +431.4,338.2 +438.6,331.9 +425.7,354.2 +432.2,340.5 +420.9,353.2 +421.2,355.4 +423.1,356.4 +426.7,342.7 +430.4,347.7 +430.3,350.2 +431.2,353.1 +420.8,360.3 +437.5,365.8 +434.3,397.6 +420.3,387.4 +420.8,387.4 +422.1,387.6 +420.4,395.5 +423.5,397.6 +428.9,390.9 +421.9,414.3 +428.7,418.3 +432.9,408.2 +420.4,403.8 +429.5,411 +433.8,409.4 +420.9,427.7 +424.6,430.4 +421.3,438 +421.5,439.8 +426.8,436 +425.7,438.2 +428.4,438 +426.3,432 +426.9,434.4 +426.2,425.2 +425.3,429.8 +432.4,429 +431.3,430.9 +432,434.2 +431.6,439.7 +433.4,438.1 +420.4,443 +421.7,444 +424.6,441.8 +421.9,441.4 +423.5,449.1 +424.8,449.1 +423.7,444.7 +420.9,450.5 +425.2,459.6 +425.5,450.1 +429.1,450.9 +429.2,450.3 +429.6,446.2 +427,447 +425.6,441 +430.7,449.3 +430.7,450.9 +434,450.9 +433,451.6 +430.6,455.4 +430.7,455.9 +431.1,458.2 +432,458.3 +437.6,446.9 +433.1,445.3 +420.2,462.7 +424.6,463.2 +421.3,469.9 +420.5,473.9 +420.7,474.5 +424,473.5 +423.1,470.5 +422.3,473.2 +420.1,476.8 +421.8,477.4 +421.8,478.6 +426,471.1 +426.2,471.3 +426.7,462 +427.2,464.1 +428.9,461.6 +430.7,464.6 +431.8,464.8 +431,465.3 +433.3,465.8 +434.5,465.1 +434.1,465.1 +437.6,470.1 +420.5,480.9 +423,481.5 +423.2,481.3 +422.6,487.3 +427.8,498.1 +427.2,496.5 +429,490.4 +428,489.2 +429,489.1 +428.4,487.2 +425.9,483.1 +429.5,481.9 +430.8,485.5 +431.2,488.9 +433.2,485.9 +434,490.3 +432.1,492.4 +434,495.8 +435.6,497.3 +438.1,499 +436.3,492.9 +437.7,487.9 +425.2,499.7 +445.1,2.4 +451.9,3.6 +450.5,5.7 
+450.1,7.9 +453.8,10.2 +456.1,10.5 +459.6,10.1 +455.9,6.7 +459.5,3.5 +458.1,0.9 +440.6,23 +441.2,31.2 +440.6,32.9 +440.4,33.7 +444.7,31.9 +449.1,25.1 +453.5,28.2 +451.7,34.6 +453.1,37.4 +455.7,21.3 +458.4,20.2 +441.7,48.7 +440.2,50.1 +454.9,55.9 +456.3,46.9 +458.8,49.2 +457,44 +459.2,40.8 +457.7,41.2 +456.3,47 +446.4,78.9 +443.5,81.1 +444.6,87.9 +447.3,95.2 +447.4,95.8 +449.8,83.2 +458.4,98.2 +452.5,101 +452.8,100.4 +450.8,112.2 +455.9,115.6 +457.4,116.3 +456.8,117.3 +457.3,117.7 +458.6,116.4 +455.8,114.8 +455.9,107.4 +459.6,103.4 +440.3,128.7 +443.4,136.1 +451.1,138.2 +458.4,145.5 +456,155.6 +440.8,141.3 +444.2,145 +441,147.2 +454.8,164.7 +445.5,162.4 +449.8,169.8 +452.6,178.2 +442,176.1 +443.5,176.3 +449,163.5 +454.6,186.5 +459.4,196.3 +444.7,199.2 +444.8,198.5 +442.6,196.5 +445.5,198.8 +445.8,199.6 +447.5,195.6 +453,194.8 +443.8,212.7 +449.2,216.6 +454.3,203.9 +450.9,211 +452.2,219.7 +452.7,218.4 +454.1,219.2 +455.3,212.4 +456.1,214.1 +441.2,246.4 +440.3,253.5 +443.9,265.5 +445.3,272.8 +444.7,315.7 +449.3,334.2 +443.9,334.7 +448.5,352.8 +443,354.3 +455.2,347.2 +448.1,351.7 +449.4,362.1 +443.8,369.6 +444.2,376.9 +449.9,372.3 +450.1,374.3 +459.1,362.5 +441.3,399 +442.6,399.7 +451.5,397.7 +457.9,390.1 +456,390.5 +450.4,403.8 +457,413.4 +444.1,418.4 +455.2,419.1 +459.8,405.7 +457.3,406 +450.1,424.6 +441.3,435.8 +442.9,438.1 +448.9,434.9 +440.2,455.1 +447.7,452.4 +443.4,467 +442.9,470.3 +442.9,475.3 +449.5,477 +449.8,473.3 +453,471 +459.6,476.6 +457.8,475.3 +458.5,468.2 +440.2,481.3 +442.2,493.4 +444.5,499.1 +447.6,484.2 +447.7,481.2 +460,3.5 +468.5,8.4 +478.5,17.3 +478.3,15.1 +476.3,15.3 +479.1,12.1 +465.2,33 +474,21.3 +472,28.3 +479.5,34.1 +475.7,26.2 +462.2,44.3 +463.1,43.3 +464.7,44.3 +461.7,46.1 +462.7,48.7 +464.9,46.4 +462.7,45.8 +465.4,48.1 +469.1,43.4 +467.5,69.9 +464,60.4 +466.2,64.4 +474.5,64.1 +471.7,74.1 +475.1,73.7 +479.5,73.8 +478.9,68.8 +479,99.3 +478.6,92.6 +477.9,87.1 +474,100.7 +461,118.6 +466.7,119 +466,107.9 +469.9,109.4 +468.8,139 +466,134 
+466.7,122.9 +467.6,122.2 +463.4,159.6 +474.7,159.6 +464.2,154.1 +469.5,148 +475.3,150.4 +476.9,148.4 +468.2,179.6 +472.6,172.3 +476.2,167.7 +479,195.1 +479.7,198.7 +471.3,194.4 +464.7,193.5 +464.2,185.7 +467.5,197.8 +469.2,190.9 +479.1,199.8 +476.6,181.6 +460.2,219.4 +465.8,216.3 +469.4,201.5 +472.7,205.9 +476.6,203.8 +469.3,227.6 +466.9,224.4 +468,220.3 +474.4,224.6 +471,231.6 +475.3,223.6 +479,224.8 +467.8,254.9 +468.5,248.8 +469.5,246.4 +475.8,251.9 +477.8,247.1 +474.8,253.5 +474,251.8 +473.3,251.7 +466,278.6 +463.6,266.2 +462.9,274.3 +469.3,261 +474.7,277.5 +476.6,264 +469.7,260.7 +464.8,289.4 +473.5,314.5 +460.3,314.7 +461.1,325.4 +461.2,334.7 +467.5,331.7 +465.3,328.1 +469,328 +467.5,326 +466.9,327.1 +474.6,322 +479.6,331.3 +464.5,353.1 +462.5,355.9 +466,358.6 +479,377.8 +460.7,367.1 +462.7,364.8 +464.8,362 +466.7,362.6 +479.4,374.8 +464,365.4 +479.4,386.5 +465.8,387 +472.3,386.1 +462.7,402.5 +461.9,403.9 +474.9,414.8 +479.8,415.8 +478.4,407 +466.2,435.3 +461.9,429.5 +464.6,420.3 +474,424.9 +475.7,435.1 +478.5,436.4 +478.2,438.4 +475.5,426.5 +477,426.5 +477.9,445.2 +464.1,441.6 +464.3,458.2 +461.9,458.4 +466.4,458.6 +466.8,459.4 +468.2,444.2 +470.8,442.1 +473.9,444.6 +473.4,441.6 +474.1,447.9 +473.4,446.2 +471.7,446.4 +471.1,453.8 +474.1,453.4 +474.7,453.2 +474.5,451 +473.2,450.5 +472.1,451 +471.7,451.7 +472.6,453.1 +473.2,451.1 +475.3,456.4 +475.3,458.1 +478.4,457.6 +479.2,457 +477.3,456.9 +477.5,457.7 +477.3,455.3 +475.7,455.5 +476,452.1 +476,454.8 +476.2,453.6 +477.1,453.4 +477.5,453.2 +477.1,453 +479.5,454.1 +478.5,451.6 +476.7,452 +475.1,446.4 +475.7,447.5 +475.6,448.2 +477.5,449.7 +478.1,449.6 +479.5,448.9 +476.5,443.8 +476.6,443.4 +478.8,442.5 +464.6,473 +462.8,472.8 +461.3,477.4 +464.8,461.5 +466.1,462.4 +466.6,464.6 +468.8,461.5 +472.9,460.2 +473,461.6 +470.3,469 +471.6,468.2 +479.6,463.1 +471.8,479.1 +474.2,477.9 +478.6,472.3 +477.9,470.1 +478.9,467.9 +477.5,466.5 +475.4,461.4 +475.2,464.1 +476.7,461.4 +479.2,479.3 +460.8,488 +464.3,493.9 
+464.2,493 +464.4,498 +465.3,498.2 +468.1,499.8 +466,493 +468.8,494.8 +465.2,480.2 +466.5,484.1 +466.2,480.4 +470.4,480.8 +472.4,494.7 +471.1,499.2 +477.5,496.5 +476.3,492.4 +477.4,492.9 +476.6,491 +475.9,489.4 +481.6,11 +485.3,11.4 +491,5.6 +493.5,7.3 +499.4,12.8 +496.2,8.1 +499,8.3 +483.8,35.6 +482.4,39 +480.9,38.6 +487.1,36 +486.3,32.8 +493.4,28.8 +493.3,32.8 +494.6,35.5 +496.4,24.5 +495.4,23.5 +498.3,22.6 +485.2,32 +482.9,52.1 +488.3,56 +488.2,58.1 +494.9,45.5 +494.7,54.5 +491.3,55.8 +490.3,57.1 +490.3,58.8 +496.2,58.4 +488.9,74.6 +480.3,64.7 +483.2,67.4 +481.1,67.5 +488.8,65.3 +489.6,65.9 +490.7,60.6 +497.1,61.9 +499.7,60.8 +490.9,86.4 +495.9,99 +486.5,94.4 +483.1,87.2 +483.5,106 +489.2,116.5 +488.2,114.2 +497.7,116.9 +499.2,115.9 +498,112.8 +481.6,133.7 +486,131.1 +488.1,128.4 +490.1,124.6 +492.2,127 +493.8,126.8 +493.4,125.2 +491.6,129.8 +493.3,134.8 +490.6,136.2 +494.1,137.1 +492.9,138.4 +497,136.4 +499.4,136 +496.6,131.4 +499.5,125.4 +483.5,133.1 +484.4,146.4 +482,149.8 +481,149.9 +485.9,158.3 +487.7,155.4 +489.4,157.6 +488,159.6 +486.2,154.2 +488.8,154 +494,141.5 +491.1,152.1 +493.8,152.9 +494.5,154.2 +492.9,154.6 +492.6,157 +498.6,150.8 +497.2,147.4 +498.4,140.2 +481.2,178.3 +481.6,162.5 +485.3,161.6 +489.2,160.7 +492.6,162.2 +493.9,170.4 +496.5,175.8 +495.9,171.5 +490.4,186.9 +481.6,195 +481.2,196 +489.8,198.6 +485.4,184.8 +494.5,197.8 +498.6,199.8 +497.7,198.7 +482.3,209 +492.4,216.7 +480.1,208.4 +483.6,213.5 +485.5,209.7 +487.2,208.9 +487.2,200.7 +498.2,205.3 +489,234.8 +482.1,221.4 +480.2,236.6 +494,238.8 +493.3,253 +482.1,240.5 +498.2,261.2 +482.2,270.5 +488.7,277.5 +488.2,262.8 +493.3,261.7 +492.2,263.8 +492.8,266.9 +499.9,272.5 +488.3,295.2 +489.2,287.5 +491,288.1 +497.9,286.2 +499.5,314.6 +495.5,310.8 +482.5,309.6 +495.2,310.1 +497,300.4 +489,320.7 +493.8,329.8 +499.9,337.5 +498.2,330.9 +485.4,344.6 +490.7,351.6 +485.7,361.5 +496.4,361 +486.9,376.4 +489.1,379.3 +487,374.5 +495.1,375.5 +495.7,374.5 +498.1,387.4 +483.2,392.1 +485.3,387.5 
+489.9,382.3 +499.6,398.6 +480.1,403.5 +482,409.3 +481.2,410.1 +486,419.3 +488.6,416.8 +487.9,418.1 +489.8,418.3 +485.8,409.8 +487.2,408.5 +491.4,400.8 +491.3,404.4 +494,414.7 +494.8,416.1 +494,417.9 +494.1,419.5 +496.1,417.4 +496.3,416.1 +496.7,415.8 +498.6,416.4 +499.2,417.1 +499,418 +496.4,413.9 +496,412 +496.1,412.3 +499.6,414.4 +499.5,414.6 +495.8,408.2 +496.3,404.7 +495.9,401 +499.1,400.2 +484.4,425.8 +484.7,428.1 +484.6,428.9 +481.9,428.7 +481.9,439.1 +482.1,437.4 +481.5,437.2 +480.6,439.2 +485.3,435.8 +487.8,426.8 +487.7,422.5 +488.9,421.5 +489.3,423.9 +494.6,423 +491.7,423 +493.6,426 +492,429 +491.1,439.1 +498,435.8 +499.7,439.2 +496.5,434.7 +498.3,433.7 +496.3,428.3 +496.9,428 +499.6,425.9 +498.1,427 +497.1,420.6 +495.3,420.5 +482.1,440.9 +482.4,443 +482.9,444.9 +482.6,444.8 +480.1,443.6 +483.7,445.9 +484.5,447.8 +484,447.7 +483.6,449 +484.5,449.1 +481,450.8 +481.5,450.8 +484.1,450.4 +484.1,451.1 +482.7,452.6 +482.7,454.8 +481.5,454.6 +480.5,451.8 +480.3,451.3 +483.7,457.1 +484.3,459.6 +480.7,459.1 +485.5,459.2 +485.9,456.3 +488.9,456 +489.1,455.1 +486,454.5 +487,454.6 +486.9,454 +485.8,451.7 +485.1,450.6 +487.5,450.1 +488.9,450.1 +487.6,451.6 +489.3,452 +489.8,451.8 +487.2,452.8 +488.3,454.8 +486.4,449.3 +487.2,447.7 +485.1,448.5 +486.1,446.7 +487.9,446.4 +487.9,445.9 +486.9,446 +485.5,445.7 +489,446.2 +489.5,445.5 +489,447.2 +489.6,449.5 +488.9,449.9 +485.1,444.3 +485.1,443.9 +486.2,444.8 +487.3,443.4 +492.1,442.2 +493.9,440.8 +492.6,443.3 +491.6,444.6 +490.2,445.6 +490.7,446.6 +493.1,446.8 +492.9,447.5 +493.3,447.9 +494.8,449.4 +494.1,448.8 +493.7,449.7 +493.2,449.5 +492.1,449.3 +492,448.9 +490.9,448.3 +490.8,450.3 +490.8,450.7 +491.6,451.2 +491.9,450.9 +493.7,450.4 +494.4,454.9 +490.3,452.7 +490.3,455.4 +497.1,452.2 +495.1,450.8 +495.5,449.9 +495.3,448.4 +495.4,446.2 +499.8,445.2 +499,448.3 +496.7,446.9 +496.9,444 +496.7,441.9 +498.9,442 +498.9,443.9 +490.3,443 +481.6,462.2 +482.8,460.6 +483,461.6 +483.8,462.5 +484.7,464 +481.3,465.2 +482.1,465.8 
+484.2,466.2 +484.3,466.6 +481.5,468.7 +482.9,472.1 +483,470.8 +482.7,474.7 +481.8,473.5 +480.5,476.7 +483.7,479.4 +488.9,475.3 +485.5,473.3 +488.1,473.4 +485.2,468.5 +488.5,466 +489.2,467.3 +485.2,464.8 +487,461.2 +487.1,460.3 +489.1,462.4 +493.9,461.8 +492.9,461.5 +492.5,462.9 +492.4,464.1 +492,465.2 +490.2,469.8 +492,475.8 +496.1,477.1 +496.8,474.6 +496.8,473.5 +485.5,467.5 +480.5,482 +484.6,497.9 +486.2,498.7 +486.2,495.5 +489.1,494.9 +488,485.5 +494.8,482.9 +490.5,487.2 +492.1,494.7 +494.1,497.4 +494.1,497.8 +494.2,498.1 +492.8,499.9 +495.7,499.1 +495.9,499.1 +497,499.6 +496.4,496.2 +497.4,495.9 +495.7,485.2 +501.2,2.6 +503,19.4 +510.9,18.1 +514.7,18.6 +518.2,19.8 +519.8,19.6 +515.6,0.5 +506.1,37.1 +509.9,37.1 +508.1,30.5 +508.3,25.2 +507.8,22.1 +508.8,20.4 +514,21.1 +513.1,32.8 +513.4,39.9 +517.7,39.3 +516.1,22.9 +517.6,23.9 +519.2,24.2 +501.6,55.9 +503,56.9 +506.8,48.5 +517.3,55.5 +516.3,51 +516.4,50.3 +503.2,60.9 +503,68.6 +511.3,64.9 +514.5,64.1 +511.7,64.5 +514.9,68.6 +514.2,65.3 +510.8,73.8 +510.3,74.8 +513.8,71.3 +514.4,79.2 +519.8,75.7 +515.1,74.5 +518.9,71.8 +519,69.3 +503.2,85.2 +500.3,94.2 +507.4,82.7 +513.1,84.1 +513.4,94.4 +518.2,89.2 +518.8,89.4 +516.9,84.1 +519.2,80.5 +500.8,107.9 +500.8,114.2 +501.5,111.4 +504.6,116 +503.2,116 +502.2,115.3 +514.9,111.4 +517,119.3 +517.3,118.3 +518.2,119.3 +519.1,119.3 +519.6,115.9 +517.1,111.1 +518.7,110.8 +518.2,109.6 +517.2,122.7 +507.2,137.1 +507.1,138.1 +506.3,129.9 +509.3,128.5 +507.7,128.3 +505.6,121.2 +512.8,124.3 +514.5,121 +511.5,129.3 +514.9,128.6 +512.1,133.3 +511,135.8 +510.8,139.4 +515.1,137.4 +515.3,133.6 +518,134.2 +516.4,125.9 +518.9,127 +519.6,124.5 +519.9,122.1 +500.8,142.9 +503.6,141.1 +502.1,140.9 +500.7,145.7 +502.8,149.9 +503,145.3 +508.4,179.2 +502.2,169.9 +500.4,175.4 +507.2,189.1 +500.6,183.6 +514,195.8 +503.9,199.3 +506.4,183.3 +514.3,197.8 +504.7,203.2 +507.9,214.9 +508.8,204.1 +509.4,204.8 +508.8,203.6 +508.4,200.9 +511.4,200.5 +519.5,215.4 +510.2,201.8 +514.1,203.5 +514.2,202.8 
+515.3,208.9 +518.3,208.3 +518.1,207.5 +519,206.2 +515.7,200.8 +516,203.7 +516.9,203.1 +518.3,201.5 +519,200.5 +518.2,200.6 +511,230.6 +517.2,229.1 +506.1,247.8 +509.9,249.1 +504.6,254.6 +512.3,255.7 +504.5,276.9 +500.8,268 +504.6,269.1 +500,273.1 +504.6,274.6 +505.1,271.1 +506.5,273.1 +513.1,275.1 +517.8,278.5 +516.9,270.9 +519.1,287.2 +507.8,297.9 +510.6,287.4 +511.1,297.5 +516.2,298 +502.7,317.9 +511,307 +514.2,307.2 +513.2,306.2 +512.2,305.1 +510.9,311.2 +518.4,313 +518.6,309 +518.8,306.1 +516,308 +518.5,303.6 +518.2,300.3 +502,322.5 +512.6,328.9 +509.3,332.9 +503.8,337.4 +503.9,335.6 +519,333 +516.1,321.8 +504,341.1 +512.3,342 +517.2,340.8 +517.8,345.5 +502.4,353.3 +509.6,354.9 +509.1,343.1 +515.6,355.1 +502.5,366.7 +508.9,367.9 +500.7,376.6 +509.1,376 +511,365.7 +511.5,378.4 +512.5,379.1 +518.4,375.5 +519.8,370.5 +518.7,370.8 +515.2,369.1 +518.8,369.4 +503.9,386.2 +517.6,385.6 +502.1,382.6 +500.3,394.9 +503.1,393 +503.1,396 +505.7,397 +507.1,391.7 +505.1,393.7 +507.3,394.8 +507.9,389.9 +508.4,381.1 +513.4,380.8 +510.1,389.3 +510.1,392.6 +512.8,398.9 +517.7,398.9 +518.2,399.3 +519.6,398.5 +518.7,397.7 +518.5,396.8 +519.6,395.5 +516,394 +518.6,393.6 +515.3,385.4 +519.5,380.3 +500.4,418.2 +500.1,419.2 +500.7,419.7 +500.9,419.9 +503.9,419.9 +506.2,418.8 +507.4,403.7 +512,417 +515.2,419.9 +518,417 +500.4,415.7 +502,417.9 +501.6,421.1 +502.9,423.2 +502.3,424 +504.2,421.9 +503,420.9 +500.5,428.3 +500.3,430.3 +501.7,433.8 +501.2,437.8 +500.9,439.8 +504.9,437.8 +503.2,435.4 +505.2,436 +506.3,438.9 +509.2,439.6 +509.6,431.4 +506.4,430.7 +505.6,427.9 +504.4,429.2 +505.7,429.9 +509.7,427.8 +508.9,426.7 +507.1,425.9 +507.2,423.8 +511.4,422.6 +510.3,428.3 +514.7,428.2 +514.2,426.5 +511.1,433.9 +514.7,430.8 +509.6,439.1 +510.2,439 +512,438.7 +516,435.3 +517.4,431.9 +517,432.5 +515.3,434.5 +519,433.6 +519.9,430.7 +515.8,425.7 +517.1,425.9 +516.2,429.7 +519.5,428.7 +518.2,426 +517,422.7 +515.9,423.5 +517,424.4 +519.5,422.6 +517.4,420.5 +517.2,421 +502.5,440 +501.7,442.3 
+500.8,443.3 +501.7,444.7 +503.1,444.9 +503.5,443 +502.6,441 +501,449.7 +503.1,445.4 +501.6,452.8 +504.1,452.1 +504.6,457.6 +504.2,455.7 +505.9,456.1 +505.7,458.5 +508.4,459.9 +510,457.9 +507.6,454.3 +505.3,448.8 +505.2,449.9 +507.8,446.1 +505.4,442 +508.1,440.4 +511.9,444.9 +514.3,442.2 +512.4,446.4 +514,448 +514.6,447.7 +513.6,446.1 +513,452 +513,459.9 +514.5,459.9 +519.5,455.5 +515.8,453.1 +517.8,452.9 +515.9,445.5 +516.2,446.8 +516.2,448.1 +515.8,449.1 +501.8,460.5 +500.8,469 +504.6,469.3 +503,474 +507.1,478.6 +506.6,468 +506.5,468.5 +509.1,469.8 +509.6,466.5 +508.2,460.3 +508,462.3 +510.7,460.3 +510.6,464.7 +514.1,463.9 +511.5,466 +511.8,468.9 +513.5,469 +514,467.4 +512.4,465.6 +510.2,474.3 +519.8,477 +516.7,471.8 +515.3,464 +517,460.5 +503.5,484.4 +503,487 +500.8,496 +501.7,495.9 +501,496.2 +503.7,499.8 +503.9,498.8 +503.8,498.1 +503.5,496.1 +505.1,497.5 +504.5,499 +506.4,492.5 +507.2,492.2 +505,493.9 +508.9,491.9 +509.6,480.6 +508.3,482 +513,492.2 +510.7,492.9 +510.8,497.8 +511.1,498.1 +514,498.3 +514.5,499.9 +513.3,495.7 +519.4,498.2 +519.2,492.2 +518.2,485.8 +519.4,480.8 +524.2,14 +520.3,15.1 +532.9,12.9 +539.3,19.9 +539.3,19.5 +527.3,24 +522.5,30.2 +521.6,37.1 +525.6,37.3 +526.3,39.8 +528.3,39.4 +528.2,37.5 +528.6,31.2 +527.3,24.3 +529.1,20.9 +531.1,32.6 +530.8,34.5 +534.5,31.9 +530.3,35.5 +534,39 +534,36.2 +536.9,32.7 +537.1,34 +526.8,34.4 +523.2,49.1 +521.5,45.4 +522.2,59.4 +522.5,56.9 +526.8,58.6 +529,58 +526.3,54.5 +526.3,54.8 +528.5,53.9 +528,47.9 +526.8,44.4 +528.4,41.9 +531.3,47.6 +530.5,50.7 +531.8,59.1 +538,50.1 +528.8,50.9 +531.3,46.8 +538.5,65 +520.6,63.9 +522.1,64.6 +523.5,63.6 +524.6,62.6 +523,60.8 +521.5,66.7 +521,69.5 +522.7,70.5 +523,72.1 +521.4,77.8 +520.8,79.5 +527.7,79.1 +529,78.4 +528.4,73 +527.6,68.9 +529.2,65.5 +527.3,62.9 +530.2,64.8 +530.9,66.3 +536.6,75.4 +536,79.5 +520.9,81 +521.1,83.1 +521.8,84.5 +524.2,89.2 +525.2,90.5 +527.5,83.9 +529.8,83.3 +528.3,81.3 +539.8,81.5 +522.6,114.7 +526.3,110.6 +529.9,112 +522.6,104.4 +522.7,104.6 
+524.7,102.8 +522.6,119.4 +527.4,119.5 +535.7,117.8 +538.7,113.1 +539.1,111.9 +537.4,110.9 +539.7,105.1 +537.3,113 +530.5,125.5 +523.5,122.2 +523,120.3 +522.2,120.6 +522.5,121.8 +520.2,126.7 +523.3,126.9 +524.8,133.4 +523.6,137.7 +522.2,135.3 +525.6,137.4 +528.4,132.9 +526.1,126.5 +525.7,126.8 +528.6,126.2 +525.9,121.8 +525.5,123 +534.4,121 +531.6,122.1 +534.1,127.5 +532.3,130.9 +534.1,137.3 +534.3,136 +539.8,139.7 +536.1,127.6 +538.6,128.3 +538.1,121.9 +525.3,131.5 +524.5,141.9 +524.8,140.4 +522.8,147.1 +524.8,150.7 +525.1,141.1 +530.6,143.9 +538,150.5 +535.1,145.5 +521.5,176.3 +533.5,160.8 +534.5,176.2 +520.2,196.2 +522.8,197.9 +528.8,198.4 +532.3,189.9 +534.6,193.8 +531,199.4 +538.7,195.2 +536,180.2 +536.7,183.1 +538,182.4 +520.9,200.4 +524.2,203.1 +522,200.4 +522.8,201.9 +523.3,202.8 +520.3,208.9 +522.6,217 +528.8,205.2 +526.7,205.1 +525.9,201 +526.1,202.7 +526.1,203 +528.7,201.1 +530.4,201.9 +537,209.4 +524.6,234.6 +520.3,238 +532.1,224.1 +522.8,254.4 +533.6,258.1 +531.3,248.4 +534.8,247.7 +535.2,252.9 +533.4,259.8 +536.1,252.4 +539.8,276.4 +536.5,269.4 +538,269 +539.6,269 +539.7,265.1 +524.3,287.9 +537.4,287 +532.4,286.5 +524.8,296.9 +528.2,299 +522.5,307.1 +531.3,306.6 +527.1,308 +528.5,315.4 +521.5,302.7 +527.7,318.5 +526.6,313 +526.2,300.4 +525.6,302.5 +525.9,304.7 +531.1,302.5 +532.6,300.4 +533,308.8 +536.1,309.1 +522.8,329.8 +521.5,331.5 +522.9,333.3 +525.7,330.9 +528.7,330.3 +528.5,321.3 +527.5,320.2 +528.8,355.2 +529,341.6 +529.7,343.7 +529.7,340.5 +539.4,355.3 +535.5,354.8 +538.7,346.4 +527.2,376.6 +526.2,378.3 +526.4,371.9 +525.9,368.9 +527.9,363.8 +530.7,368.2 +530.1,370.3 +534.5,372.7 +534,370.9 +531.5,377 +530.6,377.4 +536.3,371.3 +537.4,371.9 +523.6,385.8 +524,385.6 +521.4,392.1 +520.4,394.4 +523.4,394.4 +523.3,393 +521.3,395.4 +522.7,398.4 +527.2,395.1 +528.3,390.6 +527,388 +528.4,389.7 +529.4,380.3 +530.5,388.5 +532.4,389.8 +535.7,382.8 +535.8,380.6 +536.5,393 +531.6,398.8 +522.2,401.8 +524.7,401.9 +523.1,405.3 +520.6,410.5 +528.6,419.1 
+529.3,415.5 +525.5,405.3 +527.6,407.4 +529,407.2 +529.8,407 +526.7,400.3 +532.6,404 +533.9,413.8 +535.5,416.3 +535.3,419.7 +536.7,418.9 +538.8,415.6 +536.7,410.8 +536.1,414.7 +538.7,418.9 +522,422.6 +521.2,428.5 +520.1,429.2 +522.5,429 +523.6,429.4 +523.2,426.1 +520.5,432.5 +521,433.5 +524.9,432.1 +521.6,436.2 +522,434.9 +527,434.4 +529.4,429.1 +529.7,428.2 +525.8,426.3 +529.8,421.4 +531,421.6 +532.6,421.9 +533,424.8 +532.3,427.3 +532.6,428.4 +534,427 +533.7,434.6 +533.5,431.5 +534.1,430.8 +536,430.9 +536,426.7 +536.3,425 +537.7,424.7 +530.3,431.2 +520,436.1 +522.7,445.6 +522,448.5 +520.9,450.7 +522.2,450.7 +520.7,455.1 +525.9,449.8 +526.7,446 +535.2,452.6 +535.4,455.6 +538.5,458.8 +539.5,459.1 +537.3,450.9 +537.2,446.3 +535.7,443.5 +538,441.9 +539.6,442.2 +522.8,469.6 +523.1,473.6 +524.2,474 +524.8,474 +529.2,466.9 +525.8,464.9 +533.7,464 +531.2,465.8 +531.9,466.4 +533,474.4 +533.3,473.3 +537.4,477.1 +536.9,478.9 +535.6,474.8 +537,465.4 +536.4,460.8 +527.4,478.4 +531.5,461.6 +530.9,475.1 +520,461.4 +523,485.8 +521.6,496.9 +520.2,497.9 +522.6,498.8 +524,499.8 +523.8,497.4 +525.1,498.8 +529.1,495.9 +529.3,498.1 +527.5,493.7 +529.5,493.4 +529.3,487.7 +529.5,486.6 +530.9,487.6 +531.5,487.7 +532,487.1 +530.5,485.7 +533.9,488.5 +531.3,491.2 +531.9,492.7 +530.4,493.6 +531,493.2 +530.4,493.9 +532,494.7 +533.6,494.2 +532.9,493 +533,490.7 +533.8,490.8 +530.7,496.5 +531.4,497.1 +532.4,496.9 +531.8,498.7 +531.2,498.9 +530.4,498.3 +531.7,499.9 +533.9,499.8 +534.2,496.9 +533.8,496.3 +534.5,495.5 +537.7,497 +537.2,498 +536.3,497.2 +535.4,498.1 +535.1,499.2 +539.1,493.7 +537.3,494 +536.8,493.1 +534.8,481 +524.2,483.2 +525.9,484.6 +527.8,481.4 +528.6,482.2 +535.9,494.8 +540.2,11 +548.5,14.1 +545.1,9.6 +545.9,2.8 +552.2,2.3 +551.4,4.1 +552.5,13.3 +554.1,14.2 +552.5,14.4 +550.7,15 +550.7,13 +555.2,13.4 +555.7,11.4 +556.8,10.2 +555.3,7.8 +557.8,9.2 +559.5,2.1 +542.7,23.8 +540.1,25.5 +544.7,25.3 +540.3,29.2 +544.3,31 +543.6,34 +541,34.8 +542.4,35.1 +543.5,38.9 +541.7,38.9 +550,39.2 
+550,29.8 +548.2,21.1 +547.8,23.1 +558.1,35.2 +555.3,37.2 +541,41 +544.5,55.3 +546.5,50.3 +549.5,51.8 +548.3,54.7 +547.5,48.1 +545.3,44.1 +545.2,41.6 +551.8,40.9 +553.3,47.7 +557.4,48.6 +558.1,43.3 +556.6,41.2 +558.8,40.9 +559.8,45.1 +559,79.5 +541.2,68.2 +546.7,73.4 +546,65.9 +545.7,60.7 +546.4,98.6 +550.5,80.8 +540.9,91.4 +546.7,83 +545.2,82.4 +558.7,96.7 +559,95.6 +544.3,109.5 +544.4,110.6 +541.5,114.1 +541.5,119.8 +546.1,104.8 +550.5,100.7 +551.4,106.6 +554.3,116.6 +558.6,114.2 +559.2,106 +558.3,107.9 +555.4,103.9 +542.2,123.4 +544.5,121.3 +540.6,126.1 +542.3,130.5 +541.1,133.4 +545.5,139.4 +544.8,134.7 +545.5,131.7 +554.3,139.8 +543.6,144.1 +550.5,140.4 +554.5,143.4 +552.3,144.9 +552.5,141.8 +554.4,145.6 +553,148.1 +552.7,149.9 +557.1,141.4 +558.8,141.8 +559.8,143.6 +557.6,144.2 +542.5,177.4 +548.5,172.8 +550,174.7 +551.3,167.7 +554.1,170.9 +557.8,179.6 +559.6,174.1 +556.9,169.5 +558.1,169.5 +558,166.2 +559.9,169.7 +556.4,162.1 +557.8,160.5 +541,185.9 +541.5,192 +544.1,192.9 +540.3,198.3 +545.6,195.3 +547.3,195.1 +549.8,197.5 +548.4,198.6 +545.8,194.3 +547.4,193.4 +549,194 +553.7,185.6 +553.8,194 +552.6,195.7 +554.4,199.8 +552.4,199 +556.2,184.1 +558.6,180.3 +544.7,202.5 +546.5,202.7 +549.2,203.5 +551.5,206 +553.5,217.1 +558.1,218.7 +555.5,207.7 +557.4,205.6 +541.4,239.1 +555.7,236 +559.5,233.9 +544.5,227.3 +544.8,229.1 +545.6,228.6 +545.3,227.7 +545.5,227.4 +547.5,227.7 +547.9,226.6 +549.2,225.9 +549,227.4 +553.6,221.2 +550.5,224.1 +540.5,249.9 +544.5,245.8 +559.6,255.1 +541.7,249.4 +543.7,259.4 +549.8,257 +549.2,249.5 +549.2,240.9 +554.6,243.8 +555.5,246.2 +556.3,241.1 +544.2,269.4 +559.2,273.8 +541.4,263.5 +544.5,273.8 +545,274.9 +542.5,274.4 +540.6,276 +542.8,277.6 +542.4,279.5 +546.4,279.8 +546,278.9 +546.3,275.5 +548.3,272.1 +546.2,266.2 +545.1,260.9 +549.9,264.5 +552.5,261.5 +550.4,277.1 +557.5,276.3 +559.7,279.6 +555.6,261.9 +559.8,261.4 +549.6,294.2 +557.5,281 +542,281.2 +544.6,280.7 +544,284 +542.6,289.4 +545.3,287.3 +546.8,285.1 +546.1,281.5 
+547.9,282.1 +552.8,280.5 +553.7,281.4 +553.9,296.2 +557.5,299.3 +556.7,291.5 +557.3,289.3 +558.7,282.2 +544.1,307.6 +541.6,302.4 +547.4,302.8 +548.4,300.2 +551.7,316.4 +556.5,311.5 +558.5,300.2 +559.6,339.8 +552.8,345.7 +550.6,354.9 +544,353.6 +546.7,359.6 +548.6,351.5 +549.8,352.7 +547.5,346.3 +549.1,349.3 +550.7,351.3 +554.1,353.8 +559.7,349.4 +557.8,347.6 +558,343.1 +557.4,344.5 +543.9,364.5 +542.3,364.8 +542.9,374.1 +547.1,377.8 +542.5,381.6 +543.7,389.3 +541.2,388.4 +546.1,413.9 +542.5,412.4 +541.9,416.1 +544.8,418.2 +546.6,417.4 +559.1,415.6 +558.8,419.8 +558.3,419.6 +540.6,420.8 +541.2,425.1 +544.2,432.3 +542.5,436.5 +546,433.9 +553.4,426.3 +553.1,429.5 +553.5,436.3 +553.7,438.5 +552,439.7 +557,439 +559.5,436 +557.7,432.2 +556.7,431.1 +555.9,429.8 +556.7,429.1 +559,426.1 +556.2,420.2 +559.7,422 +551.5,445 +540.6,443.7 +544.8,449.9 +543.6,449.5 +542.8,448.5 +540.7,456 +544.8,456.1 +546.3,458.5 +545.5,458.3 +549.4,455.5 +545.3,454.9 +545.4,453.7 +545.1,453.6 +545.9,450.3 +547.3,450.3 +547.8,451.4 +545.2,449.2 +547.1,447.5 +547.5,447.8 +545.3,445.6 +548,444.4 +545.8,440.7 +548.6,442.5 +553,441.3 +552.8,445.2 +552.7,448.4 +551.3,450.4 +552.1,450.6 +552.9,451.8 +552.1,452.4 +550.8,452.8 +551.5,452.4 +551.4,455.4 +551.8,456 +552.9,455.9 +555,458.5 +552,458.8 +551,458.8 +555.1,459.1 +559.7,457 +555.5,454 +559.7,450.3 +558.1,452.5 +558.1,454.6 +557.7,448.4 +558.1,445.2 +559,446.5 +557.4,447.4 +555.5,444.1 +555.2,442.9 +559.8,442 +557.8,442.3 +547.6,454.7 +544.3,460.9 +543.9,462.5 +544.3,464.1 +544.9,464.2 +544,477.4 +546.5,478.8 +545.4,477.5 +546.4,474.7 +546.5,473.8 +549.8,474.8 +545.5,460.1 +549.9,460.7 +548,463.9 +553.1,461.4 +554.6,460.6 +554.5,461.5 +554,461.9 +554.9,464.7 +550.8,464.5 +551.5,478.5 +557.5,475.4 +556.3,472 +555.9,469.4 +555.2,461.8 +556.3,463 +556.8,462.4 +555,460.2 +557.8,461.6 +558.3,460.6 +558.7,461.4 +542.2,485.2 +546.7,491.5 +548.3,490.9 +548,492.6 +550,493.8 +554.8,488.1 +550.1,492.1 +554,493.9 +551.8,494.4 +557.5,498.6 +555.1,495.9 
+555,493.8 +559.1,490.8 +558.3,491.8 +559.2,494.9 +555.6,489.9 +555.3,487.6 +558.3,487.3 +558.2,487.7 +558.9,484.4 +561.3,9.5 +563.3,5.2 +564.2,18.4 +565.4,15.9 +567.5,17 +566.8,15.5 +566.1,15.1 +566,10.7 +571.8,10.7 +575.1,19.4 +575.2,14.3 +579,21.3 +560.5,35.4 +561,36.4 +565,31.8 +569.2,32 +573.5,21.5 +572.6,39.2 +577,35.8 +576,32.4 +577.3,31.5 +560.1,40.2 +563.1,45.2 +560.9,53.4 +567.5,58.6 +568.6,48.8 +571.1,40.6 +571.7,47.8 +573.2,45.6 +578.5,40.8 +562.8,72.9 +570.6,73.1 +572.5,72.6 +572.2,74.8 +572.7,76.9 +572.8,75.1 +578.3,75.2 +576.9,75.1 +577.2,77.9 +564.8,98.6 +566.5,96.5 +567.2,98 +566.8,91 +566.2,87.1 +570.1,96.1 +577.4,97.8 +578.5,84 +564.9,102.6 +563.3,102 +562.1,108 +565.5,117.1 +569.2,116.4 +569.7,116.2 +567.4,116.3 +568.8,112.4 +568.9,114.2 +568.8,114.5 +567.1,108.5 +568.2,106.7 +567.6,103.3 +568.4,103 +572.8,109.1 +570.6,113.3 +573.2,110.7 +570.6,118.3 +570.3,118.4 +572.4,117.3 +574.1,117.8 +577.7,118.6 +578,116.7 +579.1,115.1 +578.2,113.4 +579.1,106.4 +577.8,106.6 +577.6,106.2 +575,107.4 +575.7,104.6 +576,108.6 +568.1,122.1 +574.3,121.7 +579.3,128 +577.4,124.9 +563.1,141.9 +563.5,144.6 +567,144.6 +577.5,159.5 +576,156.9 +561.7,163.8 +561.6,165.2 +563,165.4 +562.4,169.3 +564.1,169.4 +562.5,168.8 +562.5,173.8 +560.2,174.2 +560.9,174.9 +561.5,176 +562.8,178.5 +563.7,179 +562.7,177.5 +569.5,177.3 +565.5,171.1 +565.9,174.6 +566.3,174.8 +565.2,167.1 +570.9,163.6 +573,164.2 +573.9,162.1 +573.5,178 +579.2,167.1 +561.9,181.4 +564.3,193.3 +560.2,197.8 +561.6,197.6 +566.5,197.6 +567.5,196.1 +568.9,197.2 +572.2,182.4 +574,186 +571.9,197.7 +572.2,198.5 +574.6,195.7 +578.3,197.1 +578.4,189.7 +579.3,187.7 +576.4,185.8 +579,183.1 +578,181.2 +575.5,184.1 +566.7,219.3 +572.2,217.4 +576.8,219.6 +562.6,215 +569.1,204.2 +578.7,208.4 +578.1,206.9 +578.7,203.8 +579.3,202.1 +567.6,227.9 +569.8,238.9 +578,236.2 +576.3,235.6 +568.4,229.4 +577.8,223.3 +561.5,223.1 +568.6,236.2 +569.2,237.5 +569.8,237.7 +569.7,253.7 +562.7,242.4 +562.1,243.6 +565.3,246.5 +566.9,242.6 
+572,245.1 +570.2,248.4 +577.3,246.7 +572.4,267.5 +570.9,263.1 +562.1,263 +563.1,265.4 +564.3,272 +563,277.6 +569.3,276.8 +567.4,276.5 +569,268.8 +571.9,277.9 +577.5,277.4 +579.3,275.2 +578.5,274.8 +577.3,262 +564.5,280.8 +563.3,285.7 +565.3,294.5 +567.1,284.6 +567.2,280.4 +572.8,281 +571.5,297 +574.3,296.5 +579.8,285.3 +563.3,315.7 +578.4,319.9 +563.3,334.4 +566.9,333.3 +568.3,332.1 +565.3,331.8 +565.5,325.4 +576.9,329.9 +575,327.4 +576.5,325.8 +561.7,343.7 +562.3,345.9 +564.4,345.6 +569.2,356 +568.8,353.1 +565.8,345.5 +572.2,347.5 +571.9,350.2 +570.6,352 +572.5,353.4 +574.5,355.7 +579.1,355.3 +577.4,352.3 +576.7,352.4 +564.9,350.2 +576.4,349.2 +569.3,366.7 +568.4,364.1 +574.7,376.5 +577,383.3 +573.7,383.6 +563.6,387.3 +563.8,394 +568.1,397.6 +565.8,394.6 +568.4,389.4 +569.2,387.2 +573.1,392.6 +576.6,395.5 +575,390.7 +579.2,381.5 +561.6,389.6 +578.2,396.3 +567.3,417.2 +570.9,402.1 +565.9,408.1 +564.7,417.7 +565.8,408.7 +574.7,402.9 +571.3,407.4 +573.2,408.4 +574,410.1 +571.4,418.3 +571.7,417.4 +573.7,417.5 +577,412.6 +576.1,408.6 +578,403.7 +566.6,421.2 +575.6,423.8 +560.4,420.5 +564.9,422.4 +561.8,426.2 +564.8,431.5 +560.3,433.5 +561.8,437.1 +561.6,439 +563.8,439.4 +567.7,426.4 +572.2,423 +571.7,424.4 +572.4,421.8 +570.8,428.3 +577.8,426 +578.8,423 +576.2,423.6 +562.3,443.9 +563.9,443.9 +564.7,442 +563,442.2 +560.9,447.5 +560.6,449 +563.6,448.3 +560.2,450.4 +562.1,451.1 +562,453.4 +561.2,454.1 +562.9,453.2 +564.5,452.9 +563.3,451.4 +564.4,450.3 +561.2,457.2 +562.9,456.7 +563.8,457.9 +563.5,456.6 +565.2,457.8 +563.1,455.3 +568.9,452.1 +567,445.8 +567.2,444.6 +566.5,443 +575.6,456 +575.2,450.1 +560.6,462.1 +560.1,463.5 +560.6,464.6 +563.5,462.8 +564,461.4 +562.9,470.8 +562,472.5 +562.4,473.4 +565,473.6 +566.4,465.1 +571,463.6 +570.5,473.2 +573.7,473.9 +561.3,482.9 +560.2,486.1 +561.6,488.7 +560,493.7 +563.3,490.9 +569.3,493.8 +564.8,491.3 +570.6,484.2 +571.8,490.5 +571.5,495.6 +570.3,497.2 +570.5,499.6 +575.8,493.8 +579.1,490.7 +577.1,491.2 +578.6,489.6 
+576.5,483.4 +583.2,16.2 +582,18 +582.5,18.5 +584.7,19.8 +588.3,13.8 +585.4,2.3 +594.6,7.2 +592.7,12.5 +583.8,21 +584.7,21.4 +585,22.8 +581.9,23.9 +581.2,24.4 +581.2,21.8 +584.1,30 +589.3,36.1 +588.4,33 +589.7,32.6 +585.3,30.4 +589.4,29.2 +587,23.5 +585.5,22.1 +590.7,32 +590.6,33.9 +588.4,50.2 +588.7,52.6 +590.7,57.4 +594.8,59 +596.3,51.9 +595.8,50.4 +597.5,45.9 +597,45.1 +587.9,40.2 +585.7,68.9 +593.2,62.3 +598.9,66.3 +587.4,85.3 +595.3,81.4 +594,88.9 +583.4,96.3 +584.3,99.4 +586.9,99.4 +592.8,91.9 +593,94.7 +599,86.6 +598.9,85.1 +586.5,82.9 +581.8,102 +581.5,103.2 +583.7,104.3 +583.7,101.8 +584.1,100.5 +583.7,111.3 +583.5,111.8 +580.6,115.6 +586.3,106.4 +589,105.7 +592.6,102.3 +590.9,111 +597.7,117.3 +599.7,115.7 +599.8,103.4 +580.9,121.5 +583.8,123.7 +581.6,129.1 +588.4,136.5 +587.7,120.9 +589.3,124.5 +591.4,120.4 +593.7,122.1 +592.8,133.2 +590.1,134.9 +593.6,136.1 +598.9,125 +581,120.4 +583.5,130.2 +592,139.5 +580.2,143.1 +584.9,143.7 +580.1,150.1 +580.2,151.6 +581.2,152.1 +584.5,158 +580.5,155.4 +585.1,156.5 +588.8,157 +589,145.2 +587.4,147.2 +588.8,142.6 +585.2,141.9 +586,140.4 +592.2,151.6 +591.2,157.7 +591.1,158.4 +595.9,155.8 +598.7,156.4 +597.1,153.8 +580.8,169.3 +585,171.2 +582.4,174.2 +585,179.6 +592,170.2 +599.3,177.6 +598.8,161.1 +589.3,184.2 +581.1,185.1 +580.7,186.8 +584,191.1 +583.6,193.9 +581.3,193.2 +581.1,198.4 +592.4,192.1 +596.4,191.4 +595.9,189.4 +597,188 +599.3,188.4 +581.9,219.3 +581,210 +585.1,208.3 +580.6,210.1 +582.1,211.2 +580.2,211.6 +590,219.7 +585.2,203.2 +593.9,207.9 +595.8,215.5 +595.1,216.9 +589.5,238.9 +584.6,223.4 +581.6,238 +585.5,238.1 +583.1,244.4 +583,246.7 +580.7,264.3 +585,278.6 +584.4,278.1 +584.5,276.2 +585.1,275.3 +581.3,275.4 +587.4,277.4 +588.7,276 +588.9,276.5 +589.6,272.5 +592.9,267.7 +590.9,270.4 +591.9,270.5 +590.4,277.4 +597.3,275.7 +598.7,272.3 +599.6,261.7 +583,262.9 +584.4,284.1 +584.1,282.9 +583.3,286.5 +584.8,286.3 +584.4,293.4 +589.3,298.1 +587.9,282.8 +586.4,283.9 +586.8,282.8 +590.8,280.8 +590.4,281.2 
+590.6,292 +593,290.8 +595,293.7 +593,295.5 +593.5,295.9 +595.3,293.8 +580.1,315.5 +588.5,318 +588,317.8 +588.8,313.8 +589.2,301.2 +594,305.5 +594.3,304.9 +593,304 +594.5,301.5 +590.5,314.5 +594.2,315.5 +593.6,311.6 +593.9,311.6 +596.6,317 +599.1,316.9 +596.6,314.7 +595.5,301.5 +596.8,304.4 +595.6,305.2 +598.4,302.2 +596.1,320.3 +582.4,324.1 +583.9,323.1 +587.5,339.7 +594.9,321.6 +595,324.3 +597.4,323.1 +583.3,356.2 +584.1,357.9 +585.7,346.6 +590.4,344.6 +593.4,343 +590.62,349.66 +594,346.2 +594.4,353 +597.2,356 +596.5,353.7 +599.2,353.9 +595.1,343.7 +584.3,361.9 +586.6,379.6 +586.4,361.5 +593,375.1 +594.7,372.1 +593.7,370.5 +595.7,379.4 +595.5,365.1 +580.7,381.8 +580.3,391.5 +581.2,394.8 +583.3,394.6 +587.1,398.3 +585.2,391.5 +589.6,391.1 +587.7,387.6 +586.3,388 +587.7,389 +590.2,382.5 +590.3,387.5 +593.1,390.3 +592.6,396.1 +593.1,398.5 +596.7,397.9 +598.6,398.8 +598.9,398.5 +599,398.3 +599.7,399.3 +599.8,398 +597.7,390.7 +597.6,386.7 +599.2,389.3 +597.6,380.8 +582.2,405.4 +584.8,417.8 +588.5,417.5 +586.8,417 +587.1,416.6 +588.6,407.3 +588.8,402.5 +588.1,401.6 +586.5,401.8 +590.3,402 +591.1,402.5 +594.3,405.2 +593,405.8 +592.5,411.8 +591.3,419.5 +596.7,405.9 +596.8,407.4 +596.3,408.5 +597.8,405.7 +595.8,400.6 +595.1,405.2 +598.1,405.4 +598.7,401.8 +598.3,401.1 +587.2,408.4 +582.7,406 +589,433.4 +582.6,423.6 +590.5,426.1 +591.5,424.2 +598.7,428.1 +581.8,432.8 +595.1,434.6 +595.8,423.1 +586,453.1 +588.2,452.6 +587.7,452.6 +586,448.1 +592.1,458.5 +597.8,456.9 +596.8,459.8 +598.1,445.1 +583.7,463.1 +584.9,472.9 +582.6,478.1 +588.2,462.3 +590.8,464.8 +592,465.2 +590.1,473.2 +597.7,474 +599.8,474 +599.3,468 +598.8,466.4 +597.4,461.5 +597.5,464.2 +597.9,464.7 +590.1,470.3 +596.7,480.7 +583.9,482 +581.3,493 +585.9,498.8 +594.2,484.2 +591.4,491.9 +591.1,493.8 +591.6,497.9 +596,491.6 +598,486.2 +610.6,2.4 +618.8,5.6 +604.4,6 +603.2,10.4 +616.7,0.5 +603.6,32.2 +612.7,28.5 +610.7,22.7 +610.5,21.5 +612.9,40 +600.1,52.1 +603.2,56.7 +608.8,54.1 +611.3,51.9 +610.8,56.4 
+614.2,57.3 +617.8,58.6 +617.2,59.9 +619.6,50.9 +616.4,48.3 +619.1,44.6 +602.9,61 +600.2,70.3 +606.7,74.8 +609.3,70.3 +609.7,72.3 +606.8,74.2 +609.8,68.4 +609.1,68.9 +607.5,62.6 +614.2,63.4 +610.5,66.2 +611.4,71.4 +613.9,72.1 +614.4,72.6 +614.1,73.6 +613.9,75.2 +611.5,74.9 +616.8,77.2 +618.2,77.7 +616.7,73.2 +617.7,73 +618.2,69.6 +617.9,61 +619.8,63.3 +617,88.2 +606.4,82.2 +601.9,87.7 +609.8,96.2 +606,85.4 +606.2,83.9 +614,96.1 +616.4,98.7 +603.5,106.9 +602.2,119.5 +615.1,101.2 +615.1,118.3 +612,117 +602.1,122.4 +603.4,125.9 +603.1,132.8 +601.2,134.4 +605.3,137.2 +618.4,137.4 +615.8,130.2 +608,147.2 +603.2,141.3 +602.5,148.4 +600.9,151.3 +601.3,153.1 +605.7,159.4 +604.7,148.5 +607.7,143.3 +611.4,150.3 +618.6,154.8 +619.5,144.5 +607.7,163.4 +600.3,160.5 +603.7,161.9 +600.1,176.2 +608.2,172.4 +609.8,173.3 +609.8,174.8 +608.4,173.7 +608.3,160.9 +609.1,162.8 +613.8,160.7 +614.4,163 +613.5,163.1 +600.3,197.3 +609.7,195.7 +610.6,199.2 +615.4,199.6 +616.2,198.2 +616.4,199.8 +617.8,188.3 +619.4,196.1 +619.3,208.1 +604.2,201.2 +602.4,206.9 +603.3,208.9 +601.8,209.6 +600.8,212.5 +608,212.4 +606.9,209.2 +606.4,205.1 +608.3,205 +609.4,203.7 +614.6,200.6 +612.6,215.4 +613.2,216.7 +616.8,209.4 +616.5,202 +617.8,200.5 +606.2,200.9 +604.7,224.7 +601.3,226.9 +603.2,227.2 +603.5,227.6 +605.8,239.3 +605.6,230.1 +607.3,228.6 +606.6,226.3 +615,221.4 +614,222.6 +610.9,224.8 +616.2,238.1 +619.7,235.7 +615.3,231.9 +605.5,233.8 +619.8,232.6 +606.2,258.5 +608.7,247.5 +611.5,251 +613,251.4 +613.9,250.5 +618.8,258.7 +615.4,254.4 +617.4,251.4 +615.3,249 +616.3,248.6 +617.4,245.5 +618.9,246.3 +602.8,262.9 +602,262.8 +603,266.4 +600.8,268.2 +601.3,270.5 +611.4,260.8 +613.9,262.8 +612.4,276.9 +615.9,266.7 +613.8,289.6 +603.6,282.3 +600.2,294.2 +603.4,299.5 +606.1,295 +609.3,299.5 +612.3,284.6 +616.8,283.8 +618.8,280.5 +610.7,319.3 +602,301.2 +603.3,302.9 +601.2,303.2 +600.1,309.9 +600.1,312.2 +602.7,318.1 +601.8,319.3 +608.8,317.2 +606.7,306.7 +605.5,305.2 +610.7,313.6 +611.3,311.1 +619.3,315.2 
+619.4,310 +619.3,313.3 +618.8,313.7 +616.5,308.3 +607.4,339.5 +602.3,320.8 +602.6,320.4 +603.9,320.3 +602,322.5 +607.5,336.9 +609,337.4 +606.6,331.7 +608.8,333.5 +609.3,326.2 +614.7,324.6 +614.1,326 +614.7,329.8 +610.2,330.7 +612.9,330.2 +614.8,333.6 +617.8,337.4 +617.6,330.1 +617.7,332.5 +618,333 +617.8,333.8 +616.2,328.2 +616.9,337.9 +615.9,337.2 +602.8,354.3 +606.4,343.9 +611.8,345 +610.6,348 +617,351 +615.2,345.8 +618.6,344 +607.2,362.5 +613.8,360.6 +616.4,376.6 +619.3,372.4 +615.3,361.1 +618.6,360.7 +607.7,360.2 +604,389.3 +601.7,398.7 +600.8,399.5 +601.6,399.8 +607,393.6 +608.4,386.9 +608.8,388.7 +605.2,384.8 +608.5,383.1 +612.4,399.6 +610.3,398.5 +610.6,414.4 +616.8,410.6 +601.5,401.1 +600.6,404.7 +601.4,403.2 +600.3,405.8 +601.6,406.6 +601,407.5 +603.9,405.8 +604.5,406.5 +607.6,414.5 +611.2,401.2 +613.9,403.1 +616.5,404.3 +603.4,425.5 +601.2,425.5 +604.9,426.8 +605.6,424.3 +617.5,436.1 +616.8,434.7 +600.9,441.1 +600.6,446.8 +603.2,447.9 +602.8,448.6 +603.1,457.7 +605.3,454.2 +607.4,446.2 +605.5,441.7 +614.2,444.4 +613.5,446.4 +614.5,446.5 +614.3,449.1 +613,457.9 +615.6,459.7 +615.8,459.2 +615.2,456.9 +617.6,455.6 +616.5,453.1 +617.8,452.4 +618.8,453.7 +618.8,446.2 +619.4,441 +601,460.8 +601.2,462 +601.2,466.9 +600.8,478.3 +608.9,473.5 +607,463 +612.6,477.7 +614.9,472 +619.2,470.2 +619,465 +619.4,469.6 +618.9,462.4 +618.5,463.5 +618.3,463.2 +617.6,461.8 +602.5,481.1 +602.4,482.2 +601.5,482.5 +602.5,485.4 +602.2,486.4 +603,489.6 +605.1,487.2 +606.9,484 +618.3,496.6 +622.5,4.3 +623.2,13.9 +630.5,2.2 +630.8,4.3 +634.6,0.6 +636.1,10.1 +637.2,5 +639.6,1.2 +635.9,1.5 +637.4,25.7 +633.5,53.6 +621,42.6 +621.1,46 +623.6,47.7 +620.9,53.6 +623,54 +629.2,56.6 +628.5,55.9 +627.7,56.2 +626.3,55.7 +627.9,55.2 +634,54.8 +632.1,56.7 +633.1,59 +639.8,57.8 +639.3,59 +629.9,55.6 +627.8,45.3 +636.4,60.2 +621.7,61.6 +621.2,61.9 +622.1,63.1 +623.8,62.4 +624.4,60.8 +623.8,63.8 +623.1,64.7 +621.7,65.6 +621.3,66.8 +623.4,67.4 +624,70.1 +620.7,76.6 +626.5,69.9 +625.8,68.6 +628.8,68 
+632.8,61.7 +630.2,61 +632.1,63.2 +633,63 +634.1,64.9 +635.9,65 +635.7,67 +635.6,67.8 +638.5,67 +635.6,64.7 +635.9,64 +637.2,62.1 +637.5,98.8 +623.8,95.5 +628.1,98.5 +628.5,94.3 +627.5,94.5 +627.1,93.8 +626,92.2 +632.1,89.8 +635,86.7 +631.2,92 +636.1,98.3 +639.5,88.8 +622.1,100.9 +623.5,102.8 +624.4,113.3 +628.8,113.8 +628.3,111.9 +627.7,111.3 +626,113.8 +626.9,108.9 +630,102.3 +630.8,111.9 +637.4,118.3 +623.8,122.8 +624.8,120.4 +620.8,124.8 +621.8,128.9 +624.9,133.2 +624.2,138.9 +623.7,136 +625.7,137.5 +625.4,139.7 +626.8,139.8 +628.7,139 +629,130.9 +628.7,130.1 +628.3,127 +628.3,123.9 +625.9,124.3 +625.5,122.3 +627.5,122.5 +633.3,124.8 +634.4,126.9 +633.7,131 +634.1,136.4 +635.8,135.4 +635.7,133.8 +622,142.3 +621.2,144.8 +621.9,144.9 +622.4,150.8 +621.2,151.1 +625.4,156.7 +625.1,150.2 +632.8,150.6 +633.1,159 +620,157.8 +621.7,163.2 +622.8,165.2 +622.2,176.4 +620.1,196.5 +625.2,197 +628.5,199.3 +627.3,192.1 +633.5,194.4 +630.2,199.3 +634.7,197.9 +634.7,195.2 +635.2,192.1 +636.3,186.8 +638.4,180.5 +622.6,214.9 +626.4,206.8 +629.5,208.7 +629.7,208.2 +628.4,207 +629.3,205.7 +629.1,205.3 +630.1,203.6 +630.5,204 +631,204.8 +633,203.8 +632.2,202.5 +631.6,202 +630.8,205.7 +631.5,205.6 +630.2,207.8 +630.5,208.9 +635.7,217.6 +638.3,219.3 +636.8,216.6 +638.9,209.5 +637,206.2 +630.4,218.9 +624.1,226.1 +624.4,235.6 +625.6,235.2 +629.2,236 +627.2,233.3 +626.7,228.2 +628.1,224.4 +628.4,224 +630.2,224 +632.1,224.6 +633.2,222.8 +634.5,221.4 +631,221.8 +634.6,234.4 +632.4,236.5 +637.1,226.7 +639.6,226.1 +635.4,221.2 +638.7,222.5 +624.5,243.4 +620.4,249.4 +620.3,252.7 +628.8,257.6 +626.3,245.9 +628.5,248.6 +629.9,240.6 +630.8,240.4 +631.9,243.9 +630.7,247.4 +632.5,249.9 +633.2,250.3 +633.8,248.1 +634.4,247 +637.5,253.1 +638.2,252.1 +635.1,247.7 +637.8,248.1 +636.8,243.9 +639,243.7 +639.1,241.1 +621.1,260.6 +620.7,270.9 +624.3,274.4 +625.8,278 +629,277.9 +625.4,272.6 +626.4,273.1 +627.6,274 +629.1,265.7 +630.5,261 +633.3,264.4 +634.6,260.4 +631.7,271.9 +632,273.7 +630.3,277.3 
+633.2,275.1 +638.7,279.2 +639.2,278.2 +638.8,277.9 +637.3,277.2 +621.1,282.1 +624.7,283.2 +626.1,297.8 +624.2,281.6 +622.9,290.1 +624,285.1 +624,294.6 +624.4,292.3 +624.2,291.8 +628.8,299.4 +629.4,295.9 +626.6,296.5 +627.9,296 +626.7,295.2 +627.7,294.3 +629.4,292.8 +627.7,287.5 +625.5,287.1 +629.3,287.8 +629.9,286.5 +627.4,285.3 +625.4,281.9 +630.3,283.9 +629,281.1 +629.1,280.8 +627.9,280.1 +630.6,280.1 +633.5,284.1 +634.7,283.2 +633.3,280.7 +630.6,286.6 +631.5,286.4 +632.4,287.2 +631.2,288.6 +631,289.4 +633.7,289.3 +632.7,286 +634.2,286.6 +634.3,286.3 +634.5,287.6 +631,290.8 +631.1,291.2 +631.5,292 +630.8,292.3 +629.1,292.4 +634.7,294.4 +631.1,297.7 +634.1,297.7 +634.6,296.1 +633.4,295.2 +632.3,296.2 +635.7,296.3 +635.2,297.4 +639.3,295.2 +639.4,294.4 +639.5,291.4 +635.1,285.8 +634.9,287.3 +636.7,285.2 +636,283 +636,284.2 +637.8,281.9 +631.6,296.3 +628.5,289.6 +626,289 +625.9,318.8 +620.2,302.9 +623,300.4 +623.1,309.4 +627.1,302.9 +629.7,300.3 +630.8,303.2 +633.1,302.4 +633.7,302.9 +633.7,303.6 +634.1,302.7 +634.1,306.8 +634.4,305.6 +635,306.1 +636.1,301.9 +639.1,301.3 +621.3,322.9 +622.1,329.6 +623.9,326.1 +620.7,330.8 +621.1,331.2 +621.7,333.9 +623.3,333.5 +624.2,334 +624,329.3 +620.6,335.9 +623,338 +623.9,337.1 +621.5,335.7 +626.1,337.5 +626.1,330.3 +626.1,332.1 +626.8,332.6 +629.6,334.5 +629.6,331 +626.2,329 +639.6,320.2 +624,327 +622.8,334.8 +623.5,333.3 +623.2,354.4 +621.3,357.9 +625.9,358 +628.9,357 +625.2,350.8 +625.4,342 +629.4,343.6 +633.3,347.9 +630.3,358.9 +632.1,356.8 +637.5,356.8 +636.5,350.7 +637.6,351.9 +621.1,363.7 +623.3,379.5 +626.8,362.1 +631.5,375 +631.4,376.6 +635.5,375.5 +638.8,376 +637.2,372.4 +635.8,367.9 +636.3,389.7 +633.8,391 +623.3,384.1 +625.9,391.7 +626.1,386.5 +638.9,382.8 +638.1,398.5 +638,387.5 +620.6,414 +620.5,415.3 +628.2,409.1 +631,409 +633.2,418.6 +638.8,413.6 +624.5,423 +624.8,434.3 +626.4,437.2 +629.4,438.9 +629.2,436.7 +627.3,437 +626,430.9 +627.8,434.5 +631.8,426.9 +632.4,429.2 +631,430.4 +631.6,431.1 +631.2,437 
+633.5,436.7 +633,435.4 +635.4,439.3 +636.4,435.5 +639.5,434.5 +635.1,426.7 +636,427.5 +637.2,427.6 +639.2,429.4 +637.9,425.6 +636.9,420.5 +631.6,436.5 +621.3,441.3 +623.8,440.8 +623,447.5 +621.9,447.4 +625.1,459.7 +627.1,455.5 +625.8,451.3 +629.5,450.7 +629.2,444.5 +631.9,442.4 +632.7,443 +632.7,452.3 +633,452.1 +633.3,451.6 +632.2,450.4 +635.6,451.2 +636.7,446.5 +637.4,447.3 +635.9,448 +637.8,443.3 +620.7,461 +620.7,461.3 +620.2,463.5 +624.2,461 +625.2,461.1 +626.2,464.9 +627.7,464.4 +633.6,467.1 +630.9,470.8 +633.8,473.4 +635.9,470.6 +639,471.2 +635.7,469.8 +638.8,466.5 +636.8,463.5 +637.7,464.8 +638.2,464.8 +638.8,461.2 +620.6,486.1 +623.9,491.2 +621.1,495.5 +621.6,497.8 +620.5,499.1 +629.1,494 +631.3,490.9 +631,492.6 +633.6,491 +632.5,496.6 +630.9,497.9 +633.9,499.2 +638.8,495.9 +639.2,489.3 +639.5,480.5 +640.5,3.7 +642.8,18.7 +648.9,16.2 +645.7,6.4 +646.1,1.1 +646.8,4.4 +648.1,4.3 +649.4,2.5 +650.2,2.3 +651.1,2.3 +651.2,4.3 +650.6,8.1 +651.7,5.9 +650.8,10.8 +651.9,12.7 +653.1,14.4 +653,12.9 +652.3,16.3 +656.8,10.8 +656.6,13.8 +655.4,9.1 +659.3,9.8 +659.6,9 +654.1,11.9 +651.8,25 +659.8,35.9 +640.8,25.1 +643.6,34.3 +644.4,34.6 +649,35.3 +652.1,37.2 +658.8,38.7 +657.5,36.2 +657.8,34.3 +658.4,24.9 +656.5,56 +642.5,49.8 +644.4,54 +644.4,51.7 +649.4,52.1 +648.6,51.9 +645.8,48.5 +645.6,49.4 +646.3,49.3 +653.6,49.5 +655.6,41.9 +640.5,61.8 +641.8,63.7 +644.9,60.1 +641.7,65.3 +648.5,62.8 +654.5,79.6 +651.1,86.8 +640.9,86.9 +642,92.9 +645.8,86.5 +649.3,84.8 +658.9,95.1 +659.8,94.7 +640.3,109.3 +641.99,115.13 +642.9,115.2 +658.9,118.3 +659.6,117 +642,126.7 +641.6,127.1 +640.8,127.1 +644.7,129.9 +644,126.4 +641.8,131.9 +644.7,134.7 +648.2,130.8 +647.5,125.5 +649.5,126.7 +645.1,124.4 +647.6,122 +650.5,131.1 +652.2,130.6 +652.3,132.1 +653.3,134.7 +654,132.4 +653.3,130.1 +655.2,138.5 +656.8,137 +657.9,137.8 +655.2,131.6 +658.5,134.3 +659.3,134.9 +658.6,132 +658,131 +657.9,130.2 +655.1,125.9 +657.8,128.6 +658.4,129.1 +657.1,125.9 +658.1,123.1 +658.1,131.3 +656.9,147.7 
+646.5,152.8 +648.5,151.3 +649.5,150.5 +647.7,150.7 +645.9,145.3 +646.2,147.7 +649,149.6 +653,144.7 +654,140.4 +650.4,148.5 +653.6,149.4 +653.3,150.8 +658.1,155.1 +657.5,159.1 +656.5,153.1 +659.5,150.4 +655.8,147.9 +655.5,141.4 +658.6,144 +657.7,140.8 +640.8,175.6 +645.3,175.8 +650.7,165 +656.4,173 +659,173.7 +657.9,170 +644.9,180.6 +650.4,199.2 +654.6,197 +650.8,186 +642.8,186 +647.4,195.9 +656.7,199.4 +657.4,198.9 +640.3,215.9 +640.6,218.9 +641.5,218.7 +645.1,211.2 +650.2,202.5 +658.5,218.9 +659.2,215.6 +656.5,207.6 +656.9,208.9 +658.8,207.7 +658.1,201.9 +644.5,224.9 +640.9,229.7 +644,229.8 +644.9,229.7 +644.1,233.6 +646.5,238.4 +647.7,239.4 +645.3,222.4 +652.9,224.7 +653,223.5 +653.8,221.9 +651.3,226.1 +653.9,229.1 +653,230.4 +652.6,236.7 +653.1,239.2 +654.7,235.3 +657.6,237.3 +658.7,238.5 +659.8,237.8 +656.7,234.1 +657.4,233.7 +658,234.1 +658.7,234.8 +659.8,233.2 +655.2,226.3 +655.8,228.3 +657,228.4 +659.4,226 +658.8,225.7 +655.1,222.4 +657.3,222.9 +657.2,224.5 +657.1,224.7 +659.6,224.4 +658.1,220.2 +658.8,234 +655.3,239.6 +654.9,224.4 +640.6,240.9 +641.1,241.1 +643.2,244.4 +643.9,240.2 +644.7,249.2 +643.1,245.4 +641.4,254 +641.8,253.5 +641.3,251.7 +644.6,251.6 +643,251.8 +642.1,258 +644.4,258.5 +645.6,253.6 +647.6,251.6 +647.4,249.7 +646.6,243.5 +651.3,251.3 +657.2,257.3 +656.9,259.1 +659.5,255.9 +657.9,255.8 +655.7,252.5 +656.1,254.2 +656.5,253.4 +657.8,253.2 +659.2,251.3 +656.5,248.5 +656.2,241.2 +655.7,243.3 +640.6,259.3 +654.2,245.4 +641.3,263.2 +644.4,264.7 +642.2,265.5 +643.7,266 +641.9,271.4 +643.6,273.6 +643.6,274.4 +641.9,274.6 +642.5,274.7 +644.5,271.9 +642.6,276.2 +640.2,279.8 +647.6,279.3 +645.5,273 +646.6,273.4 +649.7,270.2 +647.4,269.4 +647.8,265.3 +647.1,261.3 +651.7,271 +650.5,279.1 +655.5,266.2 +643.4,294.5 +644,293.5 +644.1,293.3 +642.3,291 +640.9,295.1 +642.9,295.9 +647,296 +649.3,292.8 +649.3,291.2 +645.9,285.4 +650,285.9 +645.6,280.2 +646.4,282.7 +649.2,284.5 +647.9,281 +648.1,282 +651.5,284.6 +652.3,288.6 +650.5,288.6 +653,286.7 
+650.9,291.4 +652.5,292.6 +651.9,294.5 +652.8,295.2 +654.9,295 +659.4,296 +659.7,282 +657.9,281.3 +651.2,303.8 +640.5,307.8 +652.4,306.7 +654.1,307.2 +653.7,315.8 +653.1,318.1 +650.4,319.6 +655.6,315.1 +640.5,311.6 +644,315.2 +647,319.3 +649.3,314.4 +647.2,310.8 +646.7,305.7 +655.7,318.6 +658.6,316.6 +655,309.6 +657.1,306.9 +659.3,305.4 +656,300.4 +643.1,322.9 +645.1,322.9 +641,327.4 +641.4,327.7 +643.4,330.9 +649.8,333.9 +647.4,329 +647.9,324.2 +648.9,321.9 +652.7,320.4 +654.2,334.7 +651.5,339 +655.2,339.7 +652.9,336.3 +657.4,335.3 +644.3,344.2 +647.6,352.4 +648.3,354.8 +646,347.5 +649,346.4 +645.8,342.1 +650.9,340.2 +652,340.4 +650.6,343.1 +652.9,347.9 +651,352.8 +657.5,358.1 +655.8,358.8 +656.6,358.9 +659.3,350.2 +656.1,346 +658.1,340.3 +655.5,343.7 +640.4,354.5 +649.2,376.8 +646.8,379.4 +653.6,376.5 +640.5,361 +642.4,362.5 +640.3,368.8 +644.3,365.5 +644.1,371.5 +644.7,377.4 +645.4,376 +645.9,375.2 +647.1,372 +649.6,372.9 +649.1,370.8 +646.1,367.9 +649,366.9 +650.3,370.4 +653.7,370.2 +652.7,376.2 +653.7,378.4 +658.7,371.9 +655.3,366 +659.3,364.6 +640.6,387.5 +641.1,387.4 +640.4,396 +643,399.6 +644.4,399.6 +645.8,385.3 +645.5,389 +648.8,386.3 +645.4,382.4 +654,382.5 +653.9,385.7 +651.5,399.5 +658.7,399.3 +658.8,396.6 +658.8,395.7 +652.3,393.6 +655.1,385.4 +659.6,384.4 +642.1,404.1 +644.2,404.1 +642.5,400.4 +641.7,406.4 +644,413.4 +642.2,417.6 +644.4,416.9 +644.8,418.9 +645.6,406.4 +648.7,406.2 +652.5,405.3 +658.7,411.2 +659.3,410 +658.1,410.4 +655.9,400.2 +643,400.2 +642.7,409.1 +646.6,415.2 +640.9,420.3 +644.1,422.5 +640.5,438.8 +646.2,437.8 +649.8,433.5 +647.1,420.3 +645.3,421.6 +645.2,422.9 +645.8,424.3 +648.3,423 +658.6,431.8 +659.3,424 +658.7,423.4 +658,421.1 +644.8,424.7 +645.7,430.5 +654.1,424.1 +654.5,421.2 +640.2,440.7 +642,442.2 +644.3,441.3 +642.4,446.9 +644.3,448 +641.7,445.2 +642.3,452 +644.7,455.3 +645.5,459.5 +649,453.5 +649.2,444.1 +654.8,446 +650.8,450.7 +651.3,454.4 +654.8,454.6 +654.9,450.2 +653.2,450.3 +652.5,458.4 +656.3,452.3 +658.5,451.4 
+657.8,451.2 +658,450.1 +659.4,446.2 +659,445.5 +656.4,441 +641.8,441.4 +640.7,462.5 +644.5,460.4 +642.9,460.1 +640.7,468.4 +640.4,468 +640.5,468.7 +643.2,469.7 +640.3,470.7 +640.5,471.1 +641.9,471.8 +644.6,470.7 +641.2,475.3 +640.8,478.4 +644.5,478.6 +643.1,477.4 +646.4,476.4 +648.1,479.8 +646.1,466.2 +653.5,460.5 +650.3,465.3 +652.4,467.2 +653.4,472.1 +656.4,475.8 +656.2,479.1 +658.6,477.4 +657.7,475.4 +659.9,474.2 +657.6,467.3 +659.7,460.7 +642.5,482.9 +643.9,480.9 +642.4,489.2 +643.4,486.1 +640.2,491.7 +642.1,495.8 +645.5,490.8 +645.8,485.9 +649.3,486 +654.3,481.3 +652,486.6 +659.4,499.2 +658.4,493.2 +656.1,483.8 +656.3,484.2 +656.5,482.9 +658.6,483.4 +659.6,481.1 +661.7,8.2 +661.9,10.2 +662.5,11.1 +662.2,14.3 +660.3,14.6 +661.4,14.9 +662,16.9 +671.9,19.3 +674.7,19.4 +676.9,18.3 +677.8,19 +675.2,18 +661.8,19.6 +667.6,11.5 +664.4,23.5 +663.5,24.4 +662.5,24.6 +662.3,33.2 +661.5,35.1 +662.5,35.4 +663,39.1 +661.2,38.6 +662.3,37.8 +669.1,33.3 +670.5,26.8 +668.5,25.7 +670.8,22.3 +672.8,20.3 +671.7,27.6 +679.2,37.3 +679.4,29.8 +676.5,25.8 +679.8,25.6 +671.2,21.5 +669.3,53.8 +677,55.5 +677.2,56.2 +678.1,57 +661.1,61.2 +664,62.2 +664.1,63.9 +663.6,65.6 +663.3,70.6 +663.5,72.9 +663.3,74.6 +663.9,74.9 +661,76.5 +660.5,76 +664.3,79 +667.3,76.1 +666.7,75.1 +669.8,76.7 +667.5,74.6 +665.3,73.4 +665.9,71.8 +666.8,72.1 +666,69.7 +666.9,69.5 +666.3,67.6 +665.6,66 +665.4,60.8 +672.3,61.9 +674.8,63.1 +672.2,63.6 +671.7,64.1 +670.2,65.2 +670.1,66.9 +671.4,68.1 +673.9,72.1 +671.1,72.3 +673,74.9 +670.5,78.3 +678.5,77.7 +678.7,73.5 +679.1,72.6 +679.4,72 +679.3,70.8 +676.7,72 +677.6,75.1 +678.8,73.7 +675.6,68 +675.7,83.4 +664.4,81 +661.7,99.6 +668.5,97.8 +667,94.4 +666.5,83.6 +661.4,81 +665.2,80.7 +672.3,82.2 +675,89 +674.4,89.5 +678.1,91.2 +678,88.7 +675.5,85.9 +675.6,83.2 +676,82.8 +661.1,107.4 +664.3,112.7 +660.8,116.7 +660.4,118.4 +663.5,119.6 +666.4,113.7 +674.9,105.6 +671,111.8 +677.9,122.4 +671.6,138.2 +660.9,121.5 +662.4,121.4 +662.3,125.3 +660.5,124.7 +664.8,125.8 +663.7,127.7 
+660.6,127.8 +662.2,130.7 +662.4,133.9 +660.4,136.7 +664.7,136.6 +665.3,138.8 +669,130.2 +666.7,122.5 +669.8,120.5 +670.2,124.7 +676.2,135.2 +672.9,124.6 +668.4,144.3 +676,140.9 +661.5,144.7 +660.7,147.8 +661.5,149 +661.9,149 +660.3,152.3 +661.7,150.9 +664.5,153.1 +663.8,152.9 +662.2,154.3 +661.7,156.8 +660.3,157.9 +660.5,159.6 +662,159.2 +667.6,156.5 +665.3,153.9 +666.2,154.1 +665.8,153 +665.2,150.6 +668.4,149.8 +665.5,147.1 +671.5,156.1 +679.2,156.8 +679.6,153.7 +675.2,151.3 +676.1,148.7 +676.3,147.5 +678,160.5 +664.8,163.4 +661.9,164.3 +662,166.8 +675,166.2 +675,167.6 +672.6,166.8 +679.1,170.1 +677.2,170.8 +676.5,166.4 +678.3,168.5 +676.4,168.9 +665,195.1 +669.9,199.9 +674.3,184.5 +673.9,200.9 +678,207 +665,202.4 +660.2,208 +660.3,210.9 +662.9,216 +661.6,215.7 +661.5,219.1 +670.4,206.8 +679.5,219 +661.4,223.9 +663,224.6 +662.7,222.7 +664.9,222.3 +664,226.4 +660.1,227.6 +663.6,227.5 +661.2,226.6 +663.8,232.9 +661.6,232.6 +661,232.9 +661.5,235.9 +660.4,239.7 +669.8,234 +668.4,233.6 +666.8,233.5 +666.4,232.7 +672.4,226.9 +672.5,232.8 +670.6,237.5 +671.5,237.4 +677.2,224.1 +663.7,252.6 +676.5,242.2 +678.3,251.4 +665.8,257.2 +667.9,256.7 +669.3,256.5 +665.2,253.3 +668.5,253.4 +669.4,254.5 +671,253.3 +673.5,251.3 +666.8,248 +666.3,249.6 +669.7,242.1 +666.7,243.8 +672.9,242.1 +672.1,243.7 +673,244.7 +670.4,249.8 +675.5,257 +676,251.9 +675.9,249.9 +675.9,247.2 +676.7,247.2 +679.3,245.1 +668.1,266.8 +676,285.5 +679.4,287.5 +664.4,282.8 +661,286.5 +667.3,295.8 +668.2,296.2 +665.2,297.9 +673.6,295.3 +674.3,296.9 +678.7,295.3 +671.7,297.9 +668.7,319.7 +669.2,314.8 +670.8,307.6 +661.7,302.7 +661.3,306.8 +661.8,309.8 +664.1,319.5 +661.4,317 +668.3,304.2 +668.7,302.5 +673.5,303.4 +673.2,309.9 +673,309.9 +675.2,319.3 +675.7,311.2 +675.5,307.2 +670.6,333.3 +666.9,337.9 +669.3,336.5 +670.4,332.8 +672.6,324.2 +673.2,336 +674.2,336.4 +674.8,339.2 +676.1,339.2 +673.5,336.8 +677.7,321.8 +673.5,320.7 +663.4,344.2 +661.9,342.2 +664.5,340.8 +662.6,350.7 +666.6,356.8 +668.7,359.5 
+669.9,352.8 +668.4,347 +669.8,349.8 +669.8,349.4 +666.3,341.6 +667.2,341.5 +668.4,342.4 +668.2,341.1 +667.7,344 +674.4,340.4 +673.8,344.3 +673.9,346.5 +674.1,347.8 +671.3,348.3 +671.6,351.3 +670.8,353.4 +670.4,354.7 +672.6,356.9 +670.8,358.8 +670.1,359.5 +674,357.7 +677.8,356.6 +679.3,356.1 +675.6,350.8 +675.8,347.8 +678.2,346.6 +675.6,340.6 +665.7,374.6 +668.4,367.1 +664.1,361.2 +662.5,363.4 +660.4,368.5 +669.5,360.8 +671.7,360.6 +674.7,361.1 +660.2,382.9 +661.2,388.3 +662.4,398.8 +660.6,397.2 +669.7,397.6 +671.9,398.7 +673.8,398.6 +674.8,397.8 +677.3,397.5 +677.8,397 +678.5,394.2 +677.3,394.3 +679.8,391.2 +663.9,391.9 +666.5,408.1 +663.1,404.9 +661,406.8 +664.6,407 +665.7,406.1 +666.8,404.3 +665.8,401.2 +671.7,405.1 +673.9,406.1 +671.5,416.1 +673.8,419.7 +677.3,418.3 +676.2,417 +679.6,410.8 +677.8,407.6 +677.2,401 +670.4,405.5 +665.1,434.6 +661.6,428.8 +662,428.8 +660.5,426.8 +661.8,426.6 +662.6,431.9 +663.8,436 +669.6,436.6 +666.8,422 +665.2,420.1 +671.5,425.1 +671.4,425.9 +676.5,433.6 +660.1,441.9 +660.1,443.4 +662.5,444.7 +661.2,448.4 +661.8,448.2 +664,452.3 +661.5,455.9 +663.3,456.1 +661.2,459 +660.5,457.9 +666.1,457.2 +665.2,452.4 +668.6,450.9 +665.9,451.5 +667.6,450 +666.8,443.2 +672.3,441 +670.5,451.6 +671.4,454.1 +672.3,454.2 +671.4,456.5 +668.8,457.8 +662.5,461.6 +661.3,474.6 +664.6,477.1 +661.2,478.9 +668.6,479.3 +666.5,472.8 +669.3,471.9 +665.3,472.5 +665.4,470 +666.8,461.1 +665.2,462.6 +666.9,460.3 +673.7,471.2 +675,473.3 +672.6,476.3 +671.8,477 +679.5,479 +679.5,466.9 +671.5,473.8 +678.7,478.1 +663.2,490.2 +663.2,496.1 +666.5,494.9 +668.4,491.5 +669.4,488.4 +666.6,488.1 +667.4,487 +666.7,481.5 +673.3,487.1 +673.2,497.5 +676.5,483.2 +675.9,482.2 +663,484.8 +664,485.9 +664.2,486.3 +663.5,487.5 +685.6,12 +686,11.2 +682.6,23.4 +681.5,24.2 +681.5,25.5 +683.3,25.1 +684.1,25.7 +684.7,25.3 +684.7,28.9 +681.6,27.3 +685.6,32.7 +687.3,31 +685.5,25.8 +687,25.2 +686.4,23.4 +691.2,28.7 +693.2,27.7 +691.3,24.6 +694.8,27.4 +686.5,56 +693.2,48.3 +697.1,58.6 
+695.6,42.1 +689.8,62.3 +698.2,61.1 +699.2,69.3 +683.6,74.5 +681.4,75.2 +682.7,79.7 +698.3,72.5 +681.3,84.8 +690.7,89.5 +684.8,82.7 +683.4,82.6 +681.9,86.5 +683.7,85.9 +682,85.3 +683.8,85.3 +684.3,86.2 +681.8,89.2 +680.3,90.2 +680.2,99.7 +685.5,85.4 +687.6,85.7 +699.4,96.3 +695.8,103 +694.8,109.9 +698.2,115.4 +695.5,115.2 +698.7,117.7 +698,111.4 +699.1,114.6 +689.4,134.2 +699.9,138.4 +687.4,138 +689.3,137.9 +693.1,127.2 +692.3,132.4 +694.9,132.1 +692.9,138.3 +694.3,138 +697,139.5 +696.7,130.6 +699.1,128.7 +699.2,123.1 +694,159.1 +681.5,154.8 +681.4,154.4 +683.5,155.3 +686.7,141.6 +694.6,144.8 +694,141 +692.3,153.9 +698.4,149.8 +699.2,143.6 +699.2,142.3 +680.5,150.5 +687,166.1 +689.8,160.2 +682.2,160.2 +684.6,177.6 +686.6,173.3 +687.3,173.1 +694.7,167.5 +690.4,174.6 +693.5,172.7 +691.5,170.8 +690.4,176.9 +693.5,178.7 +697.2,169.6 +695.5,180.3 +681.8,189.1 +686.1,197 +688.2,197.6 +685.8,184.1 +688.2,182.4 +687.2,181.7 +693.5,180.2 +693.4,180.5 +693.1,190.4 +694.2,195.9 +693.6,195.8 +696.5,198.5 +696.3,190 +681.9,202.9 +683.5,203.6 +681.2,218 +685.1,210.5 +686.5,208.7 +687.9,209.1 +689.8,208.6 +688.9,205.6 +685.1,202.4 +687.1,203 +692.4,201.3 +690.4,203.6 +691.4,203.6 +692.2,205.7 +691.9,205.2 +691.3,213.9 +694.3,215.2 +691.3,218.8 +692.1,219 +692.9,219.3 +694.4,218.9 +693.6,218.3 +696.1,218.2 +696,218.5 +695.5,218.8 +698.4,217.2 +698.6,216 +697.5,218 +696.5,210.5 +695.8,206.1 +695.5,207.4 +696.1,202.4 +698.8,203.8 +698.4,201.6 +681.2,222.5 +682.1,222.3 +686.1,228.1 +685.9,229 +686.7,220.9 +693.9,223.7 +694,222.1 +692.2,221.3 +691.8,228.7 +693.1,228.6 +693.4,226.4 +692.5,225.9 +692.1,230.4 +690.6,239.1 +695.3,239 +698.9,236.3 +698.4,229.6 +695.6,226.3 +695.7,229 +696,220.6 +698.7,222.6 +697,223.1 +696.7,223.9 +698.2,220.4 +682.9,254 +688.9,256.9 +687.6,259.5 +684,253.1 +689.2,252 +686.4,243.8 +685.8,244.6 +689.6,241.5 +691,242.1 +694.2,240.3 +695.2,245.9 +699.3,247.4 +699.5,247.1 +698.9,245.6 +697.8,246.6 +685.8,267.3 +687.7,279.4 +699.7,278.5 +694.5,265.9 
+689.8,274.5 +687.6,260.4 +691,261 +691.4,262.6 +691,276.4 +692.5,276.6 +698.9,269.8 +689.9,281.8 +686.2,297.8 +680.5,300.6 +680.8,303.2 +683.5,302.9 +686.1,310.4 +688,314.1 +688.3,301.3 +693.3,314.5 +695.2,309.7 +696.2,301.7 +694.5,306.2 +680.4,325.3 +684.1,329.8 +686.3,333.2 +688.5,330.5 +685.4,326.9 +686,323.6 +692.1,339.2 +699.8,337.9 +697.9,334.4 +696.8,330.1 +684.4,348.4 +683.8,347.6 +684.6,350.2 +686.3,353.2 +688.3,350.4 +688.8,349.7 +690.2,344.6 +691.9,344.4 +691.5,341.1 +698.8,345.8 +696,342.1 +695.5,343.8 +688.5,346.4 +687.8,340.4 +684.4,375.8 +693.2,360.6 +694.9,378.5 +694.7,377 +682.7,385.4 +682.1,390.9 +682.4,391.2 +684.7,394.4 +685.5,394.1 +684,390.5 +681.5,394.5 +680.3,394.6 +681.3,394.8 +682,396.9 +682.6,397 +685.9,392.2 +688.9,393.2 +688.6,385.3 +694.1,393.2 +693.1,392.8 +694,396.5 +698.1,393.8 +682.1,409.9 +684.8,405.8 +681.6,405.6 +689.8,417.9 +687.5,407.4 +691,419.5 +693.6,417.4 +692.5,415.8 +697.8,419.2 +697.7,407.9 +695.6,406.1 +684.8,422 +684.8,433.8 +681.9,431.5 +689.2,435.2 +687.5,424.1 +690.8,421.8 +694.6,421.4 +692.8,420.3 +690.8,426.8 +692.6,429.1 +690.5,433.3 +694.4,438.1 +692.7,435.3 +695.9,438.5 +698.3,436 +695.5,434.9 +697.4,434.5 +699.3,434.7 +698.6,432.5 +699.1,427.1 +689.6,436.4 +684.2,444.8 +683.9,440.5 +684.4,448.2 +680.5,454.5 +687.7,451.5 +694.4,449.9 +693.6,452.2 +694.2,458.1 +686.1,447.9 +681.4,461.7 +689.6,478.5 +687.5,460.6 +694.2,474.6 +695.5,472.1 +697.2,471.2 +696.8,463.5 +684.7,488 +681.5,494.8 +682.4,490.1 +690.3,489.1 +691.2,493.6 +697.8,497.6 +699.8,496.7 +695.7,492.9 +697.2,493.2 +719.8,12.6 +705.7,12.9 +710.6,3.1 +715.9,6.6 +711.8,3.6 +713.2,13.1 +713.5,18.5 +714.2,26.3 +716.5,21 +701,43.8 +713.3,59.7 +705.2,79.7 +704.7,77.5 +714.3,79 +702.8,65.7 +705.1,72.8 +702.4,72.6 +706,70.2 +709.6,67.2 +710.3,69.8 +713.4,66.4 +718.3,72.3 +718.5,70.2 +719,61.9 +705.1,62.2 +719.6,83 +711.9,92.5 +719.5,92.9 +716.8,88.5 +712.9,118.6 +705.4,118.7 +712.3,106.5 +701.2,116 +709.2,124.9 +702.5,124.5 +703.3,123.9 +703,131.1 
+707.8,134.2 +707.7,132.7 +706.3,130.9 +708.2,123.1 +711.8,126 +710.9,130.6 +712.5,130 +711.5,139.4 +702.5,142.4 +719.2,142.3 +700.9,151.1 +709.1,157.2 +709.5,155.2 +707.8,149.4 +710.1,140.3 +710.5,140.7 +710.8,142.1 +713.3,141.6 +714.4,153.9 +714.3,156.5 +704.8,164.7 +702.3,160.4 +700.8,169.4 +704.4,169.9 +700.4,171.8 +704.6,173 +702.3,174.6 +705.7,176.7 +705.9,173.8 +708.9,174.7 +707.1,167.9 +707.3,169.6 +710.5,160.2 +712.8,166.2 +710.8,179.6 +719,160.8 +705.7,179.6 +718.7,161.2 +703.9,181.5 +702.3,188.8 +702.3,194.7 +718.8,197.8 +701.4,204.8 +703.4,203.9 +700.2,218.8 +701.1,218.9 +700.4,215.4 +705.2,211.9 +713.2,207.3 +719.3,213.8 +704.4,215.3 +713.7,237.3 +702,223.9 +703,224.4 +704.2,223.5 +703.7,222.5 +702.2,221 +702.6,225.3 +702.2,234.1 +704.5,239 +701.6,237.6 +709.1,239.1 +708.6,236.9 +708.1,235.5 +705.3,233.3 +708.5,234.7 +709.8,235 +709.8,231.6 +706.5,223.8 +708.4,223.8 +709.1,224.4 +712.5,233.7 +711.6,231.8 +710.2,236.4 +711.1,237.9 +713.5,239 +712.9,237 +715.3,235.5 +715.7,239.2 +717.7,235.7 +715.4,226.6 +715.3,224.1 +700.4,235.9 +709.8,227.8 +717.5,250.5 +701.6,245.8 +700.3,248.2 +703.6,249.2 +704.2,246.7 +704.6,245.8 +702.2,256.3 +705.9,253.9 +707,251.7 +709.3,250.4 +707.3,245.3 +705.4,243.6 +706.2,243.4 +707.8,244.9 +708.3,243.9 +709.3,242.3 +708.3,240.9 +707.1,240.5 +711.4,243.1 +712.8,243.9 +713.8,240.4 +713.1,245.1 +710.4,257.6 +718.2,256.6 +716.9,255 +716.7,248.3 +719.2,248.6 +718.3,245.9 +719,241.9 +718.8,240.3 +718,241.2 +708.5,297.4 +709,290.4 +711.1,288.7 +714.3,288.8 +710.7,294 +714.9,290.9 +714,295.9 +716,295.5 +717.9,292.3 +719.2,289.3 +719,286.5 +718.2,316 +712.1,318.3 +700.2,300.1 +700.1,302.4 +704.7,309.1 +700.3,312.2 +703.4,310.1 +704.6,317.5 +709.5,316.5 +705.4,312.6 +705.8,309.4 +711.1,311.6 +713.7,311 +713.4,316.4 +715.2,318.1 +716,316.7 +717.4,318.2 +715.5,312 +717.9,314.5 +712.3,333 +703.1,322.4 +703.1,334.8 +700.9,335.7 +705.6,339.3 +706.8,336.9 +708.9,334.8 +707.5,332.1 +708.3,325.7 +705.5,322.2 +705.9,323.9 +708.1,323.3 
+709.9,322.1 +706.7,324.2 +710.4,321.6 +713,323 +710.2,325.2 +714.5,325.1 +717.5,335.9 +715.7,333.7 +718.1,330.1 +716.9,330.4 +716.1,329.4 +718.2,327.3 +719.6,325 +718.3,324.6 +719,324.1 +710.9,327 +700.8,356.1 +714.4,340.5 +704.5,343.8 +707.5,345.1 +709.7,344 +708.2,340.6 +707.7,341.4 +713.7,347.4 +713.6,346.8 +714.8,350.8 +715.3,358.4 +716.3,354.7 +719,353.1 +717.5,348.6 +715.6,344.4 +708.1,379.6 +705.9,366.5 +710.3,361.9 +714.9,360.2 +711.3,366.8 +713.6,369 +714.7,368.7 +712.1,366.3 +717.9,375 +719.7,371.3 +715.2,365.1 +717.4,366.2 +716.1,364.4 +718.4,360.7 +700.1,384.9 +701.6,394.7 +704.9,393.9 +704.6,393.4 +704.2,396.3 +708,385.6 +706.5,385.7 +709.9,383.5 +711.2,384 +709.9,385.7 +710.5,389.7 +715.2,388.2 +716.3,380.3 +711,386.2 +704.8,407 +700.8,418.8 +702.3,418.6 +705.5,417.4 +706.3,415.7 +705.2,407 +707,405.3 +706.1,402.2 +713.6,412.2 +714,416.5 +714.1,415.9 +713.5,415.5 +719.3,417 +718,416.2 +718.2,406.5 +719.5,405.2 +715.3,414 +705,424.1 +700.2,431.4 +700.6,436.2 +701.4,436.9 +702,438.2 +704.4,435.1 +705.8,436.3 +706,437.4 +706.7,436.1 +707.6,434.4 +709.1,429.2 +709.7,428.5 +713.5,420.8 +713.5,439.6 +717.2,432 +717.2,430 +719.1,426.5 +718.5,425.8 +718.2,423.7 +719.1,420.8 +702.2,445 +702.9,444.1 +703.3,442.5 +703,441.7 +702.9,440.8 +702.1,441 +700.7,443 +702,446.6 +703.1,451.1 +706.9,452.7 +706.5,445.7 +706.4,446.7 +706.3,443 +707.4,440.1 +712.2,441.9 +713.1,442.8 +711.8,444.8 +712.3,444.2 +713.7,442.8 +714.8,454.6 +713.3,459.4 +717.3,456.6 +717.6,454.7 +719.7,453.4 +719,450.2 +716.8,450.2 +715.8,447.4 +717.3,449.5 +719.3,447.6 +717.7,450.6 +702.4,461.6 +704.1,462.2 +704.5,470 +700.4,475.2 +704.7,477.9 +705.4,462 +711.1,464.5 +710.6,467.7 +714.5,467.9 +712.5,473.9 +713.1,470.3 +719.6,479.5 +719.7,476 +716.9,475.6 +716.4,465.1 +718.7,463 +715.7,463.1 +712.7,462.3 +714.4,476 +702.5,499 +703.6,499.7 +706.7,497 +706.1,487.6 +708.9,485.9 +709.5,483.6 +710.9,483.1 +711.8,495.6 +718.2,499.5 +717.9,485.3 +716.6,485.3 +724.6,7.6 +729.2,7.8 +729.6,5.8 +727.1,13.5 
+729.1,31.2 +724,21.1 +730.1,36.3 +736,26.6 +734.9,57.8 +728.2,52.2 +720.5,69.1 +723.2,72 +720.3,76.3 +728.4,79.8 +727.7,75.5 +734.8,79.4 +739.8,79.5 +720.6,82.3 +722.3,83 +721.9,87.4 +720.8,94.4 +721.5,96.6 +725.4,95.8 +727.7,97.3 +729,93.2 +725.4,90.8 +733.9,82.2 +732.5,87.1 +731.1,90.8 +732.3,92.7 +735.2,95.6 +736.8,95.7 +739.5,99.5 +739,92.2 +738.5,89.4 +739.8,86.4 +739.6,85.2 +735.1,85.8 +738.1,83.2 +738.9,81.1 +735.5,80.3 +722.9,86.2 +736.1,93.7 +733.8,104.5 +734.1,110.4 +736.7,113.1 +738.8,105.1 +737.8,103.7 +735.4,103.2 +735.7,100.2 +738.9,103.2 +727.3,123.3 +731.3,127.2 +722.9,120.4 +724,121.4 +727.8,125.4 +729.5,121.8 +729.8,123.7 +733.3,122.2 +732.8,124.8 +738,136.5 +739.3,135 +736.4,125.1 +738.6,124.6 +720.5,140.3 +723.4,142.5 +729.1,141.5 +739.7,154.8 +737.3,140.3 +734.3,167.7 +734.4,169.3 +734.1,179 +737.6,177.1 +737.7,175.8 +739.3,163.5 +736.7,162.2 +725.4,160.7 +729.8,165.1 +732.4,161.6 +734.1,166.4 +739.4,172.6 +722.4,183 +721.1,190 +720.1,194.5 +720.9,198.8 +728.8,195.9 +725.5,194.5 +725.7,198.7 +729.8,190.3 +727.8,192 +726.6,183.8 +732.7,199.5 +739.3,180.4 +721.1,192.4 +725.3,193.9 +724.1,219 +727.3,212.9 +728.2,210.4 +730.7,209.9 +734.9,205.7 +731.3,213.3 +734.6,213.7 +736.9,219 +739,219.2 +736.9,210.4 +737.1,202.5 +737.1,201 +735.7,203.4 +732.1,233.8 +724.8,229.9 +724.7,235.4 +721.2,236.3 +731.1,220.6 +734,238.2 +734.8,238.7 +735.8,238.8 +738.8,233 +738.5,231.6 +735.7,228.6 +731.7,223.5 +728.7,255.9 +731.3,259.1 +735.8,257.5 +721.2,241.2 +723.6,241.4 +723.8,243.7 +721.3,244.6 +721.8,249.7 +722.2,252.8 +724.9,251.3 +721.2,256.9 +722.6,258.3 +725.3,257 +729.5,257.2 +727.4,259.1 +725.5,249.7 +726.2,242.4 +730.7,241.6 +732,240.4 +732.2,244.5 +733.6,241.6 +731.1,244.4 +730.9,247.9 +730.7,253.5 +735.4,259.8 +739,259.6 +737.3,248.8 +736.2,245.6 +735.5,246.6 +733.4,245.5 +738.2,268.3 +733.9,261.8 +727.8,276.9 +733.1,272.6 +732.5,271.1 +733.2,274.7 +734,274.7 +732,274.5 +733.1,279.4 +734.3,279.9 +734.8,277.3 +739,275.7 +738.2,278.8 +736,274.5 
+724.9,284.7 +724,286.3 +723,288.1 +723.8,290 +721.2,299.8 +725.8,299.8 +728.3,297.8 +726.3,294.3 +729.3,289.8 +726.5,287.3 +727.2,285.1 +730.1,280.9 +734.5,283.6 +731.5,283.5 +730.3,285.2 +730.8,286.7 +735.1,286.2 +734.9,289.1 +732.7,287 +738.6,298.6 +735.7,291.5 +738.8,285.1 +739.2,283.2 +736.6,281 +737.5,280.2 +737.8,281.8 +735.5,281.8 +735.6,284.6 +735.9,284.8 +736.2,283.9 +728.6,282.7 +730.2,292.8 +735.2,286.8 +736.4,288.6 +736.5,289 +721.7,303.9 +721.7,306.9 +721.1,309.7 +720.2,310.1 +722.5,313.4 +722.2,314.7 +727,313.5 +730.5,304.1 +733.2,305.2 +732.6,304.8 +732.2,301.5 +733.5,308 +733,311.1 +730.3,315.4 +734,319.4 +735.4,303.5 +738.8,302.1 +725.5,317.2 +725.7,308.2 +732.9,306.5 +721.4,320.1 +721.4,324.5 +724.3,324.4 +722.1,323.9 +724.3,331.1 +722.7,331.1 +722.4,333.2 +724.3,333.2 +720.5,339.1 +723.8,335.4 +725.2,335.9 +726.9,330.3 +728.5,328.4 +726.7,329.9 +725.8,323.6 +726.5,325.2 +732,326.2 +732,327.2 +733.5,329 +733.7,334.1 +733,339.1 +732.8,339.3 +734.7,337.7 +738.5,337.8 +736.8,335.9 +736.3,334.8 +735.7,333.8 +735.9,330.2 +736.2,329.9 +739.5,334.8 +738.6,329.4 +739,327.6 +739.4,326.8 +736.7,325.3 +722.2,320.2 +722.7,321.2 +736.8,340.2 +724,342.5 +724.1,348.4 +724.5,349.9 +726.4,355.4 +729.6,355.2 +728.9,351.5 +728.2,350.2 +727.6,346.8 +725.5,345.2 +728.1,344.7 +726.4,340.9 +731,343.7 +732.2,343.7 +735.1,341.7 +733.3,345.8 +731.3,350 +734.5,354.8 +735.2,357.2 +739.8,354.5 +736,347.8 +736.6,348.1 +739.5,347 +738.4,343.5 +735.5,340.7 +737.2,342 +735.2,344.5 +737.6,345.1 +739.3,344.1 +720.9,345.4 +731.9,369.6 +721,365.5 +720.3,366.3 +720.3,368.1 +721.5,370.3 +722,375.4 +723.7,376.9 +728.4,377.4 +729.3,379.2 +727.4,368.7 +729.3,364.2 +731.7,360.7 +733.9,361.7 +732.6,372.8 +733.8,371.1 +734.5,370.4 +738.7,378.8 +735.3,376.2 +738.7,370.1 +736.5,368.5 +735.2,365.9 +736.8,362 +731.7,365.3 +739.8,363.8 +730.3,395.4 +720.7,390 +724.5,398.8 +727.2,398.7 +725.7,395.1 +729.3,396.7 +726.9,393.9 +725.9,392.8 +727.3,388.7 +729.8,384.1 +727.4,380.5 +726,383.9 +731,382 
+730.2,380.8 +730.1,382.1 +732.2,383.2 +734.8,381.8 +732.5,385.7 +729.9,399.5 +733.8,397 +736,399.3 +738.1,395.7 +739.8,392.7 +735.4,394.1 +734.8,394.6 +737.2,386.5 +739.8,384.8 +735.9,381.7 +735.3,383.4 +720.3,397.8 +724.8,400.3 +733.3,412.3 +722,400.4 +722.8,400.5 +721.8,401.8 +722.2,402.8 +722,404.4 +720.3,408.1 +721.1,409.3 +722.7,410.1 +723.8,406.8 +723.1,407.1 +725,408.3 +724.2,413.4 +723.9,410.4 +721.2,417.8 +723.8,418.9 +724.7,417.4 +728.7,415.9 +727.5,413.5 +728,412.9 +729.5,412.8 +728.5,411.9 +727.7,411.7 +726.4,411.1 +728.8,410.8 +729.8,408.7 +727.2,406.1 +725.9,406.6 +725.7,408.9 +727.4,404.8 +728.5,402.4 +728.1,400.5 +726.1,400.2 +725.5,401 +727.6,402.1 +727.6,403.7 +725.7,403.9 +731.2,404 +734,403.2 +731,406.8 +730.3,409.4 +730.2,409.8 +733.3,408.9 +734.8,406.2 +731.8,411.1 +731,412.6 +730.3,415.9 +738.8,409.6 +738,405.9 +736,407.1 +736.1,408.9 +737.8,404.2 +737.7,400.3 +735.1,401.1 +722.4,425.1 +724.5,425.6 +721,430.3 +723.2,432.9 +720.1,435.8 +720.9,436.9 +727.8,437.2 +730.1,439.6 +729.9,437.9 +729.3,436.6 +727.3,437 +727.6,434.8 +726.5,435.5 +726.1,434.8 +728.8,429.7 +725.6,432.3 +729.9,425.2 +729.9,422.8 +733,428.4 +733.6,437.6 +735.7,432.1 +739.2,429.9 +739.1,429.1 +736.2,423 +727.5,429.1 +728.2,422.7 +723.6,455.3 +721,441.9 +724.5,443.5 +721.6,446 +728.8,457.4 +728.3,457.2 +727,453.3 +727.8,449.9 +725.1,446.1 +732.7,441 +734.9,444.1 +733.1,443.4 +730.3,453.9 +733.3,451.8 +734.3,450.3 +734.7,459.1 +738.5,455.2 +739.6,450 +735.7,449 +735.2,448.1 +738.9,448.9 +738.5,445.3 +726,447.2 +724,465.4 +721.8,466.4 +721.3,479.6 +724.9,476.8 +727.1,479.8 +730.1,479.3 +729.1,475.8 +728.7,476.4 +727.8,475.8 +726.2,476.8 +725.4,476.9 +728.4,477.7 +728.7,474.9 +726.8,471.1 +728.3,468.4 +727.4,467.2 +728.9,463.3 +728.1,460.4 +726.8,461.5 +733.1,467 +734.9,467.4 +730.6,474.8 +732.5,475.8 +734.8,475.7 +734.8,476.2 +732.2,477.2 +731.9,477.7 +731.3,477.7 +731,478.8 +732.2,478.3 +737.6,474.1 +736.9,474.2 +736.6,473.3 +736.3,472.3 +738,469.3 +735.3,473.8 +737.8,494.1 
+724.7,498 +725.7,490.1 +729.6,484 +728.3,482.4 +725.7,484.2 +731.9,490.3 +734.2,487.6 +733.4,486.5 +731.8,491.4 +733.6,498.1 +734.9,499.9 +736.6,498.8 +736.4,499.8 +737.2,499.2 +738.2,498.5 +739.8,496.5 +739.7,495.1 +739,486.7 +738.5,485.2 +737,488.6 +739.7,482 +738.2,483.5 +735.4,482.6 +736.5,484.6 +724.7,488.7 +730.5,493.3 +748.2,17 +744.4,6.2 +741.8,14.3 +745,15.4 +747.2,12 +746.5,9.4 +752.5,2.5 +751.6,8.4 +754.5,10.5 +754.5,18.5 +754.1,15.2 +756.4,30.2 +755.3,24.7 +746.4,44 +742.8,46.6 +746.9,43.6 +742,64.4 +744.7,79.6 +747,75.4 +747.1,75.8 +747.6,76.7 +748,76.4 +747.2,77.7 +746.2,78.5 +749.4,78.1 +746,71.5 +752.8,74.7 +755.7,73.6 +758.8,78.7 +757,79.5 +755.5,71.1 +756,62.1 +743.7,90.9 +743.5,83.3 +742.1,81.6 +740.8,83.9 +743.1,84.4 +743.5,80.8 +744.3,80.4 +741.2,87.1 +742.3,88.4 +744.6,93.8 +744.6,97.7 +749.3,93.5 +749.3,92.7 +747.2,86.7 +745.9,88.2 +747.2,88.7 +748.2,87.8 +749.7,85.9 +745.3,81.6 +745.7,84.1 +748.3,81 +753.2,84.1 +751.7,83.9 +751.9,82.9 +751.1,84 +750.8,84.6 +751.7,87.7 +751.6,89.4 +752,88.2 +754.2,85.6 +751,94.7 +751.7,96.9 +758.8,98.2 +756.2,93 +759.5,92.3 +757.7,91.1 +757.6,90.2 +756.7,89 +757.9,80.3 +745.8,84.8 +757.2,82.3 +749.1,100.1 +751.8,114.6 +753.9,115.6 +753,115.1 +757.3,100.1 +741,131.5 +742.8,125.6 +742.5,125 +741.5,137.4 +744.6,139.1 +748.6,139.5 +747,133.6 +754.1,124.7 +750.7,134.1 +753.2,130.6 +751.8,137.1 +758.8,135.8 +756.6,131.5 +757.8,131.4 +756.7,128 +757.9,128.1 +742.2,140.5 +742.7,149 +741.5,155.1 +742.4,152.8 +744.8,155.1 +746.1,156.4 +747.2,157 +745.6,151.9 +747.4,154 +747.5,152.2 +746.8,145.1 +748.1,143.7 +747.9,140.5 +751.9,142.3 +754.5,141.1 +756.2,143.2 +743.2,163.5 +744.4,163.1 +741,165.2 +740.8,168.5 +749.5,164.7 +751,171.4 +759.3,167.9 +759.6,161.2 +757,198.2 +755.4,193 +757.8,193.6 +757.4,190.4 +755.3,186.4 +758.3,188.4 +740.8,200.1 +749.9,213.2 +749.4,212.2 +752,203.1 +752.7,201.1 +751.5,208.5 +750.7,208.8 +752.5,213.9 +751.8,214.8 +752.8,211.9 +754.8,218.8 +758.7,213.8 +755.9,209.5 +756.6,201.9 +756.8,203 
+759.5,201.2 +748.9,232.9 +741,229.2 +740.5,232.3 +742.1,235.1 +741,236.4 +746.2,231.2 +747.2,221 +745.5,221.8 +750.1,223.1 +756.1,222.8 +757.7,224.6 +744.2,256.9 +740.1,250.6 +741.5,243.9 +745.1,244.6 +754.9,250.7 +750.8,249.6 +748.8,253.4 +754.1,259.6 +757.4,259.7 +748.4,268.5 +740.1,263.9 +755.5,263.4 +743.2,274.3 +741.8,276.9 +740.2,279.1 +740.8,279 +747.8,275.5 +751.5,279.3 +756.4,273.5 +758.9,269.2 +759.8,268 +756.8,266.1 +757.9,264 +744.1,280.5 +742.3,292.5 +743,290.8 +743,293.3 +741.2,294.3 +742.1,294.3 +742.2,295.3 +740.5,296.6 +741.9,299 +744.1,299.6 +744.1,296.3 +744.4,295.1 +743.5,295.4 +745.7,297.7 +745.6,298.9 +749.4,296.2 +748.6,295.9 +747.2,295.5 +750.3,283 +750.8,291.2 +750.2,294.8 +752.3,294.8 +750.7,297.6 +750.2,299.4 +759.4,290.4 +755.8,289.2 +756.3,288.6 +752.6,298.9 +746.2,301.2 +746.5,310.5 +744.2,313.2 +740.4,300.4 +741.3,300.5 +742.3,302.2 +742.6,304.1 +743.2,301.9 +742.8,309.8 +744.7,305.8 +742.8,305.3 +748.2,313.3 +742.7,310.6 +742.2,319.2 +747.2,316.8 +747.3,316.5 +745.1,313.4 +746,313.4 +747,306.5 +745.2,306.2 +745.3,308.4 +746.3,309.2 +746.3,303.3 +748,301.1 +752.3,302.8 +751.8,304.6 +751.8,304 +753.3,304.5 +754.7,301.9 +754.8,306.4 +751.7,313.1 +752.1,314.3 +756.8,313.9 +757.4,314.7 +757.7,311.6 +757.3,308.2 +755.4,300.4 +755.6,301.3 +755.2,301.6 +757.4,300.9 +757.5,304.4 +758.1,303.9 +740.5,338.4 +758.3,331.8 +752.4,334 +740.2,323.6 +742.5,323.8 +744.1,324.2 +741.9,327.3 +740.1,328.8 +740.4,329.5 +743.1,326.7 +740.8,337.8 +740.1,339.2 +744.6,339.8 +745.5,335.1 +746.5,338.5 +747.8,336.7 +748.3,338.7 +747.3,335.4 +746.3,330.2 +746.1,331.7 +747.3,334.3 +748.2,334.5 +747.7,333.6 +749.5,334.8 +747.4,330.6 +746.4,326.5 +747.1,322 +746.4,324.8 +746.8,323.3 +751.2,324.6 +754.4,323.1 +754.5,322.5 +753.8,325.1 +750.6,330.1 +753.2,329.4 +750.4,336.6 +751.5,339.7 +756.9,326.4 +758.2,320.2 +758,356.7 +740.6,342.4 +742.7,343.9 +744.4,342.1 +743.2,342.2 +743.8,341.7 +744.3,341.5 +742.6,340.9 +742,347.3 +743.6,347.3 +744.6,353.3 +743.1,352.4 
+742.6,352.2 +744.3,350.1 +742.3,352.9 +741.2,356.7 +747.1,352.9 +746.7,349.2 +746.6,342.2 +747.1,343.5 +747.1,344.6 +749,342.9 +750.2,344.7 +750.3,350.1 +753.1,358.9 +757.9,358.3 +742.5,364.4 +755.3,377.6 +742.1,361.6 +740.3,368 +743.1,369.6 +744.3,366.6 +743.2,366.2 +744.7,371.3 +746.5,379 +745.9,368.3 +745.6,369.5 +746.3,368.5 +747.8,369.5 +749.3,368.6 +749.9,366.7 +746.8,363.6 +750.4,360.1 +752.8,364.5 +753.6,361.7 +753,360.9 +750.7,366 +750.2,368.1 +752.2,368.6 +753.5,367.5 +753.4,368.9 +754.7,368.6 +752.4,370.9 +754.6,370.2 +758.7,372.5 +756.5,366.2 +758.4,369.6 +757.4,367.4 +755,360.3 +756,364.3 +740.8,389.8 +742.1,389.8 +748.9,386.1 +740.4,405.4 +740.8,400.3 +743.7,403.6 +741.7,406 +740.5,410.6 +743.9,418.5 +746.1,416.2 +746.2,419.4 +747.6,419.5 +746.1,413 +746.9,413.6 +753.3,405.2 +751.5,414.1 +751.7,417.1 +751.8,418.8 +753.7,416.5 +756.8,415.1 +756.1,411.8 +755.2,412.6 +759.7,408.3 +757,400.3 +741.4,421.1 +742,423.5 +744.5,422.7 +742.2,428.5 +741.7,436 +742.2,437.4 +746,424.9 +746.3,425 +748.3,424.4 +750.7,422.1 +750.2,424 +751.2,424.1 +751.4,424.7 +753.8,422.4 +753.2,420.2 +753.9,429.5 +753.1,433.2 +754.3,431.6 +751.4,437.4 +755.7,435.4 +758,437.4 +756.5,439.5 +758.4,438.2 +758.7,435.5 +756.9,430.6 +756.7,431.4 +759.2,434 +759.8,429.2 +759.8,428.6 +758.6,420.4 +741.1,441.9 +741.9,441.9 +743.2,440.7 +741.5,448.9 +741.7,450.5 +743.5,453.7 +744.2,453.7 +742.3,450.5 +747.9,454 +748.1,447.4 +753.3,445 +751.3,454.9 +753.8,454 +753.8,450.4 +754.6,450.7 +755.3,450.2 +753.1,456.6 +753,456.4 +754.8,458.9 +753.2,456.3 +756.6,458.8 +756.3,459.8 +755.6,452.4 +757.6,446.8 +757.2,443.3 +759.5,444.9 +759.8,443.2 +758.5,440.3 +755.3,467.4 +743.7,472 +744.8,475.6 +749.3,479.2 +748.2,478.8 +748.8,475.9 +746.9,475.3 +745.2,476 +746.1,471.5 +746.7,473.7 +747.7,474.2 +749.8,470.9 +746.7,469.6 +748.7,468.5 +748.3,467 +748.7,466.7 +752.8,462 +753.7,460.5 +750.4,467 +752.7,467.2 +753.3,466 +753.1,465.4 +753.6,465.4 +750.2,475.3 +750.1,479.8 +751.1,479.4 +758.8,473.7 +759.9,472.6 
+757.7,471.8 +758.6,469.2 +758.3,465.6 +757.9,465 +757.7,460.5 +758.5,461.6 +756.2,462.4 +759.7,462.2 +748.7,489.6 +740,480 +744.3,484.1 +740.5,487.8 +743.2,489.5 +745,489.9 +743.8,487.3 +744.6,485.2 +743.2,495.6 +744.1,498.6 +747.9,497.8 +747.2,498.4 +749.6,495.7 +746.4,493.2 +745.3,485.3 +745.1,488 +747.1,488.5 +746.8,481.4 +746.2,483.7 +746.1,484.9 +749,483.7 +746.8,481.4 +747.9,481.2 +749.9,480.7 +751.6,480.6 +750.7,483.6 +754.2,484.3 +753.5,482.8 +752,488.2 +750.7,490.3 +750.5,491.9 +751.5,498.7 +755.7,498.5 +757.5,496.4 +755.1,491.2 +756.7,491.3 +755.2,487.2 +756.8,487 +757.6,489.7 +755.1,480.4 +755.7,481.1 +757.4,481.3 +763.8,17.7 +762.7,19.6 +760.8,18.1 +769.4,17.2 +767.8,4.8 +772.1,4.1 +772.2,5.6 +772.7,6.5 +771.3,8.6 +770.5,11.4 +779,4.2 +777.3,1.8 +777.1,38.8 +760.2,21.3 +761,24.3 +761.1,22.8 +764.8,26.9 +764.8,23.2 +769.3,23.4 +773.5,22.3 +771,23.6 +773.8,28.5 +772.7,32.1 +779.7,28.4 +769.7,24.9 +776.1,44.3 +776.5,59.3 +779.3,55.6 +778.8,58.3 +767,74.2 +769.9,71.2 +762.5,77 +771.6,92.4 +762.7,92.1 +765.5,98.7 +764,103.5 +763.4,107.7 +764.2,110.4 +761.9,113.5 +778.2,117.2 +778.5,101.3 +767.8,111.5 +766.2,109 +772.2,103.4 +773.3,113.2 +768.9,133.7 +779.4,136.7 +762.4,126.3 +762.3,127.7 +769.2,131.2 +774.8,136.6 +776.2,129 +768.5,156.9 +768.4,144.1 +763,153.2 +769.6,158.3 +768,149.5 +772.6,142 +774.4,140.4 +773.6,152.5 +774.6,151.2 +770.8,158.9 +777.1,150.3 +765.2,161.5 +765,164.5 +761.8,164.9 +761.5,166 +761.5,166.5 +762.6,166.3 +762.6,168.8 +760.7,168.6 +761.1,166.7 +762,167.8 +765.1,174.7 +763.9,174.7 +764.3,176.6 +763.7,179.7 +766.3,178.3 +765.9,177.8 +765.5,177.6 +766.3,176.6 +768.3,177.8 +767.2,178.5 +765.9,172.4 +765.4,168.1 +765.7,167.2 +766.5,167.7 +767.7,168.6 +769.1,163.8 +770.4,163.8 +772.3,162.3 +771.2,165.1 +771.6,165.2 +770.5,167.9 +770.8,178.1 +761.6,191.9 +768.1,199.6 +761.4,180.3 +763.9,181.1 +764,183 +763,183.6 +764.8,187.4 +760.5,189.6 +762.2,186.7 +760.5,191.2 +763.5,193.1 +762.8,194 +761.6,197.3 +766.2,199.4 +768.8,194.1 +766.4,189.5 
+765.3,188.6 +766,188 +765.3,185.2 +769.2,188.9 +766.8,183.3 +765.2,180.2 +767.8,181 +771.6,187.1 +770.8,189.9 +772,190.5 +770.1,195.4 +775.9,186.6 +770.2,191.1 +770.7,214.2 +761.3,201 +764.9,202.5 +760.9,207.4 +763.9,207.2 +762.8,209.4 +762,209.8 +762.1,207.8 +764.6,210.2 +765.3,206.2 +768.6,200.8 +770.3,202.5 +771.9,207 +778.9,201 +760.2,221.4 +768.3,225.4 +768.1,228 +771.3,228.2 +762.9,244.6 +763.5,258.8 +763.4,248.3 +762.3,259.4 +763.8,256.2 +770,257.4 +766.8,250.5 +766.8,248.5 +774.8,255.2 +776.5,258 +777.9,245.1 +763,279.2 +771.3,263.3 +763.3,261.4 +766.3,264 +763.4,265.2 +763.5,267.3 +774.4,267.6 +776.3,267.6 +779.7,267.2 +778.1,261.1 +763.9,274.3 +763.7,273.5 +765.7,275.8 +767.1,275.1 +765.8,273.3 +766.7,272.5 +762.1,261.9 +764.6,273.1 +766.5,282.9 +778.1,291.4 +779.6,287.5 +776.9,286.3 +763.5,295.2 +761.3,297.3 +770,282.6 +770.8,293.7 +772.2,297.8 +771.2,297.4 +774.5,298.3 +775.5,297.6 +776.3,298.8 +779.4,281.4 +775.8,308.6 +761.7,302.7 +764.5,301.9 +764.2,300.5 +760.8,307.3 +760.7,309.7 +762,312.9 +762.3,312.6 +764.5,319 +764.8,319.7 +765.9,319.4 +766.2,311 +765.7,304.2 +767,301 +774.8,300.2 +775.1,305.4 +771.7,311.8 +778.8,307.1 +776.2,302.1 +778,302.1 +776.9,301.2 +777.1,304.7 +777.8,300.4 +760.3,323 +761.5,320.6 +760.6,320.4 +764.6,325.9 +763.1,328.3 +765.7,322.6 +769.6,323.5 +771.2,320.4 +774.4,329.8 +770.7,328.8 +770.9,326.9 +777.7,326.9 +763.8,351.7 +762.9,358.3 +760.5,359.6 +765.4,356.1 +765.1,355.3 +767.3,357.9 +765.2,351.4 +769.5,354.8 +767,352.6 +767.8,349.7 +769.7,344.2 +772.7,351.3 +772.8,352.4 +770.7,355.9 +776.5,357 +778.4,355.9 +777,353.2 +776.7,350.4 +777.1,349.8 +779.1,351.3 +760.9,361.4 +761.8,368.2 +761.2,367.7 +768,368.5 +769.6,360.9 +772.5,363.1 +770.8,363.3 +772.4,372.2 +773.6,371.3 +774.4,373 +772.9,374.2 +772,373.5 +773.1,376.6 +772.3,379.1 +775.7,379.2 +776.6,376.6 +778.8,377.1 +776.5,379 +779.3,373.5 +776.6,366.9 +778,365.1 +778.9,365.1 +779,369.4 +778.2,366.7 +775.2,361 +778.1,360.7 +778.2,363.8 +777.3,363.4 +777.8,362.5 
+778,362.9 +770.5,399.5 +779.1,393.7 +763.8,386.5 +760.5,389.7 +769.1,387.4 +772.2,380.6 +774.2,380.3 +770.4,383.2 +771.3,387.2 +773.7,385.6 +772.4,390.6 +778.8,385.8 +776.9,388.1 +777.2,380.3 +778.1,382.2 +760.7,417.8 +779.5,414.2 +761.7,400.7 +762.7,402 +763.3,402.9 +763.2,404.1 +763.8,410.3 +762.8,412.7 +764,414.5 +761.5,413.9 +760.9,415.9 +762.6,416.5 +766,418.8 +765.7,416.6 +766,415.6 +768,419.4 +765.2,414.2 +766,413.7 +765.5,411.7 +765.4,410.2 +767.2,410.4 +768,411.8 +767.4,414.7 +766.7,414.7 +766.8,409.5 +774.2,404.5 +771.7,404.5 +773.8,405.2 +773.2,409.3 +772.5,407.5 +773.2,407.1 +773.9,412.1 +771.9,415.8 +779.1,416 +779.3,419.7 +777.1,411.7 +779.5,410.6 +779.4,411.7 +775.6,406.8 +777.2,406.8 +778.7,406 +779.1,409.3 +779.5,409.8 +777.1,409.3 +775.7,404.2 +775.9,402.5 +775.5,401.9 +776.1,400.7 +778.5,403 +764.1,402.5 +774.3,436.1 +763.5,424.2 +760.5,422.5 +762.6,425.8 +763.1,426.3 +764.3,426.3 +763.5,428.6 +760.4,427.5 +761.2,431.8 +762.1,431.5 +762.5,430.7 +764.1,431.5 +761.7,433.4 +761.3,435.4 +763.7,436.8 +764.9,435.4 +764.4,435.8 +764,439.6 +769.6,437.7 +767.2,432 +769.2,434.4 +767.1,425.8 +767.3,425.2 +767.5,427 +765.6,421.5 +765.8,420.5 +766.7,421.5 +769.1,421.6 +767.8,423.1 +769.1,424.5 +773.9,424.3 +772.1,423.9 +771.2,425.1 +771.3,426 +770.5,427.8 +772.2,428.4 +770.8,431.4 +773.7,431.5 +774.1,431.8 +770.5,439.6 +776.8,455.3 +779.3,448.5 +773.8,443 +760.8,440.1 +763.5,441.3 +764.7,442.9 +762.6,444 +761.8,443.4 +764.1,446.2 +764.4,450 +761.8,450.1 +765.2,455.4 +767.5,456.3 +766.6,442.5 +773.6,441.2 +774.3,454.6 +770.5,453.9 +770.6,455.9 +772.6,459 +775.3,458.7 +779.5,457.4 +760.6,460.7 +762.1,461.8 +763.2,460.1 +762.4,461.1 +762.8,465.5 +763.2,464.5 +761.9,464.3 +760.3,464.8 +761,463.2 +762,466.6 +764,466.5 +764.5,469 +762,469.6 +761.1,468.1 +762.6,471.9 +764.5,470.7 +763.8,473 +763.7,474.5 +763.1,474.6 +761.7,474.4 +760.6,473.6 +763.8,479 +761,477.9 +766.9,478.3 +767.1,475.3 +767.3,476.5 +769.8,478.6 +768.1,478.7 +767.5,479.2 +769.1,474.2 +768,474.6 
+766.5,467.5 +765.3,463.7 +766.2,461.5 +767.5,461.5 +769.9,462.5 +774.4,467.4 +771.8,468.6 +773.5,470.9 +773.6,479.1 +775.2,479.2 +775.2,478 +779.5,476.1 +775.2,472.7 +779.3,473.4 +775.2,460.1 +767.5,460.8 +760.7,481.3 +761.8,480.4 +763.2,481.5 +764.6,483.3 +762.8,492.2 +764.6,494.9 +764.2,495.9 +762,498.1 +768.5,495.7 +769.2,499.1 +765.8,486.5 +767.2,489.1 +767.1,489.7 +767.5,482.1 +766.8,481.9 +768.8,480.9 +768.6,482.3 +770.6,480.2 +771.7,481.5 +772.5,483 +770.5,485.4 +771,485.9 +774.5,488.3 +771.5,488.7 +772.9,486.7 +774.2,492.3 +771,493.9 +774.4,495.7 +773.7,498.8 +776.6,497.9 +775.1,497.3 +779.4,495.4 +779.5,496.3 +779.5,499.1 +778,498.5 +776.9,494.7 +777.9,490.4 +777.9,491.5 +775.7,489.2 +776.5,487.9 +775.2,485.8 +778.9,488.7 +776.6,480.8 +777.3,480.2 +778.1,483.1 +783.9,2.1 +783.2,4.3 +783,4 +781.2,4.8 +784.5,8.4 +785.7,11.2 +785.8,8.8 +786.6,5.4 +787.7,9.1 +785.2,4.8 +785.6,0.8 +785.5,0.2 +789.2,1.7 +787.4,1.6 +790.2,0.5 +791.4,1.4 +791.4,3.7 +790.2,3.4 +790.6,9.1 +790.7,8.1 +792.3,7.3 +792,14 +794,16 +797.3,8.7 +783.2,38.8 +780.9,37.5 +788.8,21.2 +781.2,43.8 +782.4,56.3 +785.5,44.5 +796.2,69.8 +785.7,60.6 +791.9,68.1 +792.6,71.1 +794.1,74.4 +784.5,92 +792.8,80.1 +780.2,100.5 +784.2,112.3 +794.6,105.7 +799.3,115.4 +791.5,122.1 +786.5,128.7 +784.8,132.1 +790.2,127.4 +788.4,130.6 +781,161.2 +798.3,173.4 +781,162 +783.1,170.2 +788.3,176.4 +785.1,166.8 +788.5,167.9 +791.2,167.6 +797.5,184.1 +790.5,196.7 +783.8,194.6 +782.8,191.3 +782.6,199.7 +785.3,191.1 +786.6,189.1 +788.5,188.7 +782.1,204.4 +795.6,215.1 +795.4,230.6 +780.2,228.3 +782,232.1 +780.5,231.9 +784,235.7 +782.9,238.9 +783.5,239.7 +798.6,237 +794.7,246.2 +781.8,244.1 +783.7,244.5 +783.2,243.4 +784.2,242.5 +780.7,258.2 +787.2,252.6 +785.8,247.6 +785.8,244.3 +792.5,241.6 +799.6,252 +798.4,248.9 +781.7,245.8 +782,245.7 +782,254.2 +796.5,276.3 +785.6,277.4 +794.5,261.1 +790.9,271 +790.4,274.9 +798.9,272.7 +798.8,270.3 +795.2,266.7 +797.1,265.7 +790.6,280.3 +783.9,286.4 +785.1,288.6 +783.1,291.5 
+784.7,297.7 +787.2,299.4 +786.3,293.7 +788.9,290.5 +790.5,287.4 +790.4,290.6 +799.4,295.1 +797.1,286.9 +799.9,285.3 +782.3,301.2 +796.6,307.9 +795.8,300.5 +793,330.4 +789.4,339.5 +789.6,334.3 +788.8,334.2 +793.8,327.2 +793.3,337 +793.6,338 +793,336.5 +798.8,336.8 +798.5,328.3 +799.3,327 +797.3,329.5 +799,325.8 +795.7,328.5 +799.4,325.1 +798.9,321 +781,353.1 +782.2,359.5 +781.5,358 +786.5,357.3 +786.1,355.1 +789.8,355.9 +787.5,357.8 +785.2,350.6 +788.9,354.4 +785.4,349.1 +787.9,347.8 +789.5,348.7 +787.2,348.3 +787.7,344.4 +790.2,348.2 +794.2,355.9 +793,358.1 +796.2,358.2 +783.1,364.7 +782,364.3 +780.4,361.8 +781,362.6 +780.7,365.5 +781.8,366.8 +783.6,366.4 +784.2,365.9 +784.2,366.9 +784.7,368.8 +785.1,369.9 +782.1,369.4 +782.1,369.1 +783.7,368 +780.3,371 +781,371.1 +783.8,370.3 +784,372.5 +788.4,375.1 +788.4,375.7 +787,374.8 +785.3,370.2 +788,370.4 +789.1,370.7 +789.6,374.4 +784.9,366.9 +789.3,365.1 +785.5,363.4 +785.6,362.8 +786.2,364.6 +793.1,360.5 +794.5,360.5 +794.8,362.9 +793.8,363.2 +792.4,362.4 +792.5,365.2 +793.7,373 +792.3,373.5 +793.3,379.3 +796,379.3 +798.6,375.9 +797.7,370.4 +799.4,370.9 +799,372.2 +795.7,367.6 +799.4,366.5 +798.9,368.9 +795.2,363 +797.2,360.5 +783.1,374 +798.8,365.2 +783.5,384.5 +783.9,395.7 +783.1,396.6 +785.7,399.3 +788.5,399.2 +789.5,394.1 +792.8,385.2 +798.6,394 +799.3,389.8 +798.1,381.2 +780.7,382.4 +791.2,416.2 +780.3,400.3 +784.5,403 +783.2,404.7 +783.8,405.4 +784.8,407.1 +784.5,407.4 +784.1,408.3 +783.7,409.5 +782.2,415.8 +783.5,418.6 +786.9,419.6 +786.2,413 +788.6,411.3 +787.5,409.5 +788.9,401.9 +798.6,417.6 +798.9,411.6 +799.1,414.5 +798.5,404.9 +799.2,409.3 +796.8,408.9 +799.7,437.9 +782.1,426.4 +780.6,439 +781.3,437.9 +788.7,439.1 +787.5,437.9 +787,430.5 +789,429.8 +790,430.2 +788.3,434.4 +786.4,428.2 +785.3,423.1 +786.1,420.4 +789.2,424.2 +790.4,420.5 +792.1,420.3 +793.8,420.2 +794.6,421.3 +791.8,422.5 +790.7,429.3 +792.5,426.2 +797.8,430.4 +795.8,426.7 +796,425.5 +797.5,426.3 +798.5,427.4 +797.4,422.5 +783.6,456.3 
+784.2,458.1 +785.7,456.6 +786.6,455.3 +787.9,455.2 +788.4,444.9 +790.1,454.4 +792.3,459.8 +798.2,458.3 +796.2,458.8 +795.4,456.6 +797.2,450.4 +781.9,475.7 +781,462.2 +782.8,463.8 +784.2,464.4 +783.8,461.5 +781.1,469.3 +782,469.7 +781.4,472.9 +783.7,471.8 +783.5,474.8 +782.8,475.5 +784.8,476.3 +783.4,479.2 +780.5,479.1 +786.5,477 +786.8,476.5 +788.5,475.7 +788.8,477.5 +788.7,478.6 +785.8,474.3 +785.8,470.9 +786.2,470.1 +789.2,470.8 +789.6,472.2 +787,471.5 +785.6,466.8 +785.4,465.8 +787.1,466.9 +787.7,469.4 +785.8,463 +787.5,463.9 +791.1,461.7 +791.9,467.3 +793.2,470.5 +794.6,472.8 +794.3,473.8 +793.4,474.5 +790.8,474.6 +791.5,475.3 +792.2,475.9 +794.7,475.1 +789.9,479.7 +795.8,475.3 +799.2,474.5 +797.5,474.8 +795.9,466 +799.8,465.5 +799.5,468.2 +798.5,469.6 +798.3,468.3 +798.3,467 +796.5,462.6 +796.9,463.8 +784.1,477.6 +786.2,468.6 +794.7,468.4 +791.2,471.8 +795.3,472.3 +793.4,496.5 +784.9,480.7 +781.2,484 +784.6,489.5 +783.1,489.6 +781.9,496.2 +783.2,498 +781.7,498.6 +782.5,496.5 +785.6,494.1 +786.6,486.9 +787.3,484.9 +785.9,484.5 +788,481.7 +791.1,480.8 +790.3,490 +793.9,492.9 +796.2,493 +795.9,491.2 +796.5,491.5 +797.8,495 +797.5,492.8 +796.5,486.7 +796.7,487.1 +799.4,483.4 +797.1,482.8 +803.7,3.5 +801.6,7 +809,11.2 +807.4,7.6 +805.2,11.3 +805.2,6.9 +805.8,0.4 +812.1,7.4 +813.1,9.1 +816.3,19.6 +819.7,17.4 +818.4,17.3 +818.8,12 +816.1,3.8 +819.8,0.6 +817.3,1.1 +802,7.4 +805.6,8.3 +819.1,39.2 +802,54.3 +819.5,50.2 +803.8,63.4 +811.5,61.7 +807.3,64.6 +809.7,72.5 +810.5,62.9 +802.7,76.3 +813,132.7 +810.8,145.5 +801.9,154.3 +813,153.5 +818.3,149.4 +808.3,171.3 +810.8,169.4 +811.9,188.6 +805.6,200.5 +802.5,209.1 +809,215.5 +800.5,234.5 +801.8,238 +806.7,229.5 +809.9,224.8 +812.8,221.9 +812.7,221.4 +813.8,225.9 +814,229.7 +817.3,233.6 +817.3,221.1 +801.4,248.1 +807.3,252.2 +805.9,256.1 +813.3,250.2 +810.5,246.2 +808.8,244.5 +805.5,247.3 +805.2,252.2 +806.2,259.7 +808.5,247.6 +810.8,245.4 +816.8,241.4 +816.3,244.6 +801.3,269.1 +809.1,270.5 +800.8,271.3 +800.4,274.3 
+801.7,271.9 +811.8,277.4 +813.7,278.4 +819.4,267.5 +819.6,266.2 +804,299.2 +802.7,288 +807.6,280.1 +810.8,281.7 +814.6,280.1 +812.1,292.7 +817.8,285.4 +816.3,281.5 +817.6,281.9 +815.3,316.3 +810.2,301.4 +808.2,305.3 +800.6,314.2 +804.7,310.1 +806.4,314.6 +805.4,312.5 +805.4,311.2 +805.1,310.9 +809.2,310.2 +809.6,309 +806.2,309.2 +813.5,311.5 +812.1,316.7 +813.2,318.5 +815.2,317.8 +808.5,318.5 +811.9,339.8 +810.1,320.2 +803.3,320.5 +801,327 +803.2,330 +807.9,328.9 +805.1,323 +808.3,321.8 +813.7,324.1 +814.5,324.4 +810.8,324.8 +816.7,325.6 +818.6,343.7 +812.9,351.5 +800.2,342.1 +801.8,343.2 +801,359.6 +807.1,357.5 +809.9,355.1 +806.5,353.6 +805.6,350.8 +808.8,353 +808.6,346 +806.1,348.1 +810.2,344.6 +814.2,341.9 +811.5,342.7 +811.6,347.9 +813.4,350.6 +813.5,356.7 +813.5,358.8 +814.8,358.1 +816.2,353.3 +816.6,353.5 +809.7,356.7 +816,359.7 +801.9,351.1 +810.8,363.6 +800.3,365.2 +804.1,369.8 +807.6,378.2 +807.4,368.3 +806.7,366.2 +806.1,365.2 +808.6,362.3 +813.9,366 +811.3,370.7 +813.5,371.4 +817.6,376.4 +817.3,376.9 +818.6,363.5 +819,362.3 +805.8,360.3 +800.5,396.7 +801.1,380.9 +804,386 +804.4,392.6 +806.1,394.1 +808.5,390.9 +809.7,383.5 +807.5,380.5 +811,382.6 +814.4,388.2 +817.2,396.6 +803.4,392 +810.3,391.2 +800.3,408.8 +802.1,412.4 +802.1,416.6 +809.9,415.1 +812.2,405.2 +814.9,414.7 +818.4,416.3 +818.3,410.2 +804.6,419.6 +815.4,419.1 +800.1,435.8 +808.6,435.1 +809.1,434.3 +808.5,433.7 +808.9,432.4 +808.5,430.5 +806.4,425.3 +805.6,421.9 +814.1,422.6 +811.1,427.8 +810.9,430.9 +811.4,436.5 +813.6,435.7 +812.2,439 +814.2,437.3 +814.3,435.9 +817.3,424.9 +814.8,438 +818.6,437.1 +816,433.9 +815.2,434.2 +816.8,427.1 +816.3,423.3 +817.5,421 +819.2,423.9 +801.2,453.1 +800.6,457.8 +804.8,459.4 +808,458.1 +807.9,454.3 +805.3,453.1 +813,442.1 +813.5,442.6 +814.5,441.8 +813.3,440.1 +810.4,440.4 +810.7,449.1 +814.2,451.7 +814.2,454 +810.7,458 +818.7,450.4 +815.6,450 +818.6,446.7 +819.3,445.9 +818.9,445.3 +819.1,440.6 +818.5,444.6 +819.5,442.4 +818,473 +808.9,478.4 +803.1,466.4 
+805.3,469.5 +804.8,471.1 +801.3,472.3 +801.1,473.9 +800.3,474.6 +800.1,475.1 +801.3,476.9 +804.2,476.9 +804.9,477.1 +805.9,478.2 +808.6,475.7 +806,475.8 +806.9,474.9 +808,468.8 +810.3,466.6 +806.8,465.1 +805.7,465.4 +809.7,462 +810.4,463.5 +811.8,465.9 +813.8,471.4 +811.5,470.8 +814.3,479.7 +817.2,478.3 +817.3,476.5 +818.2,469.1 +817.4,465.6 +819.8,464 +816.1,462.2 +801.7,488 +804.2,491.1 +805.8,497.2 +800.8,484.8 +803.8,484.5 +804.4,481.2 +805.4,483.5 +803.5,486.5 +803.4,486 +804.8,485.7 +804.5,488.4 +802.4,490.6 +801.4,491.2 +802.3,491.2 +803.1,491.1 +804,492.8 +804.7,494.4 +805.3,493.4 +808.9,495.8 +809.1,497.9 +809.8,497.7 +805.7,494.8 +808.4,493.5 +809.9,492.7 +807.7,491.5 +809.9,490.5 +809.1,490.1 +806.8,488.9 +806.2,487.5 +808.2,486.2 +805.8,481.2 +806.4,480.2 +807.4,480.2 +807.3,481.2 +807,481.9 +810,480.7 +812.5,480.8 +814.4,481.5 +814.7,481.5 +811.4,494.3 +813.1,493.2 +811,491 +810.8,490.3 +811.1,496.2 +817.9,495.1 +817,493.4 +817.2,490.3 +819.1,490.2 +819.6,487.2 +818.3,484.1 +822.3,2.7 +823.1,6.1 +824.5,12.3 +820.5,14 +821.6,11.6 +823.9,10.4 +823.5,17.9 +823,17.7 +825.2,19.6 +826,18.6 +828.3,15.6 +826.7,18.1 +825.2,0.5 +825.3,2.6 +823.6,4 +824.8,3.8 +831.2,5.7 +833.3,8.2 +829.2,26.8 +823,25.3 +828.8,39.6 +828.9,23 +838.5,34.2 +835.7,43.8 +824.6,52 +823.9,51.3 +824.5,53 +825.8,57.7 +827.4,56.4 +829.2,54 +828.1,51.5 +828.3,42.4 +827,41 +830.8,52.7 +839.4,58.6 +827.3,46.6 +825.7,78.3 +825.5,77 +829.8,69 +837.5,74.9 +835.4,65.5 +839.9,69.4 +824.1,95.2 +825.4,82.3 +831.9,98.7 +839.3,96.8 +822.5,109.2 +824.6,115.1 +824.6,116.2 +825.8,109.3 +827.7,102.8 +833.9,105.3 +832.2,106.9 +830.8,118.7 +835,118.2 +835.3,117.6 +835.3,111.6 +839.3,103.9 +832.5,102.9 +824.3,107 +820.8,120.5 +837.9,137.6 +839.2,128.1 +821.6,127.1 +833.3,123.6 +834.4,120.6 +820.1,162.6 +820.4,172.6 +837.8,173.4 +838.2,165.8 +839.4,164.7 +824.4,196.9 +830.2,195.3 +835.5,187.5 +833.4,183.3 +827.5,189.6 +827,193.5 +835.3,197.9 +833.7,206.8 +832.6,211.4 +835.5,203 +828.2,233.5 +827.6,228.7 
+823.8,229.8 +825.2,233.8 +833.2,220.9 +831.6,228.4 +839.4,239.2 +835.9,225.9 +838.5,255.9 +824.3,245.2 +822.6,245.9 +822.4,256 +823.8,258.8 +827.5,255.8 +829.7,252.9 +824.9,248 +831.1,243.5 +833.5,249.8 +833.1,250.9 +839.8,250.7 +839.4,249.8 +836.5,250.6 +836.9,245.5 +839.8,248.7 +836.4,246.2 +839.9,266.8 +824.9,261.5 +824,271.6 +829.7,275.3 +837.3,271 +837.6,271.9 +839.2,268.1 +836.1,269.4 +836,265.3 +837,264 +837.6,270.8 +828.6,280.8 +831.9,287.6 +830.8,289.3 +830.3,295.9 +837.2,284.1 +821.6,312.8 +825.5,318 +825.7,305.7 +836.2,306.1 +833.9,330.3 +839.4,333 +822.9,323.7 +824.9,320.3 +822.8,322.1 +821.8,321.7 +821,331 +823,330.4 +824.5,338.5 +826.5,334.2 +830.7,335.4 +838.2,337.6 +837.4,338 +837.1,338.5 +837.2,332.5 +838.4,331.9 +823.5,337.2 +821.9,321.1 +822,351.9 +822.8,358 +822.7,356.1 +828.1,349.8 +826.7,345.5 +834.1,340.9 +833.6,348.8 +831.7,347.1 +832.1,346.3 +836.7,359.1 +837.3,346.1 +835.3,344.1 +835.8,344 +831.5,344.8 +822.2,362.8 +822.2,364 +821.3,368.6 +822,369.6 +823,369.7 +823.3,368.6 +824.5,365.9 +822.4,371.8 +822.6,377.5 +825,377.5 +829.3,378 +827.6,372.9 +828.8,374.6 +827,362.7 +829.7,360.7 +838.3,364 +824.7,381 +820.2,389 +820.5,389 +837.3,390.9 +835.2,396.2 +836.7,402.4 +820.1,406 +825.9,402.6 +832.2,404.9 +834.9,401.9 +831,407 +831.3,409.8 +832.8,411.7 +833.4,410.9 +834.6,419.6 +836.1,416.8 +836.7,419 +837.2,418.5 +839.9,416.4 +837.8,414.4 +839.5,414.8 +839,414 +839.3,412.6 +839.8,412.3 +838.8,411.3 +838.8,410.5 +836.5,405.5 +836.4,400.9 +837.8,419.8 +821.3,420.9 +824,423 +823.2,423.8 +820.7,429.9 +822.2,429.7 +825.4,429.9 +820.5,431.7 +823.3,432.2 +824.5,426.6 +824.1,435.9 +822.6,437.5 +826,431.8 +833.7,420.4 +833.2,422.7 +831.8,425.5 +838.3,431.3 +838.1,428.1 +839.3,427.3 +838.7,426.1 +839.4,420.6 +839,421.8 +836.8,424.6 +838.6,420.5 +833.3,436.7 +822.1,442.7 +822,449.7 +822.3,449.8 +822.5,452.3 +837.2,446.3 +820.2,462.6 +820.5,464.8 +822.2,463.8 +824.5,464.1 +822.3,462.8 +823.4,465.6 +822.1,466.7 +822.2,465.5 +824,472.3 +822.7,479.8 
+826.8,471.5 +837.5,470.4 +821.5,480.6 +821.5,482.1 +822.2,482.8 +822.1,488.9 +824.8,490.1 +822.3,486.2 +820.5,494.1 +821.3,494.4 +823.1,494 +821.1,498.6 +821.4,497.9 +826.2,498.8 +826.1,492.5 +828.9,494.3 +825.1,488.2 +826,486.5 +827.1,489.3 +826.2,490.2 +827,489.9 +827.9,488.3 +827,487.5 +826.8,486.3 +827.5,487.4 +830.2,489.7 +824.7,482.2 +827.7,482 +828,482.7 +828.5,483.5 +830.5,483.5 +833,489.1 +830.8,494.3 +832,493.7 +833.1,493.6 +833.2,498.4 +834.5,495.2 +836.7,497.4 +838.3,498.9 +822.6,487.1 +837.6,496.6 +847.1,5.8 +840.2,12.7 +848.2,38.4 +853.8,25.5 +841.6,34.1 +851.8,36 +840.1,47.2 +844.3,53.2 +844.8,54.2 +843.1,55.3 +844.2,58.3 +844.4,58.5 +843,59.3 +844.2,77.6 +848.7,78.8 +850.2,73 +846.1,66.3 +850.8,68.8 +851.2,77.4 +857.2,71.2 +845.2,62 +842.7,94.1 +843.7,94.9 +841.1,93.7 +843.9,95.2 +840.5,98.1 +846.8,96.8 +846.3,91 +849.8,82.5 +852.3,81.1 +852.5,88.8 +850.3,93.8 +850.8,99.6 +857.8,98.2 +852.6,101.3 +854.8,100.7 +853.5,106.1 +851.3,109 +859,116.9 +856,109.1 +856.7,106.9 +859.7,102.8 +857,103.9 +851.4,124 +855.1,139 +859.3,126.4 +859.1,127.9 +857.8,122.7 +849.3,157 +846.5,151.9 +845.8,146.9 +851.5,142.2 +851.8,169.3 +840.2,164.7 +843.2,172.3 +846.7,171.8 +847.7,164.5 +854.4,164.8 +852.7,169.8 +855.8,162.6 +858.7,184.9 +851.1,187.6 +845.9,204.8 +842.9,211.9 +840.3,203.8 +842.5,203.1 +850.2,200.4 +855.3,206 +858.7,209.8 +857.3,203.6 +856.6,219.6 +848.2,238.6 +844.8,233 +842.2,231.9 +843.7,237.9 +841.8,238.5 +847.3,238.9 +846,236.4 +846.7,231.3 +850.7,231 +854.9,229.1 +857.1,237.9 +856.9,236.9 +858.2,220.5 +859.1,221.1 +846.9,244.2 +840.7,243.1 +843.4,241.9 +843.8,244 +842.9,245.4 +842.6,249.4 +848,250.9 +846,252.9 +845.2,255.3 +848.3,256 +849.6,255.4 +854.2,249.6 +852.3,255.4 +850.1,259.4 +853.5,259.8 +854.2,255.6 +858.7,259.2 +858.8,253.8 +857.2,254.6 +856.9,244.4 +855.7,273.9 +842.9,273.8 +843.1,271.1 +842.4,278.8 +848,271.5 +845.7,267.5 +853.6,263.4 +851.7,269.3 +855.7,263.6 +840.1,280.9 +844,286.7 +840.9,287.5 +844.7,290.7 +843.2,293.2 +851.2,290.2 
+850.8,293.3 +851.5,292.6 +851.6,298 +853.1,297.1 +859.8,299.4 +859,284.1 +851.4,298.6 +858.6,294 +849.3,290.3 +853.4,282.2 +850.5,286.6 +849.4,306.4 +847.5,300.2 +851.8,302.4 +853.6,302.4 +857.4,300.5 +857.8,303.7 +858.4,304.6 +856,308.6 +846.1,323.7 +847.4,320.4 +851.7,324.9 +852.2,326.3 +846.9,328.4 +844.2,346.3 +843.4,349.3 +854.9,355.9 +859,348.8 +857.5,344.7 +843.5,366.1 +843.3,370.7 +847.2,379 +846.1,370.7 +847.1,374.3 +841.5,383 +840.6,382 +840.8,388.3 +841.4,393.9 +847.8,399.3 +847.6,394 +854.2,394.5 +843.9,402 +844.6,403.2 +843,402.8 +843.5,405.4 +843.9,406.5 +844.5,409.5 +840.9,406.5 +842.6,411.5 +844.8,414.5 +840.7,413.1 +841.2,415.3 +841.3,416 +842.6,415.6 +845.8,419 +848,415.6 +848.5,418 +846.9,419 +846.8,410.1 +849.2,411.8 +847.2,412.4 +845.8,406.7 +845,406.5 +846,405.6 +846.1,406.1 +848.3,406.3 +848,407 +848.9,407.2 +846.8,400.6 +848.9,403.6 +847.9,404.1 +847.5,403.5 +853.4,402.7 +853.4,403.9 +852.1,404.5 +850.2,407 +850.7,406.1 +853.7,407.5 +852.1,409.4 +850.1,409 +851,408 +850.3,407.2 +851.6,410.6 +853.9,410.7 +854.4,411.5 +852.27,412.17 +853.3,415 +850.2,414.6 +850.1,417.1 +853.8,415.8 +852.7,419.8 +850.6,418.8 +850.6,418.8 +855.6,418.6 +858.1,417.1 +859.1,418.2 +856.8,419.1 +856.5,419.8 +854.5,413.2 +855.9,412.4 +855.2,412.1 +856.8,414.8 +855,407.6 +858.1,405.9 +859.6,406.8 +855.6,405 +841.6,409.2 +848.5,420 +842.7,420.6 +844.8,420.5 +844.8,424.9 +840.6,422.7 +840.6,426.6 +842.2,428.1 +842.8,426.5 +844.1,426.2 +845.7,431 +847.3,432.1 +847.9,433.5 +845.5,429.1 +845,428.2 +846.2,420.9 +848.4,421.5 +851.9,421.1 +853,422.3 +854.2,423.9 +854.5,424.9 +850.1,424.6 +850.6,424 +851.5,423.6 +850.2,426.7 +853.5,426.1 +854.8,426 +853.1,427.1 +853.6,427.9 +854.9,428.8 +854.6,429.9 +852.4,428.8 +851.3,428.5 +850.6,427.9 +851.7,432 +852.3,431 +858,427 +859.3,429.8 +857.7,425 +856.8,424.1 +856.9,420.2 +858.3,422.6 +858.2,423 +841.7,423.2 +841.3,451.9 +845.7,445.9 +841.4,479.9 +846.5,471 +847.1,471.3 +853.8,462.8 +851.5,466.8 +852.9,466.5 +840.4,484.5 
+840.5,488.4 +853.8,484.8 +852.9,487.1 +850.1,487.4 +851.3,494.6 +859.8,494.1 +866.1,0.8 +877.8,7.5 +871.4,24.8 +861,29.7 +875.2,23.5 +874.9,33.8 +874.8,34.8 +873,30.6 +878.6,30.9 +861.7,21.2 +866.2,43.1 +860.8,41.8 +867.4,61.6 +876.6,63.8 +868.9,79.9 +875.3,76.2 +870,80.6 +860.6,92.8 +862.1,103.2 +862.8,109.1 +861.7,108.6 +860.7,108.8 +873,105.8 +873.2,109.6 +870.5,106.1 +878.8,113.6 +877.5,111.3 +876.8,108.9 +860.1,120.8 +860.6,121.5 +863.6,120.2 +862,128.4 +863.5,127.3 +864.6,131.9 +866.5,133.1 +867.1,130.6 +870.4,130.5 +867.1,125.1 +866.8,128.7 +877,138.3 +864.7,144.6 +862.8,144.7 +863.1,149.4 +861.1,149.8 +863.2,152.4 +861.5,156.6 +860.8,157.2 +868.9,145.1 +873.9,165.5 +860.4,166.9 +862.7,175.7 +869,192.9 +864,205.7 +863.4,218.5 +863.4,219.6 +861.7,216.5 +864.4,215.6 +870.1,233.7 +875.5,221.3 +868.1,226.9 +865.4,226.2 +860.5,220.8 +863,222.6 +867.5,222.8 +871.3,221.8 +873.6,237 +876.8,235 +879.9,234.5 +878.4,231.2 +868.8,259.2 +868,257.7 +871.8,250.7 +871.4,254.1 +877.1,256.8 +878.4,253.8 +877.5,245.6 +863,262.4 +863.3,261.2 +866.7,267.7 +867.2,262.9 +870.1,261.3 +861.5,286.3 +860.4,298.1 +873.6,283.5 +875.2,299.2 +876.1,297.5 +874.8,288.5 +862.5,312.5 +862.4,301.8 +869.9,317.5 +868.4,309.1 +869.5,304.5 +871.9,300.1 +870.4,303.1 +873.3,309.9 +875.3,304.7 +871.5,312 +871,313.4 +874.3,313.8 +872.5,312 +870.8,315.4 +871.7,315.7 +872.1,319.6 +873.5,316.4 +876.6,315.9 +877.5,312.5 +879,306.5 +878.8,302.4 +869,306 +872.9,310.8 +873.3,329.6 +873.7,339.4 +879.1,330.8 +878.8,328 +863.3,335.9 +865.1,343.4 +864.1,343.9 +864.8,345.5 +869.9,351.9 +868.8,346 +871.5,346.9 +873.5,359 +878.2,358.5 +878.6,356 +876.6,357 +874.6,365.5 +872.9,370.8 +872,375.6 +876,379.5 +878.9,364.6 +878,360.9 +876.9,362 +876.4,360.7 +860.5,386.3 +864.8,399.6 +874.2,387.8 +873.7,394.6 +874.5,399.3 +874.7,396.8 +875.1,399.3 +879.4,399.8 +876.7,394.8 +878.9,392.5 +867.8,389.4 +875.9,395.4 +875.6,418 +860.4,401.8 +861.7,400.2 +860.9,416.5 +865,419 +869.2,415.5 +866.6,417.3 +869,413.1 +870.4,414.2 
+870.8,412.5 +872.4,401.6 +870.8,407.7 +870.7,415.5 +873.1,416.5 +871.4,417.1 +878.2,400.8 +860.8,423.3 +862.6,424.9 +862.7,424.2 +864.6,423.5 +864,420.6 +864.5,420.3 +860.2,427.6 +863,421.1 +860.1,429.1 +864.8,428 +861.9,434.8 +864.8,431 +867.8,437.3 +869.5,431.9 +868.5,429.3 +870,425.5 +867.4,426.2 +861.2,426 +867.1,422.3 +869.1,420.2 +866,422.1 +865,435.5 +874.7,432.8 +870.9,435 +871.6,437.8 +873,438.6 +860.7,446.6 +865.6,455.4 +867.5,457.1 +870.6,447.9 +878.6,453 +870.8,478.3 +865.5,471.8 +866.4,472.5 +865.8,465.3 +872.4,475 +870.1,468.8 +870.9,476.4 +877.5,478.4 +876.8,469.7 +868.8,467.3 +873.9,467.5 +863.6,488.7 +874.9,483.7 +861.5,480.6 +860.3,493.4 +867.2,498.7 +870.5,482.2 +870.6,497.1 +872.9,498.1 +889.2,18.5 +892.8,25.5 +898,23.6 +897.4,39.5 +880.2,52.6 +897.5,54.6 +885.3,64.8 +885.4,70.5 +887.3,61.7 +880.6,72.1 +880.4,74.7 +882.5,77.7 +882.2,75.4 +894.2,93.6 +892.9,94.5 +885.3,83 +885.4,103.1 +895.3,108.5 +893.2,100.6 +883.5,113.9 +882.5,119.9 +886.2,104.6 +888.7,104.2 +885.6,124.3 +893.8,133.4 +893.5,173.3 +897.2,172 +898.8,179.6 +883.4,184.2 +886.2,189.4 +887.2,199.9 +888.2,195.7 +891,186 +891.7,195.8 +891.7,196.6 +892.5,198.1 +889.8,213.2 +886.8,209.7 +890.4,210.9 +890.8,219.6 +884.5,222.4 +883.6,233.1 +881,235.7 +884.9,238.5 +889.9,220.7 +890,223.6 +899.8,238.5 +889.6,253.6 +880.1,247.8 +884.2,259 +887.1,242.6 +889,243.1 +893.5,242.9 +896.1,255.6 +895.3,252.9 +895.9,246.8 +896,243.6 +883.6,262.5 +883.9,268.8 +889.72,274.59 +888.8,269.5 +893.9,271.8 +894.4,273.6 +894.7,271.3 +894.8,270.5 +894,275 +892.1,278.7 +891.5,278.8 +897.8,276.7 +896.3,272.9 +897.3,271.4 +897.3,265.2 +898,264.2 +885.8,297 +886.2,299.1 +890.1,297.2 +891.5,299.8 +892.8,299.3 +891.4,298 +891.8,297.4 +892.3,296.9 +892.2,295.5 +895.7,288.8 +896.4,288.7 +898.7,283.9 +897,281.1 +893.4,299.3 +883.8,283.4 +892.6,288.3 +882.6,300.6 +884.5,304.2 +883.3,303.8 +883,301.6 +881.5,304.3 +880.6,305.2 +880.3,307.1 +881.2,307.3 +881.2,306.2 +880.5,312.5 +887.7,305.7 +888.6,304.5 +888.3,303.8 
+885.2,302.3 +890.3,302 +890.7,301.9 +891.2,301.2 +891.5,300.9 +891.9,301.6 +892.5,301.2 +894.5,301.6 +893.5,302.4 +892.1,303.5 +892.8,304.7 +893.3,304.8 +890.2,307.3 +893.2,308.4 +893.6,311.5 +897,308.3 +897,304.7 +896.5,304.1 +897.2,303.5 +897.3,303.4 +896.9,301.9 +898.5,302.5 +898.3,303.8 +896.3,303.3 +884.2,306.1 +888.6,301.4 +890.6,300.5 +893.7,306.3 +896.6,318.3 +880.1,306.7 +893.3,330.4 +897,331.4 +896.8,325.8 +896.6,326.7 +899.4,322.6 +899.7,322.2 +885.3,325.5 +897.8,333.8 +883.9,354 +880.7,359.9 +887.4,347.1 +885.2,346.9 +894.8,342 +890.7,342 +890.5,345.5 +891.7,345.5 +893.5,351.8 +889.9,359 +889.7,358.6 +897.5,340.8 +898.1,343.9 +885.4,357.1 +897.7,340.3 +881.1,363.9 +882,361.2 +884.8,361.2 +886.5,378.3 +885.7,369.6 +899.2,378.4 +898.9,373.4 +899.6,368.5 +896.7,361.7 +882.5,385.9 +884.1,394.5 +887.5,386 +887.8,384.8 +891,390.7 +897.7,399.7 +898.3,389.7 +899.2,387.5 +899.7,384.8 +897.9,383.1 +898.3,383.2 +893.5,386.1 +881,408 +881.1,407.1 +880.4,409.2 +883,419.1 +883.8,416 +888.9,409.4 +891.8,401.1 +892.2,407.8 +893.2,409.5 +891.9,408.6 +891.6,408.2 +893.8,406.4 +889.1,406.7 +890.9,412.5 +894.4,415.3 +898.5,413.5 +895.7,415.9 +896.7,409.2 +899.4,404.6 +890.7,419 +889.4,432.4 +883.1,422.5 +884.2,426.1 +883.8,426.7 +882,435.8 +880.6,436.9 +881.7,438.1 +881.3,437.5 +886.3,439.1 +887.1,427.5 +890.2,426 +885.8,424.6 +894.6,422.3 +895.1,423.5 +890.7,423.7 +896.2,426.4 +899.6,426.1 +880.9,440.4 +883.2,440.6 +882.9,444.8 +883.5,449.9 +881.5,450.1 +880.8,459.2 +889.1,451.5 +887.2,445.1 +888.1,440.4 +886.9,440.5 +887.4,443.9 +890.8,444 +890.6,447.4 +896.3,443.6 +890.4,451.6 +887.2,458.3 +885.4,444.9 +894.4,469.3 +882.7,464.3 +881,466.5 +880.8,469.9 +881.1,472.6 +881.4,478.8 +886.4,471.3 +886.6,466 +889.4,462.6 +899,479.1 +898.5,476.6 +897.2,470.5 +897.7,468 +898.7,466.3 +896.4,466.4 +895.5,463.8 +888.2,499.1 +899.3,480.2 +890.2,497.5 +887.1,490.9 +887,486 +889.5,483.4 +893.6,485.2 +899.7,483.1 +899.8,481.9 +916,13 +914.4,7.3 +900.4,16.4 +910.5,11.5 +909.8,12.8 
+911.5,58.5 +900.8,48.2 +913.9,48.4 +917.6,51.8 +915.5,53 +918.3,45 +912,75.5 +908.2,83.9 +905.9,93.8 +911.7,88 +915.9,91.9 +901.1,104.2 +909.6,114.5 +916.2,110.4 +914.1,135.2 +907.6,130.9 +903.2,125.4 +906.7,138.5 +907.3,137.5 +917,131.6 +916.8,132.3 +913,146.1 +916.2,140.6 +904.1,144 +910.8,140.4 +914.2,146.7 +917.8,164.2 +900.5,164.3 +905.6,192.1 +918.8,191.1 +918.8,192.8 +917.5,184.2 +900.7,201.6 +913.8,210.1 +919.4,212.7 +916.8,209.5 +909.5,235.4 +907.6,223.4 +904.2,232.6 +903.3,231.7 +902.5,239.7 +908,236.2 +906.2,234.6 +908.4,229.4 +911.4,229.9 +914.3,228.1 +911.2,231.4 +912.6,231.3 +912,234.7 +914.2,239.8 +919.4,239 +919.4,233.7 +916.6,232 +919.8,228.5 +918.9,225.2 +903.7,249.8 +904.5,246.4 +900.9,251.1 +902.1,251.4 +906.6,256.7 +909.2,251.7 +906.1,253.7 +907.5,245.3 +908.1,244.7 +905.2,241.3 +913.3,241.8 +914.5,240.5 +910.7,250.6 +911.7,253 +912.9,253.8 +914.6,253.7 +911.3,257.9 +917.5,256.1 +915.8,256 +919.2,253.1 +915.1,250.3 +918.3,246.6 +918.3,245.8 +916.9,245 +918.6,242.2 +918,241.3 +902.5,264.8 +904.2,262.6 +902.7,265.9 +909.1,262.5 +907.2,262.5 +908.2,260.4 +911.9,262.5 +912.8,278.2 +918.8,279 +918.7,276.3 +919,274.1 +918.5,271.7 +918.1,267.7 +904.2,283.6 +904.4,297.5 +908.6,295.7 +908.2,292.4 +905.8,294.8 +908.2,284.2 +913.7,285.2 +912,285.4 +917.8,299.4 +919.8,298.5 +919.3,296.5 +918.1,292.8 +915.6,288.4 +900.2,291.3 +904.8,314.2 +903,316.5 +904.5,317.4 +906.1,319.4 +906.6,318.9 +907.5,317.3 +908.3,315.8 +906.5,316 +907.1,313.7 +906.7,313.6 +909.4,313.5 +909.2,312.2 +909.1,311.1 +906.2,311.3 +905.2,305.9 +907.5,302 +905.7,302.5 +912.8,308.3 +914.1,307.4 +910.5,311.8 +911.4,314.2 +913.4,312.5 +913.7,316 +910.5,317.7 +912.5,315.4 +919.1,316.9 +917.6,317.1 +918.5,314.8 +915.7,311.9 +918.3,301.2 +901.1,320.7 +901,325.4 +900.7,335.8 +907.9,320.5 +906.7,320.9 +913.9,323.9 +918,335.2 +918.2,330 +915.2,331.4 +918.2,325 +919.5,323.6 +918.5,322 +919.7,321.3 +918.7,320.6 +916.4,322.6 +915.4,346.6 +912.2,350.8 +902.5,353 +904.4,351.4 +902.8,356.3 +900.7,358.3 
+902.5,359.2 +904.8,358.1 +903.7,356.7 +903.7,356 +904.5,356.2 +906.5,356 +905.6,357.2 +906.2,351.3 +917.8,351.8 +901.2,360.8 +901.2,363 +903.9,363.7 +904.8,369 +908.8,376.2 +907.3,362.4 +906.7,363.4 +912.8,365.2 +917.4,370.8 +916.5,373 +916.1,364 +910.3,388.6 +900.5,384 +902.1,389.6 +902,389.3 +904,386.8 +904.4,399.5 +904.8,397.3 +904.9,396.6 +904,396.5 +908.8,399.9 +912.8,382.9 +913.5,381.6 +912.1,385.6 +910.3,386.3 +911.4,399.9 +917.3,384.3 +912.4,410.5 +916.5,406.1 +902.8,408.8 +901.6,413.6 +900.7,418.5 +910.2,401.7 +913.1,406.9 +911,414.4 +918.8,415.1 +916.7,411.3 +918.6,410.2 +914.2,437.3 +902.4,432.1 +902.2,436.1 +914.3,437.9 +904.6,449.4 +901.8,459.1 +908.4,455.4 +913.4,456.2 +918.6,455 +917.8,457.9 +915.2,443.4 +901.1,461.8 +901.2,462.9 +904.1,463.1 +900.3,473.4 +909.1,478.2 +909.4,477.1 +908.5,475.3 +906.8,479.2 +909.1,472.4 +910.5,471.7 +909.9,460.5 +914.4,460.6 +910.7,471 +910.8,476.4 +913,476 +919.3,460.9 +915.2,461.9 +901.7,483.8 +904.8,485.9 +903.7,493.8 +904,495.9 +901,498.8 +906.2,498.1 +905.2,495.6 +908.9,496.6 +909.5,492.9 +906.7,487.7 +905.7,486.2 +911.1,485.5 +912.3,492.2 +912.6,493.6 +910.4,495.2 +918.7,498.9 +915.3,485.2 +932.7,37.2 +927.1,41.2 +931.2,55.4 +925.5,76.8 +935.8,71.6 +922.4,99.8 +928.6,87.8 +933.7,83.8 +923.9,86.9 +924.8,95.9 +934.1,81.1 +920.3,117.5 +934.6,119.3 +923,114.1 +936.4,100.5 +925.1,116.3 +920.6,136.9 +933.2,129.5 +931.3,136.1 +921.6,157 +926.7,143.8 +939.1,159.4 +931.2,162.2 +937.4,176.1 +924.6,184.6 +925,198 +936.3,189.6 +938.2,186.7 +938.8,186.1 +920.3,211.4 +923,221.5 +924.7,231.4 +923.5,232.2 +920.4,232.5 +920.3,235.2 +923.2,235.7 +922.2,239.6 +927.3,221 +930.1,239.1 +936.5,254.8 +925.1,257.1 +929.5,242.5 +936.5,244.3 +937,240.1 +922,267 +924.8,271.6 +922.8,279.6 +921.6,279.9 +929.2,279.1 +926.1,264 +937.8,275 +938.9,272.3 +920.8,284.8 +923.6,283.6 +924.2,289.1 +922.7,287.5 +923.4,290.3 +921.9,292.4 +920.3,295.3 +924.9,295.9 +924.2,298 +924,299 +922.1,297.6 +921.6,298.4 +925.3,295.5 +926.1,295.3 +927.1,296.2 
+925.4,292.5 +925.4,290.4 +929.4,292.5 +925.6,287.4 +925.2,286.3 +925.3,281 +927.7,280.8 +929.9,282.5 +930.7,285.3 +932.2,288.1 +932.2,290.2 +934,298.5 +937.5,299 +938.3,294.7 +938.6,282.8 +920.9,290.6 +926.3,290.1 +935.4,300.1 +921.2,304.8 +926.9,319.8 +927.2,316.1 +929.2,311.7 +929.6,307.5 +927,303.2 +927.7,301.5 +927.2,300.2 +928.7,301.3 +929,302.1 +928.6,303 +929.1,304.4 +931.4,300 +933,300.9 +932.2,303.8 +931,304.8 +930.1,302.9 +932.8,302.9 +934.8,309.8 +933.1,309 +930.3,307.4 +931.8,307.5 +930.8,311.1 +932.9,310.1 +934,312.6 +932.9,315 +930.1,313.7 +931.7,313.2 +935.2,315.6 +935.3,313.9 +936,313.1 +935.5,303 +938.5,303.1 +927.5,313.1 +932.8,334.4 +921.4,324.6 +921.2,326.3 +924.8,327.5 +924.1,326.4 +922.6,326.5 +928.3,332.1 +932.9,320.4 +933.7,325.3 +930.3,328.2 +936.5,325.3 +922.8,353.8 +925.5,358.9 +928.1,352.9 +929.3,359.1 +926.9,354.2 +930.7,353.6 +931.8,351.2 +933.7,354.6 +933.9,351.1 +930.2,355.1 +930.7,355.8 +934.2,358.9 +930.8,359.5 +931.4,356.5 +935.7,355.9 +935.1,353.2 +926.7,352 +923.7,367 +924.3,375.3 +929.7,360.3 +929.6,361.9 +931.1,361.5 +930.5,366 +931.4,368.2 +932.3,368.3 +933,378.9 +936,363 +922.5,390.1 +921.8,382.1 +929.8,394.7 +931.9,387 +931.3,394.5 +933.8,398.6 +935.3,397.9 +932.3,399.8 +930.5,409.9 +929.8,415.4 +928.9,419.2 +926.2,412.7 +928.2,413.9 +928.2,414.6 +926.2,404.7 +932.2,413.5 +922.4,426.5 +932.9,422.9 +923.3,421.2 +929.9,425.6 +926.4,424 +934.6,433.9 +933,433.9 +932.9,436.4 +937.1,434.5 +924.7,442.2 +923.5,458.1 +925.1,459.4 +927,451.5 +929.6,453.7 +931.2,451.8 +933.3,451.7 +937.7,459.4 +938.2,452.3 +921.5,463.4 +920.9,469.5 +922.6,465.2 +924.3,474.5 +922.8,475.9 +924.9,479.3 +927.8,475.5 +926.1,472.6 +927.9,473.1 +927,460.4 +935.4,464.9 +937.3,460.8 +939,493.4 +923,480.3 +925.9,498.8 +929.3,498.4 +928.9,486.9 +927.8,480.7 +928.5,482.9 +933,481.5 +938.4,493.1 +937.3,488.5 +935.8,481.9 +938.9,480.5 +957.4,5.1 +957.4,1.6 +949.3,22.5 +950.6,28.9 +950.3,29.9 +946.1,34 +948.4,23 +942.7,55.9 +944.3,40.4 +949.1,53.9 +953.9,51.3 
+942.5,64.9 +945.3,61 +945.1,78.1 +954.3,76.9 +956.1,64.1 +940.8,81.6 +944.5,84.1 +948.8,84.4 +952.5,83.4 +954,81.1 +952.6,88.7 +940.7,109.1 +940.9,115.4 +953.1,119.1 +956.5,119.5 +943.2,130.4 +954.7,132.9 +955.7,131.4 +940.4,155.8 +953,143.5 +951.5,158.3 +945.4,164.5 +954.9,165.6 +956.8,165.8 +951.7,162.4 +955.4,164.6 +950.6,162.8 +943,183.4 +940.3,190.3 +956.1,180.1 +946.4,225.5 +947.3,227.3 +953.1,226.1 +954.2,229.2 +954,227.3 +958.1,239.8 +956.3,226.7 +951.7,249.4 +947.9,253 +949.9,252.9 +945.3,250.9 +949.3,244.6 +953.8,243.7 +954.8,245.5 +957.1,250.6 +950.4,251.5 +959.3,253.7 +956.4,252.6 +955.2,250.4 +956.7,250.1 +958.4,251.3 +959.6,245.3 +956.2,247.2 +957.5,248.8 +958.1,247.5 +956.7,244.2 +957.8,243.7 +953.2,274.8 +944.2,275.2 +944,276.7 +943.8,277.3 +949.6,277.9 +947.5,276.6 +946.5,276.8 +945.3,276.8 +949.5,274.4 +948.6,273.6 +949.5,271 +949.3,270 +948.1,270 +948.3,271.3 +947.1,270.8 +946,269.5 +947.5,268.6 +948.8,267 +945.2,263.5 +954.2,263 +953.4,266.6 +951,273.2 +950.2,274.7 +950.8,274.3 +954.1,273.6 +954.2,271.9 +954.9,271.3 +954.6,270 +953.4,270.1 +952.9,270.9 +954.8,273.9 +953.5,275.9 +952.5,279.2 +950.5,278 +950.9,277.5 +957.8,274.3 +957,274.2 +959.9,271.3 +959.7,270 +955.7,269.2 +959.5,275.7 +959.3,278.3 +946.6,280.2 +940.6,283.4 +942.4,282.4 +941,289.1 +942,292.6 +941.2,293.9 +943.8,294.5 +944.2,293 +942.1,297 +943.7,298.5 +947.1,296.5 +948.9,291.1 +948.4,292.1 +948.8,292.8 +950.4,280.8 +952.8,288.4 +952.4,293.5 +944.6,296.3 +940.3,303.4 +947.4,319.2 +957.3,317.7 +947.4,306.5 +945.7,307.8 +951.5,300.2 +951.4,301.2 +953.2,300.6 +953.9,307.9 +951.3,313.1 +953.5,315.3 +954.1,319.3 +953.3,318.6 +951.5,318.5 +955.6,318.4 +959.8,319.2 +955.3,315.2 +959.1,314.6 +959,314.1 +956.4,310.6 +955.6,310.9 +959,308.9 +945.6,321.9 +948.2,336.7 +949.4,339.8 +949.9,334.4 +946.3,331.5 +948.2,325.6 +950.3,326.1 +953.5,322.4 +950.3,321.2 +950.1,323.3 +952.1,323.3 +952.7,326.9 +951.3,327.2 +953.1,328 +951.8,329.5 +954.1,329 +953.7,328.3 +954.3,327.3 +952,332.3 
+951.8,333.9 +952.3,332.4 +954.6,339.6 +957.4,332.2 +959.4,333 +958,325.5 +955.5,326.4 +957.4,321.4 +951.4,321.4 +940.3,345 +942.3,351 +942.4,351.5 +943.7,355 +947.2,355.2 +948.7,353.3 +948.3,351.8 +953.4,340.3 +941.9,356.5 +940.8,372 +948.8,376 +949.4,371 +949.4,369.2 +952,360.6 +954,366.2 +951,375.5 +950.5,375.7 +953,376.1 +956.2,377.9 +958.3,377.9 +958.9,374.6 +958.3,371.1 +955.7,374.3 +945.9,380.7 +943.9,389.4 +942.9,386.5 +953.2,383.1 +954.3,382.7 +952.8,389.7 +953.9,394.6 +953.8,393.2 +954.8,398 +954.8,399.3 +952.3,398 +956.7,388.1 +956.8,384.2 +957.2,384.2 +956.9,382.7 +957.8,380.4 +942.2,402.5 +952,404 +956.7,418.9 +955.9,416.1 +959.3,417.3 +945.3,426.4 +949.4,427.5 +950.5,425.2 +950.4,430.5 +951.4,433.8 +955.3,439.2 +955.3,432.4 +954.8,427.6 +942,446.3 +949.8,452.1 +951,444.5 +957.2,452.9 +943.3,460.5 +944.3,467.1 +940.3,472.8 +941.8,474.6 +946.8,462.6 +953.4,466.4 +955.8,479.5 +958.7,472.4 +955.4,463.2 +940.7,476.9 +941.2,479.4 +941.8,479.9 +944.7,479.3 +957.2,464.2 +950.4,480.3 +941.2,482.7 +941,488 +945.2,489.9 +942.5,486.2 +943.7,495.2 +948.2,491.2 +949.3,490.8 +946.3,487 +945.3,485.7 +952.2,490.3 +958.6,489.6 +964.3,6 +964.1,11.9 +964.3,17 +967.7,16 +965.3,12.1 +967.4,10.9 +965.8,6.4 +974,4.8 +973.2,5.6 +974.8,9 +974.8,15.3 +975.4,16.5 +976.3,15.8 +978,18 +976.8,12.5 +965.5,20.9 +973.3,32.8 +972.5,33.7 +971.7,32.3 +976.6,20.9 +973.6,53.6 +960.5,55.4 +977.5,85.4 +968.8,84.6 +976.5,96.9 +964.2,103.3 +977.2,115.9 +979.8,106.8 +975.8,102.3 +964.1,129.2 +966.1,134 +971.8,123.3 +973.8,127 +974.7,129.8 +972.1,130.5 +976.5,120.7 +964.4,145.2 +966.7,153.6 +966.4,141.9 +968.3,142.2 +974.4,143.6 +972.5,146.4 +973.5,145.6 +974.5,145.5 +973.7,147.9 +971.6,154 +974.7,156.6 +967.4,176.9 +962.2,162.1 +960.8,178.6 +965.8,162.6 +974.4,162.1 +974.6,174.6 +975.4,168.6 +979.8,169.5 +973.1,186.9 +974,190.3 +979.8,190.6 +978,194.1 +976,183.7 +968,205.4 +974.5,200.3 +974,209.8 +978.9,204.8 +977.4,200.1 +973.5,230.6 +964.9,245.2 +973.9,251.8 +966,257.9 +963.5,250.4 +964,248.1 
+964.3,248 +966.3,248.8 +968.9,249.1 +968.3,249.1 +968,248.8 +967.7,248.2 +969.4,246.6 +966.1,249.6 +969.6,248.2 +968.8,264 +964.9,262 +962.3,267.1 +963.8,279 +968,275 +970.2,272.8 +966.5,272.3 +965.6,264.4 +973.9,270.2 +974,271.1 +973.2,271.6 +972.5,271.6 +971.6,272.8 +974.2,275.5 +970.9,275.6 +972.1,277.5 +975.2,273.1 +975.1,273.9 +975.4,270.5 +977.6,273.4 +978,272.7 +975.9,269.4 +961,276.8 +960.7,299.3 +960.9,286.1 +963.3,289.7 +964.8,297.5 +965.3,298 +963.5,294.8 +963.8,294.7 +969.2,297.7 +967.2,293.9 +968.8,289.9 +968,283.9 +968.6,281.1 +971.4,287.5 +970.2,287.6 +971.7,285.9 +973.2,285.5 +976.7,287.3 +971.4,290.5 +972.1,294.1 +974.5,293.7 +971.1,295 +971.2,296.1 +971.2,296.4 +973.2,296.2 +975.9,296.4 +976.5,293.4 +979.5,281.7 +963.4,283.4 +964.3,287.1 +973,287.4 +968.7,293.2 +978.2,295.7 +975.9,286.9 +964.2,316.5 +963.8,302.4 +960.6,304.6 +960.5,307.4 +960.4,308.6 +962.9,318 +965.3,318.9 +965.8,319.3 +968.8,318.8 +965.4,317 +969.4,318 +964.7,312.1 +969.8,310.5 +969.9,313 +966.9,309.6 +969.8,301.6 +972.1,319.7 +977,319.7 +978,314.7 +963.2,302.9 +978.1,301.7 +962.1,307.6 +960.6,318.9 +965.2,318.6 +979.8,301.7 +965.5,326 +961.3,322.6 +961.3,325.6 +962.5,327.5 +961.9,329.7 +960.8,328.1 +963.2,333 +963.1,335.2 +964.1,336.2 +964,337.2 +966,338.5 +962.7,339.3 +967.5,339.8 +967.4,331.4 +964.6,329.8 +972.5,322.2 +974,324 +974.5,321.7 +974,320.5 +976.1,337.7 +977.1,333 +979,329.1 +979.1,328.6 +975.1,326.1 +978.3,326.6 +977.8,325.5 +977.5,323.1 +964.8,321.5 +962.7,320.6 +962.6,342.7 +962,357.5 +966.1,356.1 +971.7,358 +965.4,342.6 +968.3,341 +972.3,340.8 +974.3,345.4 +974.7,353.8 +977.8,354.3 +975.5,345.1 +962.1,359.8 +960.6,359.3 +969.8,358.2 +968.9,351.5 +962.5,362.6 +961,364.1 +962.5,365.6 +964,366.8 +960.7,366.7 +960.5,373.1 +961.7,372.7 +962.5,374 +963.5,374.2 +965.6,375 +962.3,376.8 +963.3,378.3 +969.6,379.6 +968.3,376.5 +971.5,375.4 +969.2,375.9 +969.6,373.8 +966.3,371.9 +968.6,370.7 +969.8,371.1 +965.3,368.8 +965.3,365.6 +967,363.1 +968.1,360.1 +970.4,360.9 
+974.3,360.2 +973.8,365.6 +971.8,365.4 +972.7,367.5 +971.7,369.2 +975,368.5 +974.1,366.7 +971,372 +971.7,373.2 +975.2,373.3 +974.4,377.9 +973.7,377.6 +979.4,375.8 +977.2,375.2 +978,372.9 +978.8,372.8 +978.7,371.5 +975.7,370.1 +977.7,369.4 +978.1,368 +978.6,366.3 +975.8,362.4 +974.1,368.7 +973.8,371.1 +973.8,379.2 +975.3,366.3 +979.9,366.6 +965,383.1 +963.1,381.5 +962.3,384.8 +962.8,394.6 +964.9,392.3 +960.7,399.5 +960.5,398.6 +971.2,391.9 +969.1,383 +968,380.4 +971.6,381.3 +974.4,386 +974.5,388.4 +971.7,390.2 +974.6,390.9 +971.8,391.8 +974,396.1 +977.9,390 +978.3,387.1 +978.8,387.2 +978.5,385.7 +976.3,385.7 +976.1,385.4 +976,387.9 +976.7,384.6 +979.8,383.6 +979.3,382.8 +979.5,380.3 +976.2,380.3 +976,381.9 +965.2,399.4 +966,397 +970.7,394.8 +970.8,395.1 +978.3,384.3 +963.8,401.2 +960.9,401.1 +961.8,401.5 +974.7,408.6 +978.8,401.5 +977.3,401.2 +976.2,404.3 +961.3,430.1 +963.2,433.5 +966.4,438.9 +962.4,440.6 +960.4,458.5 +974.6,452.7 +979.5,456.8 +978.8,450 +977,448 +977,462.9 +977.4,464.8 +961.6,479.6 +973.3,476.2 +965.2,489.1 +973.5,486 +973.5,489.2 +971,489.3 +978.2,494.8 +976.5,485 +961,492 +961.9,483.3 +966.2,495.2 +967.6,486.6 +992.1,5.2 +983.6,7.1 +981.8,12.2 +981.2,13.3 +982.8,18.6 +990.3,9.9 +990.8,38.1 +981,21.4 +981.5,30.2 +988.8,30.8 +991.9,23.4 +982.5,54.5 +982.8,55.9 +989,54 +984.1,75.9 +983.9,77.7 +993.3,75.9 +996.3,77.6 +989.2,91 +995.1,85.9 +982.2,104.1 +988.7,115.7 +985.5,112.5 +998,136.8 +980.9,123.5 +985.3,124.9 +981,124.6 +983.6,127 +983.8,130.2 +980.6,127.4 +984.8,133.1 +980.5,132.5 +982.5,138.5 +986.1,134.8 +985.8,120.7 +993.2,135.2 +984.2,128.8 +988.1,138.8 +993.6,133.5 +991,132 +999.7,141.7 +980.5,151.1 +981.4,152.5 +982,158.9 +986,151.7 +989.5,153.1 +988.9,153 +985.1,149.4 +985.6,149.1 +986.1,149.5 +986.6,148.8 +986.6,148.6 +986.2,147.4 +985.8,146.6 +993.2,159.6 +992.9,159.2 +990.4,159.6 +990.5,158.7 +995.2,159.4 +997.7,158.1 +998,159.5 +999.2,146.7 +983.5,162 +982.9,165.1 +983.7,166.7 +981.3,172.2 +984,171.5 +989.5,170.1 +986.4,168.6 
+986.1,163.2 +988.7,163.9 +987.2,162.8 +994.5,161 +992.4,167.2 +990.2,171.5 +990.7,172.6 +996.5,176.7 +997.7,166.4 +997.2,161.2 +988.4,193.1 +980.4,198.9 +989.5,186.5 +987.5,181.2 +989.3,183.5 +990.4,218.7 +982.3,205.1 +980.4,212.1 +981.3,211.3 +997.7,218.1 +998,219.3 +998.2,214.3 +997,214.5 +983.7,229.4 +992.3,230 +997,220.4 +984.2,239.8 +989.5,232.9 +992.7,236.4 +999.8,223.1 +990.9,233.2 +983.8,255.1 +983.6,255.6 +986.5,258.7 +985.9,257.4 +987.4,254 +987.5,253.5 +997,250.4 +995.7,268.8 +984.8,276.4 +985.4,268.5 +986.6,266.6 +986,264.5 +986.7,261.4 +994.7,263.1 +995.4,269.5 +994.7,279.1 +996,276 +996.6,276.6 +998,271.2 +995.3,265.4 +998.4,269.5 +983.3,275.2 +992.8,269.2 +983.6,280.3 +983.8,292.8 +980.8,295.5 +980.7,295.2 +980.7,296.8 +984,299.6 +983.2,298.9 +982.8,297.5 +986,299.8 +989.8,297.4 +986.1,293.5 +988.8,288.5 +992,282.4 +994.7,289 +991.9,289.1 +993.7,290.3 +992,296.1 +999.9,295.4 +999.4,291.1 +999.3,282 +993.8,307.5 +983.2,300.5 +984.5,301.8 +985,302.5 +984.1,304.6 +981.3,305.5 +984.3,307.8 +981.4,308.1 +987.3,319.3 +985.4,306.7 +989.2,307 +986.9,304.8 +986.6,302.6 +989.3,304.9 +987.8,303 +991.5,310.1 +995.2,311.1 +995.7,305.1 +996.2,305.5 +999.6,301.6 +981.9,321.9 +980.2,326.5 +984.9,331.5 +986.2,339.8 +987,335.2 +987.4,330.5 +986.8,329.6 +985.7,328.7 +986.4,326.2 +986.4,323.9 +986.9,320.6 +988.1,323.7 +994.5,321.5 +994.2,330.9 +990.4,332.2 +994.9,336.2 +993,339.4 +990.6,339.4 +998.9,339.1 +998.7,330.6 +997.4,331.5 +996.1,321.8 +989.2,344.6 +985.1,344.9 +982,348.1 +983.9,351 +984.5,352.5 +984,353.9 +989.3,358.7 +988.2,350.8 +989.9,345.1 +989.7,348.8 +985.5,343.9 +986,341.5 +988.8,341 +989.7,343.3 +993.8,340.4 +993.2,343.6 +990.5,342.4 +994.6,359.4 +996.9,357.1 +996.2,351 +999,353.4 +996.2,345.2 +997.7,346.3 +995.5,340.5 +997,342.6 +980.7,361.8 +980.7,365.7 +980.8,368 +980.8,371.5 +983.7,372.3 +984.9,379.1 +983.5,379.5 +983.2,379.4 +980.4,379.5 +985.8,378.5 +992.2,364.8 +990.5,362.8 +990.6,374.5 +993.7,376.4 +991.5,379.3 +991,378.4 +990.3,378.8 
+996.6,377.9 +996.5,371.2 +997.5,367.6 +998.7,366 +996.6,363 +998.4,364.2 +987.9,391.5 +986.9,395.7 +982.8,380.8 +984,381.1 +984.4,380.9 +984.8,381 +984.6,381.3 +983.7,382.9 +984.5,383.7 +983.7,384.4 +980.8,382.3 +981.2,385.2 +980.9,385.7 +981.6,386.9 +984.2,386.6 +984.5,385.5 +984,388.7 +983.7,388 +982.1,388.5 +980.9,390.6 +983.3,396.8 +981.5,399.7 +986.1,391.4 +985.7,388 +986.3,386.9 +985.1,385.2 +988.5,388.7 +987.8,388.5 +987.3,389.5 +986.9,383.2 +986,383.2 +987.4,382.2 +986.7,382 +988.5,380.8 +989,382.9 +987.6,383.7 +988.9,385.1 +988.1,383 +993.7,381.5 +994.2,384.5 +991.9,386.2 +994.7,385.7 +993.8,386.7 +993.2,388.3 +990.6,389 +991.8,388 +994.2,391.2 +993.2,393.3 +995.7,395.8 +998.3,390.9 +996.7,386.8 +995.4,386.2 +996.7,385.1 +997.1,386.8 +995.6,384 +996.7,381.8 +990.9,407 +984.3,401.5 +986.8,416 +988.2,418.6 +989.2,404.9 +987,403.5 +992.6,402.8 +991.2,405.9 +991.9,405.5 +994,414.7 +995.5,410.9 +998.1,414.5 +997,409.8 +996.5,408.4 +997.2,406.3 +995.5,400.2 +991.6,431.8 +982.3,426.7 +986.4,438.1 +987.3,427.9 +986.8,421.5 +993.1,421.5 +993.7,422.9 +990.9,427.7 +992.2,429 +994.2,434.8 +992.6,438.3 +997.9,435 +984.6,448.9 +980.5,448.5 +980.7,450.5 +984.7,451.1 +982.8,453.9 +989.6,459.5 +986.1,451.6 +986.5,450.4 +988.4,454.6 +985.4,446.9 +987.8,448 +989.3,449.8 +991.2,443.4 +993.5,450 +993.7,453.3 +991.4,455.8 +980.3,468 +983,461.7 +983,467.8 +980.2,471.5 +983.6,472.6 +986.3,475.4 +986.7,475.5 +988.7,471.5 +996.9,465 +982,483.5 +983,498.8 +980.8,499.6 +987,493.2 +991.2,484.6 +990.2,485.9 +993,489.3 +176.1,215.5 diff --git a/newt/data/banana_X_train b/newt/data/banana_X_train new file mode 100644 index 0000000..089cc3c --- /dev/null +++ b/newt/data/banana_X_train @@ -0,0 +1,400 @@ +6.589057299999999673e-01,-9.850710900000000381e-01 +1.958616400000000035e-01,1.270151299999999983e+00 +-1.416592199999999968e+00,-3.268785200000000057e-01 +1.823589600000000033e+00,1.430471999999999966e+00 +-4.872555100000000028e-01,-5.356469099999999761e-01 
+2.345759900000000120e-01,1.483795900000000056e+00 +1.378726899999999977e+00,1.312957100000000099e-01 +-1.444892800000000088e+00,-7.462710899999999703e-02 +-1.630115000000000036e-01,-3.978232999999999908e-01 +1.252929799999999982e+00,-3.219923100000000038e-01 +1.331716799999999923e+00,-8.022561400000000065e-02 +-1.797805199999999992e-01,5.947293000000000163e-01 +-1.285663199999999895e+00,-7.544733999999999607e-01 +4.085632599999999837e-01,1.445537199999999967e+00 +-1.452814599999999956e+00,-1.250770699999999902e+00 +-1.315223100000000089e+00,9.947845499999999896e-01 +1.022658000000000067e+00,-7.284184000000000214e-01 +1.717776399999999981e+00,2.911119699999999838e-02 +-1.781950399999999934e+00,-6.867617400000000094e-01 +-9.660203000000000539e-01,-6.276028000000000162e-01 +-1.574562199999999912e+00,3.777993800000000180e-01 +1.115956600000000076e+00,-6.592506299999999486e-01 +2.348831900000000056e-02,-1.798704700000000045e-01 +-1.174768899999999894e+00,1.563695999999999975e-01 +-1.050130900000000034e-01,-4.424531099999999828e-01 +-2.562212600000000062e-01,-1.151159900000000014e-01 +4.137094599999999733e-01,1.157424199999999903e+00 +6.172945899999999764e-01,-9.597238699999999928e-03 +-1.373886399999999952e+00,6.483221599999999807e-01 +7.735301199999999877e-01,6.167366100000000184e-01 +2.292945500000000081e+00,1.316948800000000031e+00 +5.011798299999999928e-01,5.526289399999999991e-02 +9.812914999999999832e-01,5.316238100000000300e-01 +1.410627700000000040e-01,9.466890099999999975e-02 +4.357881799999999833e-02,1.258857600000000063e-02 +-1.842819599999999891e+00,-1.641373299999999924e+00 +-1.223293699999999928e+00,-1.185190799999999989e-01 +-1.164427300000000054e+00,4.101544200000000195e-01 +1.248372100000000096e+00,-2.627893799999999908e-03 +1.031111399999999900e+00,1.293032799999999982e+00 +7.727592500000000086e-01,-7.466186599999999896e-01 +-2.973833400000000168e-02,-1.581664600000000087e+00 +2.171569100000000085e-01,-1.209161099999999989e+00 
+-1.056898899999999974e+00,1.031038099999999957e+00 +-1.932576800000000095e+00,-9.204139599999999755e-01 +5.977166000000000423e-01,8.554826300000000217e-01 +-1.694521200000000061e+00,-1.270982000000000056e+00 +8.222126800000000291e-01,1.918468899999999921e-01 +-5.394306399999999613e-01,1.405422399999999961e+00 +1.678395799999999900e-03,2.623303499999999899e-01 +-7.423452099999999776e-01,-1.022048800000000091e+00 +-1.306072200000000016e+00,-9.028140599999999738e-01 +8.778118799999999888e-01,-1.380551899999999943e+00 +4.647794100000000039e-01,-1.257964599999999988e+00 +-1.106384800000000057e+00,-8.212915600000000316e-01 +2.407824600000000093e+00,1.262246399999999991e+00 +-1.564962100000000023e+00,-1.718958700000000062e-01 +-8.545233399999999646e-01,-1.051301299999999994e+00 +5.193268399999999719e-04,2.821983399999999920e-01 +-1.884685799999999967e+00,-9.791979600000000339e-01 +-8.949546000000000445e-01,-1.066691599999999962e+00 +2.353867500000000057e-01,7.720871200000000156e-01 +1.867734199999999900e+00,-1.157898899999999953e+00 +7.347609200000000396e-01,1.628912799999999939e+00 +7.367929700000000470e-02,1.237730799999999937e-01 +-3.369706900000000172e-01,1.853280999999999956e+00 +-1.997380099999999936e-01,1.735570400000000069e+00 +1.204798299999999989e+00,-2.128500100000000061e-01 +3.816324600000000067e-01,1.748179800000000117e-01 +1.029305399999999926e+00,-1.911264199999999913e-01 +-7.392441899999999677e-01,1.324546599999999907e+00 +9.518155499999999547e-01,6.155185300000000082e-01 +-1.567189399999999955e+00,-2.048274300000000048e-01 +7.740081500000000059e-01,-1.220426599999999917e+00 +1.212393800000000077e+00,-1.236448500000000061e+00 +1.096689400000000036e+00,1.312375500000000084e-01 +3.973670000000000258e-01,5.126427999999999541e-01 +1.069430399999999892e+00,-1.381901400000000057e+00 +-1.374491899999999989e+00,-2.049564299999999950e-01 +-3.297977699999999901e-01,8.292726400000000053e-02 +-7.180142800000000047e-01,-1.254153599999999980e+00 
+-6.756865199999999572e-01,1.300465399999999994e+00 +4.336309899999999939e-01,2.194080200000000147e+00 +-8.820720800000000361e-01,-5.819773999999999781e-01 +1.163381699999999963e+00,-7.682681999999999567e-01 +5.719143700000000052e-01,-1.241801300000000108e+00 +-7.568276200000000342e-01,2.983929900000000246e-01 +6.701902899999999663e-01,1.151917700000000044e+00 +3.011988700000000144e-02,-8.944401100000000104e-01 +1.219755900000000004e+00,1.345779800000000082e+00 +-6.846416300000000010e-01,9.893230499999999816e-01 +-3.768005899999999908e-01,-6.169640899999999650e-01 +1.297272100000000095e-01,1.412756600000000029e+00 +-1.094874099999999961e+00,1.393531099999999912e+00 +1.147976799999999908e+00,-1.137021099999999896e+00 +6.479018199999999617e-01,1.406239699999999981e+00 +9.809346199999999794e-01,9.206187100000000345e-02 +3.181982199999999761e-01,1.395211800000000002e+00 +-5.006173499999999610e-01,7.866341699999999937e-02 +-1.124847500000000000e+00,4.939230399999999799e-01 +-1.813804600000000100e+00,-9.935396899999999754e-01 +-7.484379199999999788e-01,-3.399938500000000002e-02 +5.442299600000000126e-01,1.410526399999999958e+00 +-9.443831400000000098e-01,1.017614800000000042e+00 +-5.325497999999999621e-01,1.216976999999999975e+00 +-4.459116900000000272e-01,-1.275572799999999951e+00 +4.976746699999999857e-01,-1.023484499999999908e+00 +-9.543475900000000234e-01,-1.190910700000000100e+00 +1.649782800000000105e+00,4.041520400000000318e-02 +-2.029068899999999953e+00,3.591198700000000077e-01 +-2.352718299999999874e-01,-1.216284099999999979e+00 +6.270806500000000172e-01,-6.287912199999999563e-02 +-1.931790099999999955e+00,5.253025900000000137e-01 +2.347317400000000109e+00,1.136318699999999904e+00 +2.333266099999999899e-01,-2.747742699999999871e-01 +-7.807989699999999811e-01,-1.132378799999999991e-01 +-3.270540499999999851e-01,6.115863200000000166e-01 +-1.407638399999999956e+00,1.087934200000000073e+00 +4.891460100000000200e-02,-1.364299399999999940e+00 
+2.526953499999999853e-01,-1.304696599999999984e+00 +-1.245954999999999924e+00,-6.564092000000000260e-01 +-1.585734900000000058e+00,3.224793100000000190e-01 +1.988021999999999956e+00,1.180537699999999912e+00 +2.203745399999999854e+00,1.256686600000000098e+00 +-1.718598899999999874e-01,-1.313433099999999909e+00 +2.759918299999999936e-01,3.974040300000000192e-01 +9.489019499999999940e-01,4.223767399999999728e-01 +1.064003400000000044e+00,8.423188999999999815e-01 +1.979872899999999936e+00,1.274078299999999997e+00 +-2.791571500000000206e-01,-9.276780099999999696e-02 +2.749118700000000026e-01,1.176506100000000110e+00 +-3.176566299999999954e-01,3.172154300000000204e-01 +-1.315506199999999959e+00,-8.802628099999999789e-02 +-6.953306000000000209e-01,-7.359447999999999546e-01 +9.158905200000000413e-01,1.630146700000000060e+00 +9.743190400000000251e-01,5.121348199999999906e-01 +-5.667159899999999473e-01,-1.698974699999999949e+00 +1.376813800000000088e+00,7.485285000000000410e-01 +5.214431800000000061e-01,-1.042015700000000100e+00 +1.134897400000000056e+00,4.287649199999999938e-01 +-5.111806399999999639e-01,1.496393000000000084e+00 +1.457946299999999917e+00,-9.101734000000000213e-01 +1.669039799999999962e+00,8.769250400000000445e-02 +-9.649074699999999893e-01,-9.782779999999999809e-01 +-2.000090199999999818e+00,-1.724718900000000055e+00 +-1.431995499999999977e+00,-1.023520399999999997e+00 +7.385516300000000145e-01,1.392493499999999940e-01 +3.690601099999999968e-01,4.900270600000000143e-01 +-1.636443099999999928e+00,-1.796066099999999999e+00 +-1.312605299999999975e+00,-4.982927899999999855e-01 +4.400203300000000151e-01,9.002257799999999754e-01 +-1.593721100000000002e+00,-1.356794899999999915e+00 +-7.238595100000000393e-01,2.154671200000000120e-01 +1.325518299999999927e+00,1.426732200000000006e+00 +-9.099958599999999898e-01,-6.071261300000000416e-01 +1.186452999999999980e+00,9.972611000000000114e-01 +-9.141385499999999942e-01,4.246167499999999873e-01 
+1.114387599999999923e+00,3.748486799999999897e-01 +-1.461770699999999978e+00,-2.978280500000000108e-01 +-3.275840799999999997e-01,2.315836900000000087e-01 +9.406169500000000072e-01,-7.889827799999999947e-01 +-1.823873999999999995e+00,8.850274099999999589e-01 +9.491587499999999678e-02,-1.559466100000000077e+00 +8.743679500000000049e-01,-7.686894699999999858e-01 +-9.668240500000000459e-01,5.923083500000000390e-01 +8.032911400000000146e-01,1.354565999999999937e+00 +-5.213501000000000385e-01,7.262533899999999987e-01 +-7.596841199999999628e-01,-6.977184300000000006e-01 +-2.251748899999999942e+00,-1.962996100000000022e+00 +-7.492357200000000494e-01,1.008951299999999961e-02 +-6.261164300000000155e-03,-9.593575699999999928e-01 +1.008553000000000033e+00,-9.838846599999999665e-01 +1.983796899999999974e-01,-1.188232699999999920e+00 +5.438491500000000034e-01,1.634342600000000090e+00 +-2.129320899999999905e-01,2.588329799999999903e-01 +-7.290366400000000136e-01,1.368189700000000064e+00 +1.118591099999999977e-01,2.853862899999999939e-02 +-9.682294999999999652e-01,-9.115852100000000346e-01 +-5.659486299999999526e-01,-1.329923900000000048e+00 +-2.713112699999999933e-01,1.588610999999999995e+00 +1.559259900000000032e+00,-3.672827599999999859e-01 +8.971609799999999968e-01,1.071291999999999911e+00 +1.385655000000000081e+00,-4.979415099999999761e-01 +9.677016099999999343e-02,-2.254040200000000105e-01 +3.738382199999999989e-01,9.661258999999999819e-01 +-7.911598300000000084e-01,-1.255014599999999980e+00 +6.150616199999999756e-01,5.575504000000000016e-01 +-3.908945600000000020e-01,1.812746399999999869e-01 +-1.531393100000000007e-01,-1.200733700000000015e+00 +2.980384099999999759e-01,5.091629600000000533e-01 +5.572330199999999678e-01,-1.036433699999999902e+00 +-1.090596399999999994e-01,-1.067219600000000046e+00 +4.123011799999999893e-01,-1.236379600000000051e-01 +-1.551934300000000100e+00,-1.468650500000000081e+00 +1.904261100000000040e+00,1.268644300000000058e+00 
+8.376922900000000061e-01,-1.153615900000000000e-01 +2.430914299999999972e-01,1.614185600000000109e+00 +4.798740499999999964e-01,-1.102450799999999953e+00 +8.543364199999999853e-01,-5.374806199999999645e-01 +-2.939569700000000188e-03,5.129387499999999989e-01 +1.343970200000000004e+00,8.780624099999999599e-01 +8.650076799999999871e-03,-9.175870600000000099e-01 +1.821634999999999893e+00,-6.430730000000000057e-01 +-2.124317900000000092e-01,-1.180141799999999908e+00 +8.225284000000000484e-01,-1.205206400000000011e+00 +-8.350524099999999672e-01,-1.060197700000000021e+00 +-5.357524999999999649e-01,1.047561900000000046e+00 +-1.530057199999999895e+00,-1.208064800000000050e+00 +3.520430900000000030e-01,3.326098499999999847e-01 +-1.092217400000000005e+00,5.313280000000000225e-01 +7.090473299999999757e-01,8.716419699999999882e-01 +1.544045499999999960e+00,1.752175000000000038e+00 +-1.870284099999999894e+00,-5.554441199999999856e-01 +-2.822578099999999979e-01,1.782443000000000000e+00 +4.284739599999999871e-01,-4.939487499999999920e-01 +9.231434200000000478e-01,9.328246399999999827e-01 +-1.208175900000000080e+00,-4.090078899999999851e-01 +-1.314715700000000043e+00,1.754388500000000073e-01 +-7.526302700000000456e-01,-3.213854099999999825e-01 +6.138766100000000447e-01,-1.464895800000000081e+00 +-5.982624699999999907e-01,-1.351439300000000010e+00 +-1.150486600000000026e+00,-4.976118000000000208e-02 +1.600778999999999952e+00,-1.362585099999999994e+00 +-9.022864499999999621e-01,-3.599877599999999900e-01 +-6.602350999999999637e-01,-7.082208700000000579e-02 +2.573285199999999939e+00,9.886675299999999611e-01 +-1.127372199999999935e+00,-6.340390799999999494e-02 +-1.007361299999999904e+00,-3.968323100000000214e-01 +1.433772000000000046e+00,-1.205295299999999958e+00 +1.001900000000000013e+00,2.539818500000000090e-01 +-1.029400599999999999e-01,-4.144374299999999955e-01 +-9.690746399999999872e-01,1.024335900000000077e+00 +1.093897099999999956e+00,1.026310999999999973e+00 
+1.077468300000000045e+00,1.630505100000000096e-02 +1.439467599999999958e+00,-1.701349000000000000e+00 +-2.021488000000000174e+00,-7.917858399999999630e-01 +1.169300500000000076e+00,5.105467799999999778e-01 +4.034095800000000315e-02,-1.505532899999999952e+00 +9.307848899999999759e-01,7.834486400000000295e-01 +8.238188899999999704e-01,5.866570100000000343e-01 +-9.567614800000000252e-02,-1.055013200000000095e+00 +1.052796899999999924e+00,4.715053900000000242e-01 +-2.682880300000000107e-01,-1.520081599999999922e+00 +1.346687399999999979e+00,2.290646100000000018e-01 +-1.399771300000000052e+00,4.749794099999999908e-01 +-1.337785900000000083e+00,-4.187185399999999724e-01 +-5.225960699999999681e-01,-1.305665599999999982e+00 +1.184773999999999994e+00,1.487817699999999910e+00 +5.658372200000000296e-01,8.153223299999999563e-02 +3.402758200000000066e-01,-1.343877699999999953e+00 +5.777885600000000066e-01,-6.219213000000000102e-01 +7.533608800000000105e-01,1.888209500000000096e+00 +-1.843004699999999940e-01,-3.136109900000000339e-02 +5.599262999999999879e-01,1.696589200000000019e+00 +1.071365300000000076e+00,4.567340899999999815e-01 +-6.127885100000000085e-01,7.752049600000000540e-01 +8.193394400000000299e-01,4.623918899999999998e-02 +1.111361899999999903e+00,-6.282070200000000604e-02 +-1.611153599999999964e+00,-1.131371700000000091e+00 +1.063133399999999895e+00,-5.309965299999999666e-02 +7.637157399999999763e-01,-8.042571400000000370e-01 +2.262534800000000068e-01,-1.384561600000000059e+00 +-1.185682600000000031e+00,1.010274099999999953e+00 +1.246527699999999905e+00,-1.183727900000000055e-01 +-1.763410200000000039e+00,-1.168714599999999937e+00 +-2.344177400000000133e-01,-6.561110300000000395e-01 +-1.451553099999999930e+00,-1.290066099999999993e+00 +2.336311600000000044e-01,-8.222062300000000379e-01 +-1.840579699999999930e+00,-8.652922400000000458e-01 +-1.625205199999999905e+00,-1.316205400000000081e+00 +-7.150060400000000094e-01,-9.689811799999999975e-01 
+9.727302000000000171e-02,-1.329824600000000023e+00 +-5.509805199999999736e-01,2.875500799999999857e-01 +1.386527800000000088e+00,-1.303580200000000085e-02 +2.148528100000000052e-01,-1.659049999999999914e+00 +2.237088699999999902e-02,1.429698600000000042e+00 +-3.384107700000000274e-01,-1.328681800000000024e+00 +-6.927330400000000221e-01,-1.128778500000000018e-01 +5.332371799999999912e-02,-1.018350000000000088e+00 +-1.531453200000000014e-01,-1.223317800000000011e+00 +-1.178229900000000052e+00,3.023280999999999885e-02 +1.145664699999999980e+00,3.292338300000000051e-01 +1.305047700000000033e+00,-1.400636299999999945e+00 +-2.845874999999999932e-01,1.983507300000000084e+00 +1.055446200000000001e+00,-5.533659099999999748e-02 +-9.035523500000000041e-01,-2.025655399999999884e-01 +6.611600699999999886e-01,-1.989674200000000059e-01 +8.963936300000000523e-01,-1.273064299999999927e+00 +-1.542863099999999932e+00,-6.176848099999999731e-01 +1.289730899999999902e+00,-6.567315099999999628e-01 +-1.668154099999999973e+00,-1.498244399999999921e+00 +-1.357762200000000030e+00,-8.483887199999999851e-01 +1.984750800000000037e+00,-2.522597799999999960e-02 +4.850419999999999732e-01,6.349993099999999835e-01 +-6.743546099999999655e-01,8.416586400000000134e-01 +7.218927699999999614e-01,-9.754704699999999784e-01 +1.019719400000000054e+00,-6.246073199999999659e-01 +1.255851800000000074e+00,-1.807248700000000097e-01 +-6.580226399999999920e-01,-1.306758700000000051e+00 +-4.051604400000000106e-01,-1.253013500000000002e+00 +1.266628400000000099e+00,4.934014899999999981e-01 +9.332715000000000316e-01,-2.769830600000000032e-01 +-8.147536499999999959e-01,-6.817346300000000081e-01 +-1.514960000000000084e+00,-7.689848200000000134e-01 +1.173707199999999978e-01,-1.068793799999999905e+00 +3.843473000000000028e-01,2.154406700000000008e+00 +-1.318883100000000086e+00,1.098079500000000097e+00 +1.195037200000000022e+00,-1.651005499999999904e+00 +5.285954400000000136e-02,1.520457699999999912e+00 
+1.457810899999999910e+00,3.728388400000000324e-02 +-6.066974200000000428e-01,-1.597386600000000101e+00 +-1.061883499999999980e+00,-5.486636299999999578e-01 +-1.393185600000000024e+00,8.620422899999999888e-01 +5.269345399999999513e-01,5.098330599999999491e-01 +1.154260700000000028e+00,-4.510613500000000275e-01 +-1.710500500000000090e-01,1.046478500000000075e-02 +-1.634494799999999914e+00,5.241812300000000535e-01 +2.201459499999999903e+00,-6.649900499999999992e-01 +8.261589099999999686e-01,1.418818200000000029e+00 +-1.384275600000000050e-01,-5.236241900000000032e-02 +-7.541366299999999745e-01,1.191150200000000048e+00 +5.736512500000000292e-02,-1.336480999999999919e-01 +8.766016599999999492e-01,1.606494400000000045e-01 +1.165034899999999984e+00,4.460492200000000240e-01 +6.471197199999999539e-01,3.166952000000000100e-01 +-1.734075500000000103e+00,-2.422062399999999893e-01 +6.309289799999999726e-01,-6.737876900000000502e-01 +-9.734964200000000289e-01,-4.697260999999999798e-02 +-3.999630699999999761e-01,4.627099800000000242e-03 +-1.144411300000000020e+00,-1.265178600000000042e+00 +4.204600399999999794e-01,1.095552199999999976e+00 +4.300973399999999947e-01,-6.543422500000000142e-01 +1.485385799999999978e+00,3.651501500000000067e-01 +-6.425376099999999813e-01,1.684609599999999929e+00 +6.977409300000000369e-01,-6.550936400000000326e-01 +-2.224567199999999967e-01,2.465339800000000192e+00 +-1.215753499999999931e+00,-9.609964699999999638e-02 +-2.757251199999999902e-01,1.051365099999999941e+00 +-2.529648099999999844e-01,-4.061394700000000024e-01 +-1.376160199999999945e+00,6.782206400000000412e-01 +-1.127801000000000053e+00,-4.428126700000000193e-01 +1.304293900000000006e+00,1.754493799999999881e-01 +1.638180399999999981e+00,1.462639000000000022e+00 +1.479798200000000064e+00,1.776150200000000012e+00 +-1.607299800000000056e+00,-3.626086000000000031e-01 +-1.520326000000000066e+00,-1.227423799999999954e+00 +-6.566546600000000566e-02,-1.188828500000000066e+00 
+-6.530627900000000041e-01,4.328000200000000075e-01 +2.483717700000000139e+00,1.205914300000000106e+00 +-9.589512000000000036e-01,1.076567199999999946e+00 +8.418334200000000545e-01,-3.718270200000000081e-01 +-4.022811700000000212e-01,-1.113043900000000086e+00 +-2.908761000000000263e-01,5.608643800000000512e-01 +-1.016084200000000104e+00,-2.235546499999999936e-01 +4.500774599999999848e-01,-1.037773200000000062e-01 +-1.883057299999999934e+00,-4.541202599999999978e-01 +-5.816139299999999457e-01,-7.522939600000000393e-01 +-5.939624000000000015e-01,1.243667800000000101e+00 +-1.493000499999999953e+00,-3.248604799999999827e-03 +5.524547699999999839e-01,1.042958800000000075e+00 +1.064861899999999917e+00,4.427904400000000074e-01 +4.064504999999999924e-01,-7.070751100000000067e-01 +1.147923900000000108e+00,4.361150099999999696e-02 +6.118196500000000482e-01,1.373169599999999990e+00 +1.098179199999999911e+00,5.076150499999999566e-01 +1.097792300000000054e+00,6.040636099999999731e-01 +-7.530634400000000284e-01,1.179346800000000028e+00 +-1.126014899999999930e+00,4.105307100000000209e-01 +-8.721999799999999858e-01,2.597796600000000233e-01 +-4.146016900000000227e-01,2.561624900000000205e-01 +8.011884900000000309e-01,-6.390880500000000186e-01 +7.619477999999999529e-01,1.249359699999999934e-01 +3.431077999999999906e-02,-1.179340300000000008e+00 +5.239864899999999714e-01,-2.306302899999999878e-01 +-2.623103699999999872e-02,-1.178430199999999983e+00 +-1.465807099999999918e+00,-3.583728299999999756e-02 +1.285417800000000055e+00,1.515852400000000100e-01 +-8.509160799999999636e-01,1.889333599999999946e-01 +-9.614535699999999796e-01,1.001211599999999979e+00 +-7.759772700000000523e-01,1.270411499999999916e-01 +-6.735439100000000234e-01,-6.204733300000000451e-01 +2.440462499999999924e-01,-6.902335800000000132e-01 +-9.738216100000000042e-01,5.110741199999999651e-01 +3.696439899999999779e-01,1.033722599999999936e+00 +-1.976750800000000030e+00,-8.798290399999999511e-01 
+3.079200799999999849e-01,1.174691600000000030e-01 +-1.623225899999999999e+00,4.684939900000000268e-01 +-8.425139099999999770e-01,-1.079739599999999911e+00 +1.003917399999999960e+00,1.172986499999999932e+00 +-9.542117100000000463e-01,6.867008000000000001e-01 +7.677991600000000361e-01,-9.659891699999999526e-01 +1.635265100000000027e+00,1.115119299999999924e+00 +-1.704592399999999897e+00,-5.535632900000000411e-01 +4.038231999999999933e-01,9.985467499999999344e-03 +7.039547999999999917e-01,-9.057749000000000494e-01 +3.345825800000000183e-01,1.394055599999999950e+00 +-1.701715700000000053e+00,-5.690908000000000078e-01 +2.642808800000000069e+00,1.144779400000000003e+00 +7.692568699999999815e-01,7.715549699999999511e-01 +-2.545677800000000213e-01,-1.419916499999999970e-01 diff --git a/newt/data/banana_Y_train b/newt/data/banana_Y_train new file mode 100644 index 0000000..8e81223 --- /dev/null +++ b/newt/data/banana_Y_train @@ -0,0 +1,400 @@ +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 
+1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 
+1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 
+1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 
+1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 
+1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +1.000000000000000000e+00 +0.000000000000000000e+00 +1.000000000000000000e+00 diff --git a/newt/data/banana_large.csv b/newt/data/banana_large.csv new file mode 100644 index 0000000..1f0032b --- /dev/null +++ b/newt/data/banana_large.csv @@ -0,0 +1,5301 @@ +At1,At2,Class +1.14,-0.114,-1 +-1.52,-1.15,1 +-1.05,0.72,-1 +-0.916,0.397,1 +-1.09,0.437,1 +-0.584,0.0937,1 +1.83,0.452,-1 +-1.25,-0.286,1 +1.7,1.21,1 +-0.482,-0.485,1 +1.79,-0.459,-1 +-0.122,-0.808,-1 +0.0809,1.93,1 +-0.541,-0.332,1 +-1.02,0.619,-1 +-0.768,-1.04,-1 +-1.69,-0.0461,1 +1.26,1.21,1 +0.724,0.989,-1 +0.444,1.99,-1 +-1.01,-1.36,-1 +-0.863,0.496,1 
+1.16,-0.458,1 +-0.595,-0.651,1 +-0.77,0.364,1 +-0.871,-0.825,1 +0.996,-1.7,1 +1.28,0.691,-1 +0.925,0.895,-1 +-0.687,-1.29,-1 +1.74,0.964,1 +1.18,-0.335,1 +2.52,1.43,1 +1.71,-0.044,-1 +0.271,-0.591,1 +1.12,0.626,-1 +1.3,0.196,-1 +-1.59,-0.68,1 +0.408,0.0673,1 +1.13,1.48,-1 +0.763,0.921,-1 +-1.41,1.11,-1 +-0.75,-0.881,-1 +1.16,0.978,-1 +1.13,0.405,-1 +-0.522,-1.34,-1 +-1.41,0.894,-1 +0.00902,-0.434,1 +-2.14,-1.43,1 +-1.31,1.25,-1 +0.041,-1.13,-1 +0.0483,0.866,-1 +-2.11,0.193,-1 +0.522,1.46,-1 +0.0284,1.62,-1 +0.396,-0.606,1 +0.536,0.921,-1 +0.315,-0.182,1 +-0.123,-1.07,-1 +0.526,1.48,-1 +0.00665,0.0118,1 +-0.352,-0.49,1 +-0.0701,-1.23,-1 +-0.149,-1.2,-1 +0.785,0.0481,1 +-1.62,0.593,-1 +-0.0314,-1.01,-1 +-0.285,-1.1,-1 +1.33,1.51,1 +1.09,-1.37,1 +-0.223,-1.28,-1 +-0.0341,-1.07,-1 +1.22,1.13,-1 +-1.67,-1.26,1 +1.97,-0.772,-1 +-0.508,-0.715,1 +0.603,-0.108,1 +-0.323,-0.213,1 +-0.124,-1.12,-1 +-0.439,-0.961,-1 +0.201,0.0024,1 +0.812,0.708,-1 +0.888,0.817,-1 +0.0238,0.0836,1 +0.415,-1,-1 +-0.308,2.19,1 +0.767,-0.248,1 +1.23,-1.2,1 +1.33,1.63,1 +0.27,0.0491,1 +-1.69,-1.87,1 +0.495,-0.281,1 +-0.519,-0.799,-1 +-1.99,0.549,-1 +1.36,-0.732,1 +-1.03,0.654,-1 +0.431,-1.33,-1 +-0.0583,-1.15,-1 +-0.209,0.345,1 +1.26,-1.37,1 +-1.78,-0.378,1 +0.98,-0.0439,1 +-0.0536,1.6,-1 +-1.39,-0.451,1 +1.22,-0.361,1 +1.22,0.561,-1 +-0.838,0.356,1 +-0.446,-0.861,-1 +1.17,-1.39,1 +-0.0868,-1.33,-1 +-1.12,0.576,-1 +-0.28,-1.3,-1 +-0.0269,0.958,1 +-0.697,1.35,-1 +1.38,-1.74,1 +0.408,1.16,-1 +1.2,1.54,1 +2.07,1.02,1 +-0.462,-0.187,1 +1.27,0.589,-1 +-0.101,-0.765,-1 +-0.819,1.26,-1 +0.615,-0.0228,1 +-1.79,-0.98,1 +-1.79,-0.954,1 +0.826,1.5,1 +0.845,0.766,-1 +-0.476,-1.49,-1 +1.01,0.48,-1 +1.39,-0.376,-1 +0.357,-1.07,-1 +0.77,1.4,-1 +-1.08,0.114,1 +-0.795,-1.43,-1 +0.706,1.38,1 +-1.26,0.23,1 +-0.833,-0.569,-1 +-0.0303,2.11,-1 +-0.223,-0.419,1 +-0.562,-0.873,-1 +0.926,0.972,-1 +-1.86,-1.57,1 +-0.806,0.941,-1 +-0.214,0.38,1 +0.744,1.42,1 +1.94,-0.65,-1 +1.77,1.35,1 +-0.903,0.101,1 +-0.0418,-1.02,-1 
+0.309,-0.175,1 +-0.634,-0.969,-1 +-1.62,0.102,-1 +-1.21,1.27,-1 +0.529,0.133,-1 +0.386,1.55,-1 +-0.0962,0.19,1 +-0.536,1.13,-1 +1.02,-0.261,1 +-0.876,-1,-1 +2.35,1.21,1 +-0.982,-1.08,-1 +-1.16,0.469,1 +-1.74,0.693,-1 +-1.1,1.16,-1 +0.0179,-0.7,1 +-0.62,-0.984,-1 +-1.52,0.66,-1 +1.07,-0.985,1 +0.917,0.525,-1 +-0.0482,1.52,-1 +1.49,1.55,1 +-0.584,1.01,-1 +-0.0625,-0.12,1 +-1.68,1.08,-1 +-1.77,-0.923,1 +-0.202,0.712,1 +1.06,-0.427,1 +1.71,1.1,1 +-1.7,0.872,-1 +1.86,1.14,1 +-0.529,-0.357,1 +-0.411,-1.3,-1 +0.666,1.36,1 +0.162,-0.791,-1 +-0.296,0.255,1 +-0.0347,1.3,-1 +0.294,-1.5,-1 +-0.52,-1.17,-1 +0.741,-1.2,1 +-1.33,1.08,-1 +-0.0987,0.966,1 +0.0131,-0.969,-1 +0.896,1.58,1 +-0.412,0.492,1 +0.683,1.07,-1 +-0.753,-0.0615,1 +-0.0674,1.85,-1 +1.08,0.683,-1 +1.15,0.243,-1 +0.497,-0.671,1 +0.174,1.92,-1 +1.64,0.0477,-1 +-0.478,-0.796,-1 +-0.447,-1,-1 +-1.04,-0.2,1 +2.06,-0.482,-1 +-0.89,-1.17,-1 +0.312,-1.23,-1 +0.976,-1.39,1 +0.291,-0.795,-1 +0.303,-0.143,1 +1.1,-0.695,1 +0.579,-1.02,1 +-1.54,0.685,-1 +0.916,0.144,1 +1.18,0.614,-1 +0.254,-0.862,-1 +-0.877,-0.401,1 +1.97,0.99,1 +0.471,1.39,-1 +1.63,0.475,-1 +0.945,0.309,-1 +0.121,-1.3,-1 +1.06,-0.69,1 +1.42,-0.32,1 +0.535,0.504,1 +0.912,-0.114,-1 +-0.75,1.14,-1 +1.14,-0.209,1 +-1.03,-1.09,-1 +0.268,-1.08,1 +-1.77,-0.828,1 +0.378,-0.0609,1 +1.19,0.113,1 +-0.168,1.32,-1 +2.07,1.21,1 +-1.07,0.707,-1 +-1.19,0.309,-1 +1.19,0.697,-1 +0.121,1.72,-1 +1.23,-0.0582,1 +0.44,0.399,1 +-0.723,0.633,-1 +-0.63,0.949,-1 +0.143,0.263,1 +1.23,-0.615,1 +1.9,-0.0565,-1 +1.22,-0.0172,-1 +0.0107,0.0258,1 +0.36,-0.715,1 +0.139,-1.01,-1 +0.308,-1.06,-1 +-0.645,-0.238,1 +1.15,0.715,-1 +-0.531,-1.27,-1 +1.05,0.701,-1 +0.0681,-0.76,-1 +-0.595,1.46,-1 +0.72,0.989,-1 +-1.86,-0.963,1 +0.0692,-0.828,-1 +0.863,0.765,-1 +-0.574,1.32,-1 +-0.467,-1.51,-1 +1.43,0.484,-1 +-0.592,-1.17,-1 +0.122,-0.041,1 +0.909,0.118,-1 +-0.705,-0.93,-1 +1.05,0.453,-1 +-0.786,-1.33,-1 +0.07,1.7,-1 +-0.36,-1.28,-1 +-1.25,0.985,-1 +1.33,1.42,1 +0.778,-0.694,1 +-1.87,-1.26,1 
+0.178,1.49,-1 +-1.03,1.18,-1 +-0.956,-0.818,1 +0.0345,-0.691,-1 +-1.25,0.512,1 +-0.187,-1.38,-1 +-1.24,1,-1 +0.329,-0.37,1 +-1.37,-0.803,1 +0.529,1.62,-1 +-0.23,1.08,-1 +-0.619,-0.748,-1 +-0.914,1.07,-1 +0.163,-1.31,-1 +1.19,-1.34,1 +0.983,-1.31,1 +-1.63,-1.41,1 +-1.02,-0.296,1 +-0.387,0.26,1 +-1.59,-1,1 +1.69,-0.724,-1 +-0.869,-0.0393,1 +-1.9,0.106,1 +-1.75,0.722,-1 +-1.06,-1.07,-1 +-0.509,-1.41,-1 +-0.112,-0.841,1 +1.08,0.79,-1 +0.498,0.567,-1 +-0.796,-1.63,-1 +-0.3,-1.04,-1 +-2.34,0.246,-1 +0.842,-1.41,1 +-0.0263,-1.35,-1 +0.586,1.15,-1 +1.87,-0.239,-1 +0.404,-0.868,-1 +0.564,-1.13,1 +-0.0584,0.54,1 +-1.58,-1.26,1 +-0.813,-0.57,1 +-0.579,-0.748,1 +-1.35,1.32,-1 +-0.639,-0.938,-1 +1.49,-0.00779,-1 +0.719,0.0781,1 +0.0346,1.7,-1 +-0.274,0.0468,1 +0.207,0.0304,1 +1.63,-0.251,-1 +-0.207,-0.745,-1 +-0.717,1,-1 +1.17,-0.567,1 +0.661,1.69,-1 +-1.13,0.6,-1 +-0.109,-1.25,-1 +0.68,0.702,-1 +0.867,0.81,-1 +-0.562,-1.01,-1 +1.79,-0.102,-1 +-1.87,-1.1,1 +-0.946,-0.197,1 +0.812,-0.195,1 +0.287,-0.517,1 +0.262,-0.673,-1 +-0.358,-1.02,-1 +-0.392,-1.27,-1 +-1.2,-1.02,-1 +-0.337,-1.45,-1 +-1.54,-0.643,1 +0.311,1.76,-1 +-0.473,-0.749,-1 +1.71,0.0217,-1 +1.88,-0.368,-1 +-1.18,-0.0978,1 +0.0431,-0.0199,1 +1.23,0.354,-1 +0.0788,-0.0998,1 +0.0544,-1.59,-1 +-0.24,1.72,1 +-0.496,-1.09,-1 +-0.636,1.35,-1 +1.15,0.634,-1 +1.29,-1.43,1 +-0.983,1.06,-1 +-0.206,-0.474,1 +0.298,-0.829,1 +-0.972,-1.06,-1 +1.21,0.354,-1 +0.328,1.71,1 +0.313,-1.07,-1 +-1.24,-0.352,1 +-1.39,-0.68,1 +1.2,-1.56,1 +-0.208,0.332,1 +0.553,1.54,-1 +0.354,-1.19,-1 +-0.74,-1.53,-1 +-0.769,1.35,-1 +-0.0381,-0.791,-1 +-0.102,-0.13,1 +-1.06,0.496,1 +-0.481,1.09,-1 +0.866,-0.917,1 +1.06,0.266,-1 +1.73,1.68,1 +-0.516,-0.935,-1 +-0.893,-1.27,-1 +1.2,-0.948,1 +0.763,1.15,-1 +0.794,0.762,-1 +-0.314,0.0194,1 +-0.456,1.55,-1 +1.04,0.563,-1 +-0.0978,-0.424,1 +1.37,-0.643,-1 +-0.201,0.0726,1 +-1.97,0.611,-1 +0.99,-0.0762,1 +1.28,0.902,-1 +0.569,1.62,1 +-1.2,-0.451,1 +-0.508,1.48,-1 +-1.69,-0.175,-1 +-0.298,-1.26,-1 +0.301,-1.17,-1 
+0.975,1.48,-1 +-0.853,-0.254,1 +0.837,1.75,1 +0.379,1.36,-1 +-0.364,-0.0828,1 +0.939,-0.351,1 +1.24,0.554,-1 +1.37,-0.331,1 +-0.48,-0.913,-1 +1.34,-0.261,-1 +0.286,1.23,-1 +0.672,2.03,-1 +-0.52,-1.52,-1 +-0.513,1.29,-1 +-0.648,-0.631,-1 +-0.113,-0.197,1 +0.103,0.329,1 +-0.941,0.976,-1 +-0.533,-0.402,1 +1.09,-1.59,1 +-0.395,-0.756,-1 +1.46,-0.417,1 +-0.724,-1.26,-1 +-1.59,-0.98,1 +0.159,1.51,-1 +-0.843,1.4,-1 +-0.581,-1.27,-1 +-0.268,-1.26,-1 +-1.58,-0.842,1 +-0.52,1.2,-1 +-0.0203,1.92,1 +-1.52,0.934,-1 +1.05,0.444,-1 +0.351,-0.0402,1 +-0.322,-1.12,-1 +-0.654,-0.999,-1 +0.165,-1.58,-1 +2.6,0.951,1 +1.52,-0.0653,-1 +-1.29,-0.529,1 +1.18,0.392,-1 +-0.00264,-1.02,-1 +1.24,0.273,-1 +1.45,0.398,-1 +0.54,-1.26,-1 +-0.324,-1.03,-1 +-1.69,-1.2,1 +-0.455,0.868,-1 +2.16,1.16,1 +-0.232,-1.4,-1 +0.685,1.69,-1 +-0.0537,-1.52,-1 +1.03,0.548,-1 +-0.758,-1.21,-1 +-0.259,0.496,1 +-0.472,0.411,1 +-0.127,-1.33,-1 +-0.784,-1.05,-1 +-1.73,-1.32,1 +-1.24,-0.919,1 +0.275,-0.987,-1 +1.66,0.672,-1 +-1.07,0.963,-1 +-0.262,1.28,-1 +-1.4,-0.37,1 +-0.288,0.305,1 +0.778,1.44,1 +1.23,-0.493,-1 +-1.27,-0.273,1 +-0.994,1.14,-1 +0.74,1.2,1 +0.489,0.904,-1 +0.805,-0.237,1 +-1.86,-1.47,1 +-0.617,-1.02,-1 +-0.66,0.965,-1 +0.487,-0.13,1 +-0.0508,-1.19,-1 +0.96,-0.522,1 +-1.21,0.778,-1 +0.149,1.98,-1 +-1.76,-0.903,1 +0.0486,-0.389,1 +-0.891,1,-1 +0.263,-1.48,-1 +1.21,1.28,-1 +-1.46,0.0829,1 +-1.04,1.19,-1 +-1.69,-0.542,1 +0.707,0.0524,1 +1.31,-0.254,-1 +-0.488,-0.566,1 +1.8,-0.811,-1 +1.84,1.18,1 +1.49,-0.156,-1 +-0.881,-1.59,-1 +0.529,0.788,1 +0.571,0.197,1 +-2.07,0.0318,-1 +-1.23,-0.308,1 +0.682,1.5,-1 +-1.33,0.0398,1 +-1.35,0.424,1 +0.307,-0.678,-1 +0.859,-0.952,1 +-0.672,0.683,1 +1.36,0.258,-1 +-0.37,1.48,-1 +-1.9,-1.1,1 +-1.38,-0.32,1 +0.234,0.958,-1 +-1.01,-0.771,1 +-0.739,0.578,1 +-0.107,-1.5,-1 +0.592,0.973,-1 +0.52,1.21,-1 +0.416,1.86,1 +-0.181,0.344,1 +1.27,-0.441,-1 +0.422,-0.934,1 +-0.704,-0.802,-1 +0.088,-1.25,-1 +0.952,0.427,-1 +1.39,-0.218,-1 +-1.08,-1.3,-1 +1.25,-0.35,-1 +0.183,-1.21,-1 
+-0.0765,-1.12,-1 +-0.407,0.602,1 +0.975,0.809,-1 +-0.0552,0.882,1 +-0.946,1.02,-1 +1.11,-0.106,-1 +-0.794,1.07,-1 +-0.652,-1.1,-1 +-2.08,-1.78,1 +-0.631,-0.867,-1 +-0.856,-1.31,-1 +-0.215,-1.01,-1 +-1.12,0.154,1 +-1.1,0.135,1 +-0.134,-1.1,-1 +0.844,0.243,-1 +0.472,0.289,1 +1.64,0.174,-1 +-0.413,-0.0275,1 +-0.875,0.591,1 +-0.404,0.178,1 +-0.297,-0.467,1 +-0.698,-1.26,-1 +1.2,0.298,-1 +-0.346,-1.5,-1 +-1.14,-0.494,1 +-0.0242,-0.659,1 +-0.495,-1.2,-1 +-0.803,-0.782,-1 +-0.641,0.516,1 +-0.0874,-0.948,-1 +-1.04,0.679,-1 +0.701,0.0237,1 +-0.677,-1.41,-1 +-0.516,0.831,1 +-0.864,-1.56,-1 +0.863,-1.02,1 +-1.12,0.559,1 +-0.779,-1.14,-1 +-0.9,1.49,-1 +2.43,1.39,1 +-1.67,-1.73,1 +0.743,-1.18,1 +-1.52,-0.658,1 +1.28,-0.176,1 +1.58,-1.29,1 +0.61,0.488,-1 +-2.15,-0.858,1 +-0.0307,0.678,1 +-0.227,-1.19,-1 +0.642,1.65,1 +-1.58,-1.25,1 +0.373,1.48,-1 +0.333,-0.845,1 +-1.18,-0.758,1 +1.15,-1.52,1 +-0.601,-0.539,1 +-1.57,-1.74,1 +0.652,-0.646,1 +-1.43,1.26,-1 +0.71,0.0529,1 +1.18,0.585,-1 +-1.01,-1.27,1 +0.13,1.38,-1 +0.469,-0.321,1 +-1.08,0.72,-1 +0.987,0.858,-1 +1.57,-0.389,-1 +-0.411,0.727,1 +-0.72,0.189,1 +-0.00329,-0.0777,1 +-0.937,-0.086,1 +2.21,1.02,1 +-0.606,-0.997,-1 +1.4,0.377,-1 +0.37,0.312,1 +-1.41,-0.214,1 +-0.192,1.66,-1 +-0.0246,-0.141,1 +1.07,-0.0433,-1 +-0.477,-1.25,-1 +-1.35,-0.485,1 +0.996,0.225,-1 +-1.25,-1.28,-1 +-0.198,1.67,1 +-0.8,-0.282,1 +0.168,0.345,1 +0.851,1.46,-1 +1.45,-1.24,1 +-1.02,-1.18,-1 +-1.31,0.916,-1 +-0.457,-0.322,1 +0.0744,1.04,-1 +0.902,-1.08,1 +-0.904,0.953,-1 +-0.11,-0.911,-1 +1.01,-1.05,1 +0.441,-1.13,-1 +-0.287,-0.726,1 +0.205,-0.471,1 +-0.625,1.14,-1 +-0.623,-0.737,1 +-0.254,-0.932,-1 +-0.817,-1.19,-1 +-1.73,0.87,-1 +0.176,1.94,-1 +0.899,-0.235,1 +0.834,-0.584,1 +0.15,-0.256,1 +-1.23,-0.336,1 +0.866,-0.807,1 +1.07,0.0877,-1 +-0.256,1.52,-1 +2.34,-1.25,-1 +-0.374,-1.08,-1 +-1.16,0.381,-1 +-1.3,-1.17,1 +0.527,-0.251,1 +-0.748,-0.57,1 +1.02,0.997,-1 +0.105,0.103,1 +0.563,0.137,1 +0.317,-0.0233,1 +0.15,1.3,-1 +-1.31,0.00723,1 +1.42,-0.146,-1 
+0.576,1.7,1 +-0.106,-0.316,1 +-1.2,0.932,-1 +-1.14,0.488,-1 +-0.151,1.66,-1 +-1.46,-0.909,1 +1.36,-0.425,-1 +1.41,0.83,-1 +0.176,-1.11,-1 +-0.526,1.26,-1 +0.552,-1.07,1 +-1.14,0.304,1 +-1.64,0.143,1 +-0.189,-1.24,-1 +0.775,0.0171,1 +-0.598,1.22,-1 +-1.71,-1.29,1 +1.52,0.409,-1 +0.548,-0.959,-1 +-0.636,-0.988,1 +-0.0723,-1.08,-1 +0.0949,-0.854,1 +0.726,0.289,-1 +1.43,0.147,-1 +-0.0238,4.07E-04,1 +0.939,1.69,1 +-1.29,0.511,-1 +0.324,1.68,1 +-0.506,-1.09,-1 +0.341,-1.32,-1 +-0.978,0.931,-1 +-1.31,-0.229,1 +1.29,0.677,-1 +1.01,-0.94,1 +0.0219,-0.388,1 +-0.647,-1.22,-1 +-0.31,-1.38,-1 +-0.146,0.132,1 +1.38,0.877,-1 +0.24,-1.14,-1 +-1.01,-1.53,-1 +1.21,-1.02,1 +-0.623,1.24,-1 +-1.42,0.83,-1 +-0.0908,-0.846,-1 +1.39,-0.872,1 +0.502,-0.536,1 +-0.046,1.51,-1 +-1.35,-1.19,1 +1.77,0.109,-1 +-0.166,-0.986,-1 +0.152,1.53,-1 +0.308,0.933,-1 +-1.11,0.439,1 +-1.87,-0.451,1 +-1.66,-0.735,1 +1.18,0.424,-1 +-0.109,-0.884,-1 +0.243,0.304,1 +1.04,0.816,-1 +0.514,1.27,-1 +-0.145,1.67,-1 +0.169,1.86,1 +-0.984,-0.807,-1 +0.379,-0.755,-1 +-1.11,0.655,1 +-0.918,1.13,-1 +-0.43,-0.722,-1 +-1.62,-1.85,1 +0.377,0.198,1 +-0.329,-0.858,-1 +0.125,2.16,-1 +-0.108,-1.3,-1 +-0.118,1.7,1 +0.826,0.744,-1 +-1.1,0.811,-1 +1.2,-0.598,1 +-1.89,-0.727,1 +1.45,0.229,-1 +0.833,-0.254,1 +-1.7,-0.845,1 +0.272,1.82,1 +0.179,1.81,-1 +-0.951,0.533,-1 +-1.07,0.534,1 +-0.874,0.922,-1 +-0.0715,1.41,-1 +0.195,-0.296,1 +0.373,-1.17,-1 +0.808,-0.831,1 +-0.504,1.03,-1 +1.39,0.262,-1 +1.12,0.862,-1 +0.755,0.22,1 +1.57,-0.114,-1 +1.32,1.36,1 +0.802,0.133,-1 +-0.558,0.572,1 +0.27,0.696,1 +-0.606,-0.546,1 +-0.556,-0.505,1 +-1.74,0.254,-1 +0.143,0.337,1 +1.19,-0.404,-1 +-0.16,-1.36,-1 +1.29,0.153,-1 +1.47,-0.312,1 +-0.853,-1.17,-1 +-0.377,0.607,1 +0.816,0.306,1 +1.16,-1.08,1 +0.378,-0.56,1 +-0.567,-1.14,-1 +0.934,0.258,1 +1.01,-0.743,1 +0.96,-0.9,1 +-0.249,0.0234,1 +1.45,-0.26,-1 +-0.497,1.07,-1 +0.827,0.755,-1 +0.604,0.374,-1 +2.51,1.11,1 +0.154,-0.653,1 +-0.264,-0.768,1 +-0.573,-0.789,1 +0.792,0.988,-1 +-0.0338,1.52,-1 
+1.59,-0.522,-1 +-0.61,-0.306,1 +0.184,1.7,1 +0.0317,-0.821,1 +0.781,0.415,-1 +0.118,-1.37,-1 +-1.57,0.942,-1 +0.415,1.8,-1 +0.552,2.05,-1 +-1.21,-0.934,-1 +-2.14,-0.476,1 +0.0154,-1.41,-1 +-0.0789,-1.35,-1 +-0.397,1.32,-1 +-0.466,0.145,1 +0.238,-1.51,-1 +-0.433,-1.13,-1 +1.3,0.555,-1 +0.0991,-1.05,-1 +0.59,-0.796,1 +-0.235,-0.617,1 +0.796,0.429,1 +-1.97,-0.271,1 +0.284,2.37,-1 +-0.39,-0.209,1 +0.165,-0.405,1 +-0.424,2.01,-1 +-0.934,-1.58,-1 +0.218,-0.368,1 +-0.283,1.88,1 +-0.347,-1.5,-1 +-0.342,-1.31,-1 +0.313,0.189,1 +-0.341,-0.0358,1 +-0.00698,-1.15,-1 +-1.73,-1.3,1 +-0.451,0.775,1 +-1.62,-0.712,1 +-0.862,-0.0613,1 +-0.828,-0.839,-1 +-1.01,1.07,-1 +1.02,0.858,-1 +-0.255,0.387,1 +0.118,0.13,1 +-0.786,-0.64,1 +-0.0895,1.65,1 +2.46,1.23,1 +-0.803,-0.705,-1 +-0.618,-1.2,-1 +0.713,1.02,-1 +0.976,-1.53,1 +1.47,1.21,1 +-1.97,-0.674,1 +-0.012,1.42,-1 +0.748,-0.558,1 +-1.45,-0.203,1 +0.199,-0.86,-1 +-1.68,-0.51,1 +0.56,0.491,-1 +0.216,-1.12,-1 +-0.232,-0.843,1 +1.66,1.42,1 +0.301,2.01,-1 +1.33,0.825,-1 +1.18,0.08,-1 +-1.59,0.138,-1 +1.78,-0.508,-1 +1.37,-1.52,1 +0.802,0.431,1 +0.913,-0.318,1 +0.00564,-1.14,-1 +-1.08,1.31,-1 +0.379,1.37,-1 +-0.106,-0.933,-1 +0.854,-0.827,1 +-1.47,-0.212,1 +1.24,-1.33,1 +-0.314,-0.984,-1 +-0.474,-0.911,-1 +0.659,-0.057,1 +1.83,0.349,-1 +1.51,-1.15,1 +-0.237,-1.4,-1 +0.016,-1.13,-1 +0.26,1.91,-1 +-0.104,-1.09,-1 +-0.724,-1.48,-1 +1.24,-0.145,-1 +-0.776,-1.26,-1 +1.16,0.0489,-1 +1.05,-2.08,1 +0.143,1.05,-1 +-1.42,-1.51,1 +1.21,0.642,-1 +0.551,1.48,-1 +0.191,1.72,-1 +0.345,-0.891,-1 +-0.396,-1.38,-1 +-1.59,-0.766,1 +0.903,-1.17,1 +1.57,-0.0908,-1 +1.18,0.913,-1 +-0.323,-0.264,1 +-0.302,-1.22,-1 +-1.26,-0.296,1 +0.874,-1.41,1 +-0.885,1.05,-1 +-1.52,-0.74,1 +-0.716,-0.194,1 +-0.579,-0.397,1 +-1.52,0.388,-1 +1.57,-0.566,-1 +-0.702,-1.2,-1 +-0.366,-0.882,-1 +1.45,-1.78,1 +-1.22,0.831,-1 +1.39,-1.11,1 +0.969,0.921,-1 +0.659,1.07,-1 +-0.238,0.126,1 +-0.576,-0.463,1 +0.171,-0.886,-1 +0.893,0.345,-1 +-1.49,0.885,-1 +0.258,0.309,1 +-0.609,-0.917,-1 
+1.05,0.651,-1 +-0.827,-0.506,1 +0.374,-1.39,-1 +-0.784,0.969,-1 +1.08,1.42,1 +0.146,-0.151,1 +-1.93,0.603,-1 +-0.734,-1.09,-1 +-1.19,-0.529,1 +0.0176,0.0932,1 +1.72,-0.448,-1 +1.37,0.625,-1 +1.3,0.186,-1 +-0.0618,0.358,1 +-0.929,-1.39,-1 +1.14,-0.0771,-1 +-0.85,-0.92,-1 +-1.78,-1.49,1 +-1.86,-0.977,1 +-0.699,-5.78E-04,1 +-0.995,-1.39,-1 +-0.233,0.593,1 +-0.415,1.48,-1 +-1.66,-0.792,1 +-0.152,-0.669,-1 +-0.673,0.313,1 +-1.24,-0.72,1 +0.73,-0.37,-1 +-1.25,0.104,1 +1.85,1.38,1 +0.134,-0.922,-1 +0.546,-0.195,1 +0.604,1.03,-1 +-1.13,-1.08,1 +-1.13,0.193,1 +-1.62,1.02,-1 +-1.07,-1.11,1 +-0.443,1.86,-1 +0.0438,0.536,1 +-0.262,-1.2,-1 +0.156,0.152,1 +-0.311,0.995,1 +0.639,1.2,-1 +-1.42,-0.36,1 +-1.43,0.707,-1 +1.52,-1.34,1 +-1.91,-2.24,1 +0.132,2,-1 +2.06,1.12,1 +0.512,-1.34,-1 +-0.138,-0.951,-1 +1.15,0.824,-1 +0.233,-1.29,-1 +-0.124,-1.27,-1 +0.0584,-0.638,1 +0.486,0.245,1 +-0.0227,0.163,1 +1.15,1.16,1 +0.834,1.24,-1 +-2.03,-1.65,1 +0.0205,1.23,-1 +0.522,1.36,1 +-0.801,-0.24,1 +-0.178,-0.238,1 +1.16,1.05,-1 +-1.94,-1.03,1 +1.45,0.273,-1 +-1.07,1.16,-1 +0.413,-1.3,-1 +1.32,0.0928,-1 +0.322,1.68,1 +-0.292,-1.32,-1 +0.206,1.19,-1 +-0.639,-1.37,-1 +0.407,1.67,1 +0.144,1.56,-1 +-1.58,0.588,-1 +-1.58,0.807,-1 +0.683,-0.0347,1 +-0.668,-1.13,-1 +-2.05,-1.4,1 +-0.679,-0.0957,1 +-0.924,1.45,-1 +-0.547,-1.31,-1 +0.0296,1.2,-1 +2.23,0.895,1 +-0.56,-1.41,-1 +-0.558,-1.67,-1 +0.0598,-1.26,-1 +0.51,-1.15,-1 +-0.122,-0.0671,1 +-0.502,-1.04,-1 +1.41,-0.808,1 +-0.68,0.314,1 +-0.347,0.574,1 +-0.155,1.29,-1 +1.18,0.0819,-1 +1.8,-1,-1 +-0.653,-0.734,-1 +1.3,0.648,-1 +0.265,-0.182,1 +0.896,0.36,1 +-0.945,-1.43,-1 +-1.26,-1.01,1 +-1.65,-0.723,1 +-0.103,-1.71,-1 +1.87,1.19,1 +0.839,-0.0819,1 +0.167,0.87,-1 +-0.917,-0.863,-1 +1.26,-0.912,1 +-0.786,-1.02,-1 +2.81,1.3,1 +0.725,-0.54,1 +-0.709,-0.262,1 +0.56,-1.05,-1 +-1,0.078,1 +1.18,-0.0513,-1 +-1.29,0.891,-1 +0.126,-0.865,-1 +1.31,-0.238,-1 +1.62,-0.919,-1 +0.918,0.82,-1 +2.17,1.24,1 +0.433,0.38,1 +0.211,-1.28,-1 +1.18,-0.0335,-1 +-1.32,-0.403,1 
+-0.0936,-1.28,-1 +-0.812,-0.411,1 +0.933,1.21,-1 +0.642,0.909,-1 +1.16,1.48,1 +-0.54,-0.601,1 +-1.48,-0.682,1 +1.41,-1.48,1 +-1.63,-1.16,1 +0.663,1.7,-1 +0.0329,1.88,-1 +-1.08,-0.0594,1 +0.981,-0.0572,-1 +-0.334,0.18,1 +-0.477,0.676,1 +1.12,-0.756,1 +1.36,0.482,-1 +0.123,-1.62,-1 +-0.259,0.996,-1 +-0.0662,-0.479,1 +-1.4,0.553,-1 +0.1,-1.48,-1 +1.37,0.428,-1 +-0.32,0.707,1 +1.76,1.33,1 +-0.254,0.403,1 +0.636,1.34,1 +-0.437,-0.984,-1 +-0.804,0.396,1 +0.685,2.2,-1 +1.89,0.834,1 +-0.615,-1.36,-1 +0.387,1.71,-1 +-0.238,-1.49,-1 +-1.03,-0.422,1 +0.656,0.844,-1 +0.713,2.04,1 +1.26,1.72,-1 +-0.745,0.987,-1 +-1.44,-0.358,1 +0.127,1.22,-1 +1.79,1.5,1 +-0.27,0.0117,1 +0.418,-1.12,-1 +2.17,1.45,1 +1.66,1.3,1 +2.12,1.1,1 +-0.252,0.623,1 +0.16,-1.4,-1 +1.27,0.301,-1 +-0.93,0.758,-1 +-0.966,0.487,1 +0.0236,-1.07,-1 +0.517,-0.931,-1 +-1.95,-0.9,1 +0.213,-0.752,-1 +-0.377,-0.973,-1 +0.395,0.423,1 +-0.76,-0.862,-1 +-0.947,0.445,1 +0.112,0.238,1 +1.24,0.342,-1 +-0.322,-0.356,1 +-1.15,0.587,-1 +0.923,0.246,-1 +-0.339,0.806,1 +-0.711,0.0759,1 +0.157,-0.934,-1 +0.235,-1.05,-1 +-0.111,-1.33,-1 +-0.651,0.369,1 +0.527,1.18,-1 +1.76,1.48,1 +0.0386,-1.26,-1 +0.584,-0.212,1 +-1.02,-1.01,-1 +-1.2,-0.248,1 +-0.824,0.858,-1 +0.191,1.94,-1 +-0.259,1.34,-1 +-1.22,1.12,-1 +1.11,0.57,-1 +0.073,-0.00169,1 +-0.0281,-0.323,1 +0.48,-1.15,-1 +0.0967,-1.4,-1 +0.375,1.24,-1 +-1.63,0.101,-1 +0.02,-1.54,-1 +0.592,1.15,-1 +0.0071,-0.924,-1 +0.784,0.972,-1 +1.33,0.335,-1 +-1.42,-0.0747,1 +-0.795,1.17,-1 +-0.0247,-1.5,-1 +0.904,1.04,-1 +0.237,0.0367,1 +0.604,-0.709,1 +-0.948,-0.982,-1 +0.686,-0.19,1 +1.2,-0.754,1 +-1.59,0.763,-1 +0.255,0.412,-1 +1.78,1.35,1 +-0.629,-0.118,1 +0.0428,-1.04,-1 +0.973,0.457,-1 +-0.639,1.28,-1 +1.23,-0.00847,-1 +2.64,0.966,1 +0.323,-0.796,1 +-1.12,-1.39,-1 +-1.14,0.693,-1 +0.0328,1.22,-1 +0.0496,-1.05,-1 +-2.11,-1.45,1 +-1.28,0.213,1 +-0.577,0.206,1 +2.71,1.35,1 +0.121,1.63,-1 +-0.0275,-1.42,-1 +-1.15,0.406,1 +-0.204,-1.54,-1 +-0.299,-1.14,-1 +0.216,1.96,1 +-0.567,-1.14,-1 
+-0.236,-1.17,-1 +1.22,-0.0466,-1 +-0.852,-0.59,1 +-1.06,0.917,-1 +0.595,1.02,-1 +1.05,0.768,-1 +0.996,1.04,-1 +1,1.69,1 +0.799,0.995,-1 +0.725,1.39,-1 +0.782,1.8,-1 +1.93,1.34,1 +-0.791,-1.43,-1 +1.33,-0.486,1 +1.64,-0.605,-1 +1.13,0.33,-1 +-1.58,0.456,-1 +-0.0767,1.68E-04,1 +-1.87,-1.17,1 +-0.892,1.25,-1 +0.785,0.173,-1 +1.04,0.779,-1 +-0.693,0.953,-1 +0.682,-0.146,-1 +-0.167,1.07,-1 +-0.621,-1.12,-1 +0.133,-0.227,1 +0.871,1.29,-1 +-0.719,-0.384,1 +0.0379,-1.64,-1 +1.57,1.43,1 +0.314,1.84,-1 +1.68,-0.293,-1 +-0.656,-0.334,1 +0.551,0.921,-1 +-1.52,0.946,-1 +-0.156,-1.16,-1 +0.0652,-0.766,-1 +-1.15,0.771,-1 +1.07,-0.221,1 +0.535,1.4,-1 +-0.0692,1.73,1 +2.36,1.33,1 +-0.599,-0.965,-1 +-0.903,-0.0822,1 +-1.6,0.682,-1 +0.271,-0.0382,1 +0.838,0.603,-1 +1.34,-0.689,-1 +0.657,1.05,-1 +-1.52,0.506,-1 +-1.21,0.488,-1 +-0.112,1.85,1 +0.74,1.53,1 +0.451,-0.00833,1 +-1.6,0.632,-1 +-2.1,-0.859,1 +-1.37,0.645,-1 +0.52,1.25,-1 +-0.198,-0.536,1 +1.54,1.24,1 +-0.421,-0.731,-1 +1.1,0.253,-1 +0.734,-0.212,1 +-2.12,-0.575,1 +-0.0531,-1.26,-1 +0.835,-0.0137,-1 +1.1,0.604,-1 +-0.505,1.17,-1 +-0.524,-0.857,-1 +-1.89,-0.799,1 +-0.975,-0.182,1 +0.616,-1.07,1 +-0.362,-1.7,-1 +0.611,0.649,-1 +-1.41,-1.02,1 +-0.614,-1.43,-1 +-0.286,-0.571,1 +-0.697,-1.69,-1 +-0.859,0.885,-1 +0.526,1.36,1 +-0.251,0.846,1 +2.01,-0.695,-1 +-1.51,-0.593,1 +-0.155,-1.03,-1 +-1.8,0.529,-1 +-0.162,-0.0223,1 +-0.628,-0.267,1 +-0.746,-0.93,-1 +-0.61,0.00888,1 +-0.0339,-1.66,-1 +0.935,0.556,-1 +1.01,0.829,-1 +-2.77,0.0962,-1 +1.26,0.0903,-1 +-0.638,-1.01,-1 +0.552,0.961,-1 +-0.239,-0.848,-1 +0.275,-0.202,1 +-1.83,0.601,-1 +-0.507,1.24,-1 +0.908,-0.506,1 +1.16,0.703,-1 +-0.751,-1.37,-1 +-0.628,0.601,1 +-0.5,-0.0595,1 +-0.889,-0.884,-1 +-0.341,0.169,1 +0.0554,-0.139,1 +-0.455,-1.3,-1 +-0.116,0.0815,1 +-0.71,-0.874,1 +1.35,-0.25,-1 +1.51,1.59,1 +1.67,-0.978,-1 +-0.116,-1.36,-1 +-1.95,-0.16,1 +1.75,-1.03,-1 +2.36,1.5,1 +1.24,0.828,-1 +-0.526,1.19,-1 +-0.0866,-1.37,-1 +-1.46,-0.891,1 +0.637,0.00536,1 +0.384,-0.314,1 
+0.431,0.266,-1 +1.35,0.644,-1 +-0.744,0.196,1 +-0.45,-1.59,-1 +1.31,1.07,-1 +-0.696,-1.2,-1 +0.553,-0.817,1 +-0.532,-0.179,1 +-0.861,-0.862,-1 +-1.25,-0.897,1 +-0.631,-0.914,-1 +0.524,-0.648,1 +0.791,1.69,1 +-0.501,1.1,-1 +0.768,-0.917,1 +1.29,1.07,-1 +1.24,0.00235,-1 +0.811,2,-1 +1.18,1.25,1 +0.32,-0.465,1 +-1.27,-0.704,1 +-0.869,1.38,-1 +-0.468,0.245,1 +1.44,-1.52,1 +-1.62,-1.16,1 +1.89,1.1,1 +0.284,2.31,-1 +0.0066,-0.994,-1 +-1.81,-0.643,1 +-1.19,-1.03,-1 +-0.747,0.468,1 +1.19,-0.966,1 +0.748,1.81,1 +0.225,1.34,-1 +0.899,0.538,-1 +0.904,0.195,-1 +-1.25,0.479,-1 +0.74,1.35,1 +-0.798,-1.21,-1 +0.371,-0.589,1 +1.11,0.0482,-1 +-0.27,-1.28,-1 +-0.369,-0.984,-1 +-0.752,-0.319,1 +-0.538,-1.15,-1 +-0.509,0.912,-1 +2.51,1.3,1 +0.137,1.69,-1 +-0.129,1.57,-1 +-1.55,-0.663,1 +0.818,0.845,-1 +-0.717,-1.03,-1 +0.0188,0.0512,1 +-0.71,0.465,1 +-1.43,-1.18,1 +-1.39,-0.282,1 +0.753,1.84,-1 +0.598,0.484,-1 +0.0256,0.228,1 +1.02,0.508,-1 +-1.32,1.12,-1 +-1.7,0.422,-1 +-0.037,-1.35,-1 +-1.01,-0.211,1 +-1.25,-1.1,-1 +1.5,0.637,-1 +-1.02,0.899,1 +-0.827,-0.757,-1 +1.54,-0.26,-1 +-0.0979,-1.01,-1 +-1.01,-1.29,-1 +-0.707,-0.378,1 +-0.878,-1.16,-1 +-1.34,0.717,-1 +2.21,1.21,1 +0.25,0.466,1 +2.34,1.13,1 +0.614,-0.157,1 +2.03,-0.427,-1 +-0.568,-1.36,-1 +-1.64,-1.27,1 +0.779,-0.815,1 +-0.707,-0.568,1 +-0.712,-1.1,-1 +1.62,0.0564,-1 +-1.01,0.412,1 +1.61,1.4,1 +-1.26,-0.503,1 +-0.845,-0.377,1 +1.31,-0.566,1 +-0.218,2.09,-1 +-0.68,-1.07,-1 +1.15,-1.56,1 +0.55,0.857,-1 +-0.43,-1.4,-1 +0.403,-0.997,-1 +-0.0017,0.282,1 +-0.617,-0.458,1 +0.143,-1.56,-1 +-0.221,0.12,1 +0.58,1.17,-1 +0.876,-0.969,1 +-0.0237,0.303,1 +0.0052,0.24,1 +0.904,0.97,-1 +-0.806,0.575,1 +0.423,-0.334,1 +1.08,1.36,1 +-0.639,1.25,-1 +-0.0782,-1.1,-1 +0.595,0.429,-1 +0.908,1.48,1 +0.355,0.562,1 +0.149,-0.295,1 +-1.94,-0.273,1 +0.222,0.214,1 +-1.44,-0.43,1 +0.265,-0.375,1 +-0.295,0.565,1 +-1.55,-1.02,1 +-0.93,-0.895,-1 +-1.18,0.422,1 +-0.203,-1.08,-1 +1.26,-0.328,1 +1.48,-1.05,1 +-0.499,-0.766,-1 +0.12,-1.12,-1 +-0.338,-0.91,-1 
+0.909,0.522,-1 +0.395,0.0856,-1 +-0.7,-1.25,-1 +0.0828,-0.879,1 +-0.832,-1.33,-1 +-2.01,-1.85,1 +-1.55,0.695,-1 +-0.755,-1.09,-1 +-0.83,0.993,-1 +0.314,1.54,-1 +-1.44,-0.225,1 +0.287,1.45,1 +-0.278,-0.815,1 +1.46,-0.509,1 +0.496,1.52,-1 +1.81,-0.218,-1 +0.824,0.45,-1 +-2.56,-0.0784,-1 +-0.836,-0.507,1 +0.066,-1.23,-1 +-0.742,0.36,1 +1.31,0.328,-1 +1.35,1.21,1 +0.34,-1.03,-1 +-1.07,-0.616,1 +-0.634,1.65,-1 +1.95,-1.02,-1 +-0.0397,-1.09,-1 +1.04,-1.13,1 +-1.15,0.729,-1 +-0.528,0.953,1 +0.854,1.15,-1 +-1.04,1.04,-1 +-0.921,-1.19,-1 +-1.78,-1.58,1 +-0.0184,-1.13,-1 +-1.55,0.386,-1 +-0.539,-1.21,-1 +-0.455,0.494,1 +1.44,1.38,1 +-1.2,0.659,-1 +0.731,1.31,-1 +-1.39,-1.09,1 +-0.413,0.0554,1 +0.271,0.38,1 +-0.882,1.29,-1 +1.29,-1.04,1 +0.299,1.49,-1 +-0.754,1.1,-1 +-0.0601,2.04,1 +1.42,-0.697,1 +1.12,0.173,-1 +-0.783,-1.33,-1 +-1.55,-1.6,1 +-1.34,0.146,1 +-1.1,-1.17,-1 +0.101,-1.39,-1 +-0.484,-1.2,-1 +-0.984,1.03,-1 +-0.269,0.961,1 +-0.547,1.71,-1 +0.494,-1.01,-1 +-0.123,1.86,-1 +-0.0853,-1.12,-1 +1.41,0.607,-1 +1.27,-0.159,-1 +-0.0373,-1.32,-1 +0.433,1.65,1 +-1.03,-0.858,1 +0.69,0.745,-1 +0.354,1.06,-1 +-1.92,-1.06,1 +-0.541,-0.65,1 +1.38,-1.36,1 +1.29,0.525,-1 +-0.836,0.959,-1 +-0.247,0.775,1 +0.552,0.942,-1 +-0.19,0.168,1 +0.714,-1.19,1 +1.19,0.594,-1 +0.428,-1.43,-1 +-0.575,-1.62,-1 +-1.97,-1.72,1 +-0.0255,-1.3,-1 +-0.892,1.02,-1 +-0.132,-1.35,-1 +-1.85,0.545,-1 +-0.491,-0.429,1 +-0.158,0.358,1 +-0.475,-1.02,-1 +1.26,-2.39,1 +-0.807,0.992,-1 +1.19,0.115,-1 +0.439,1.5,-1 +1.44,-1.73,1 +0.109,1.65,1 +-0.596,-1.33,-1 +1.11,0.431,-1 +-0.347,-0.556,1 +1.47,1.39,1 +1.63,-0.56,-1 +-1.53,1.21,-1 +-1.12,1.1,-1 +0.0551,1.34,-1 +-0.388,-1.08,-1 +0.136,-1.36,-1 +-0.79,-0.823,-1 +0.646,1.82,1 +-0.468,-0.206,1 +0.735,0.571,-1 +-0.777,-1.33,-1 +-0.31,1.45,-1 +-0.0693,-1.3,-1 +-0.162,-0.334,1 +0.724,-0.574,1 +-1.03,0.153,1 +0.385,-0.744,-1 +1.23,-0.115,-1 +-1.49,0.796,-1 +0.0586,-0.133,1 +1.05,-0.362,1 +-0.84,-1.05,-1 +0.351,-1.42,-1 +-1.34,-0.462,1 +-0.213,0.77,1 +0.523,1.05,-1 
+-0.665,1.23,-1 +1.03,1.55,1 +0.495,1.54,-1 +0.582,0.232,-1 +0.35,0.889,-1 +0.321,1.58,-1 +0.155,0.0373,1 +-0.783,0.931,-1 +0.153,-0.875,1 +1.08,0.925,-1 +0.614,0.908,-1 +-0.353,1.98,1 +0.0762,-1.27,-1 +0.282,-1.43,-1 +0.876,-0.807,1 +-0.685,0.459,1 +0.265,1.4,-1 +0.29,0.591,-1 +0.662,1.49,-1 +-0.689,-1.45,-1 +-0.214,-1.27,-1 +-0.119,-0.842,-1 +-1.84,-1.23,1 +-0.102,-0.306,1 +-0.995,-0.937,-1 +-0.479,0.0719,1 +-1.31,0.463,-1 +-0.486,-1.62,-1 +0.115,-0.443,1 +0.698,0.784,-1 +1.09,-1.24,1 +-1.57,-0.401,1 +-0.274,0.489,1 +0.764,1.98,1 +2.41,0.946,1 +-0.348,0.119,1 +1.27,-0.194,-1 +-0.22,-1.38,-1 +1.08,-1.83,1 +-0.414,-1.28,-1 +2.12,1.07,1 +0.22,2.08,-1 +0.207,-1.38,-1 +-0.519,-1,-1 +-0.00634,-1,-1 +-0.68,1.68,-1 +-1.18,-0.522,1 +0.32,0.282,1 +-0.638,1.32,-1 +0.72,-0.154,1 +0.92,-0.717,1 +0.511,-0.773,1 +-0.681,-0.586,1 +1.11,0.205,-1 +0.087,-1.28,-1 +1.13,-0.541,1 +-0.016,-1.26,-1 +-0.439,-0.884,1 +0.424,-0.84,-1 +2.25,-1.4,-1 +0.139,-0.803,1 +0.488,1.31,-1 +-1.04,1.27,-1 +1.33,-0.704,-1 +-1.23,0.89,-1 +-0.329,-0.955,-1 +0.174,-1.11,-1 +-0.699,0.755,-1 +-0.137,-1.16,-1 +1.09,-1.1,1 +-0.0172,-1.03,-1 +0.269,0.605,1 +0.305,1.63,1 +-1.82,0.904,-1 +-0.527,-1.38,-1 +0.557,-0.255,1 +-2.41,0.26,-1 +-0.514,-1.07,-1 +-0.325,-1.32,-1 +2.53,1.5,1 +0.471,2.03,-1 +0.986,0.843,-1 +0.746,0.441,-1 +-1.32,0.907,-1 +1.33,1.7,1 +1.59,-0.378,-1 +0.387,-6.53E-04,1 +-0.914,-0.919,-1 +1.41,0.0907,-1 +-0.639,0.522,1 +0.866,1.14,-1 +-1.78,-0.491,1 +0.391,-0.0901,1 +-1.27,0.184,1 +-1.08,-0.612,1 +-0.717,0.455,1 +-1.11,0.00172,1 +-1.51,-0.966,1 +-0.862,1.53,-1 +0.605,1.86,1 +-0.818,0.654,1 +-0.391,1.1,-1 +0.941,-0.254,1 +1.59,1.72,1 +-1.51,-1.16,1 +0.413,0.473,1 +-0.351,-1,-1 +1.68,0.113,-1 +-0.834,1.48,-1 +1.45,-0.507,-1 +-0.559,0.0594,1 +1.23,-0.0997,-1 +-0.0342,0.145,1 +-0.834,1.41,-1 +0.743,-1.01,1 +1.01,1.82,1 +0.191,-1.29,-1 +0.0543,-0.967,-1 +0.768,-1.25,1 +1.12,0.612,-1 +0.424,0.547,1 +1.29,-0.391,-1 +-0.612,-1.42,-1 +-0.201,0.287,1 +-0.779,1.09,-1 +0.44,0.246,1 +0.778,0.952,-1 
+0.0179,-1.01,-1 +-0.216,-1.38,-1 +-0.554,-0.38,1 +0.0698,-1.25,-1 +-0.869,0.625,-1 +-1.53,-1.17,1 +-0.175,-0.909,-1 +-1.5,-1.14,1 +0.528,-1.04,1 +1.07,0.61,-1 +-0.987,-0.735,1 +-0.468,-1.17,-1 +1.77,0.239,-1 +0.885,-0.372,1 +2.59,-1.15,-1 +1.04,0.285,-1 +0.736,1.64,-1 +1.23,0.458,-1 +-0.137,-1.39,-1 +-0.0574,-0.208,1 +-0.496,1.03,-1 +-1.04,0.984,-1 +0.952,-1.9,1 +-0.21,0.11,1 +-1.95,0.614,-1 +1.68,-0.472,-1 +-0.866,0.751,-1 +0.322,-1.04,-1 +-1.71,-0.653,1 +-0.486,-1.53,-1 +0.587,-0.311,1 +-0.55,0.236,1 +1.4,-0.874,1 +-0.384,-1.08,-1 +0.746,0.991,-1 +0.758,-1.45,-1 +-0.0468,-0.332,1 +0.698,1.02,-1 +-1.53,0.658,-1 +-0.162,-1.24,-1 +-0.507,0.69,1 +1.41,-1.38,1 +-0.735,-1.19,-1 +-0.141,-0.119,1 +1.31,0.628,-1 +1.38,-1.02,1 +1.18,-1.19,1 +-0.0785,-1.16,-1 +0.844,1.22,-1 +0.913,-1.6,1 +-1.04,1.33,-1 +-1.1,0.145,1 +-0.244,-1.2,-1 +0.0643,-1.28,-1 +0.483,-1.23,-1 +1.37,1.29,1 +-0.869,0.634,-1 +-0.0547,-1.06,-1 +0.816,-0.133,1 +-1.49,0.744,-1 +-0.813,-0.665,1 +1.6,0.0487,-1 +0.742,1.56,1 +1.45,-0.188,-1 +-0.791,-1.44,-1 +-1.31,0.118,1 +0.0336,0.323,1 +0.308,-1.34,-1 +0.856,-1.11,1 +0.532,-0.0431,1 +-0.171,1.68,1 +-0.305,0.247,1 +-0.422,-0.241,1 +0.836,0.849,-1 +1.77,0.013,-1 +-1.56,0.514,-1 +0.137,-0.862,-1 +-1.07,-0.017,1 +-1.31,-0.32,1 +-1.33,-0.581,1 +-1.76,-1.58,1 +-0.258,-1.06,-1 +-0.253,-0.809,-1 +0.395,1.35,1 +-1.39,-0.156,1 +0.446,-1.07,-1 +1.52,0.795,-1 +1.44,0.898,-1 +1.04,0.952,-1 +-1.82,-0.847,1 +-1.11,0.276,1 +-1.2,-1.19,-1 +0.268,1.14,-1 +1.29,-1.19,1 +-0.935,1.02,-1 +0.247,-0.96,-1 +0.387,1.59,1 +0.247,0.41,1 +0.722,0.153,-1 +0.249,-0.451,1 +0.923,0.166,1 +-0.237,-0.0378,1 +1.27,0.496,-1 +-1.2,1.43,-1 +-1.19,-0.939,-1 +0.0209,-0.973,-1 +0.637,0.984,-1 +0.714,-1.08,1 +-0.215,0.549,1 +0.306,-0.161,1 +0.205,-1.28,-1 +1.51,1.52,1 +-0.761,0.373,1 +-0.706,-0.697,-1 +-0.383,0.671,1 +-1.11,-0.44,1 +0.496,-0.614,1 +-0.0529,-0.0168,1 +2.09,1.1,1 +-1.28,-1.17,1 +1.44,0.0946,-1 +2.06,-0.893,-1 +-0.638,-0.752,-1 +0.402,-0.39,1 +0.122,0.599,1 +1.42,1.49,1 +-1.55,0.588,-1 
+0.854,0.0949,1 +-0.268,0.75,1 +-0.57,0.502,1 +-0.381,-0.959,-1 +0.236,0.443,1 +0.898,-0.719,1 +-0.432,1.13,-1 +1.14,-0.938,1 +0.0935,2.1,-1 +0.463,0.935,-1 +-0.23,-1.02,-1 +0.671,0.181,1 +0.168,1.7,-1 +-0.175,0.0644,1 +0.796,0.423,1 +-0.824,-0.471,1 +-0.93,-0.502,1 +-1.06,1.49,-1 +0.375,1.27,-1 +2.45,1.25,1 +-2.04,0.401,-1 +0.0735,-1.01,-1 +-0.737,-0.0677,1 +0.848,0.402,-1 +0.281,0.343,1 +-1.13,-0.0974,1 +-0.821,-1.44,-1 +-0.211,-0.334,1 +-0.364,-1.4,-1 +-1.27,-0.673,1 +0.784,1.39,-1 +-1.02,0.883,-1 +0.833,-0.0565,1 +0.478,1.31,1 +-1.14,-0.245,1 +1.14,0.494,-1 +0.667,-1.01,1 +2.17,0.926,1 +-0.129,0.877,1 +-0.581,0.582,1 +0.427,1.77,1 +-0.138,0.425,1 +-1.98,0.875,-1 +-0.337,0.559,1 +-0.369,-0.965,-1 +0.325,-0.153,1 +-0.232,-0.364,1 +-0.749,1.26,-1 +-0.566,1.19,-1 +1.33,-0.774,1 +-1.25,-1.47,-1 +0.00453,1.47,-1 +-1.81,-0.543,1 +-0.307,0.938,-1 +-1.18,0.732,-1 +-1.68,0.93,-1 +-1.52,-1.4,1 +-1.45,-0.701,1 +0.616,1.77,1 +-0.0222,-0.202,1 +0.246,-0.767,-1 +-0.763,-0.573,1 +-0.581,-0.182,1 +1.42,-0.0465,-1 +-0.139,-1.27,-1 +-0.179,0.19,1 +-0.721,1.19,-1 +-0.709,-0.942,-1 +0.945,0.239,1 +-0.891,0.593,-1 +-1.18,0.862,-1 +1.3,-0.028,-1 +0.576,-0.342,1 +0.0381,0.167,1 +-0.219,-0.195,1 +0.855,0.959,-1 +-1.08,-1.17,-1 +-0.596,-0.989,-1 +-1.41,-0.856,1 +-0.298,-0.243,1 +0.444,1.14,-1 +1.01,0.397,-1 +1.31,-0.36,1 +1.78,1.19,1 +1.1,0.458,-1 +-1.33,-0.647,1 +-1.6,-0.541,1 +-1.2,0.455,-1 +1.01,1.57,1 +1.38,0.583,-1 +-0.223,1.04,-1 +-1,-1.22,-1 +0.576,-0.665,1 +-0.394,1.39,-1 +-0.573,-0.88,-1 +-2.19,0.0991,-1 +0.729,1.49,1 +0.308,-0.913,-1 +0.661,0.0147,1 +0.515,0.884,-1 +1.32,-0.144,-1 +-1.05,-0.382,1 +1.46,-0.0465,-1 +1.22,-0.159,-1 +0.639,-0.815,-1 +-0.235,0.352,1 +-2,0.403,-1 +-0.158,-1.08,-1 +1.71,1.39,1 +0.905,-0.551,1 +-0.0949,-0.821,-1 +0.781,1.08,-1 +-0.624,1.75,-1 +-1.4,1.19,-1 +-1.21,0.357,1 +0.123,-1.24,-1 +1.01,0.446,-1 +-0.666,-1.34,-1 +-0.819,-0.943,-1 +0.202,-1.17,-1 +0.745,0.681,-1 +-0.461,-1.07,-1 +0.622,-0.546,1 +-1.51,-1.39,1 +1.05,0.409,-1 +0.751,0.608,-1 
+-1.13,0.164,1 +2.33,1.21,1 +0.855,-0.265,1 +-0.0247,-0.765,-1 +0.21,-1.26,-1 +-1.67,-0.212,1 +-0.394,-0.338,1 +-0.389,-1.2,-1 +1.11,1.33,1 +-1.24,0.323,1 +-0.116,-1.23,-1 +0.372,1.6,-1 +-1.23,-0.806,1 +1.31,-1.72,1 +-0.254,-1.37,-1 +-1.15,-0.647,1 +1.21,-1.77,1 +0.00425,1.61,-1 +-0.67,1.22,-1 +1.57,1.16,1 +-0.241,0.897,1 +1.04,0.93,-1 +1.39,0.504,-1 +1.96,-0.709,-1 +0.978,-1.33,1 +-1.67,-0.00451,1 +-0.529,-1.02,-1 +-0.137,1.44,-1 +-1.87,-0.78,1 +1.04,-0.24,1 +-1.21,-0.785,1 +0.8,0.662,-1 +0.133,2.05,-1 +1.34,-1.8,1 +0.175,0.652,1 +1.14,-0.0829,-1 +-0.634,1.26,-1 +-0.617,-0.792,-1 +-1.42,0.728,-1 +0.897,0.742,-1 +-0.0814,2.18,1 +1.27,1.6,1 +-0.759,0.366,1 +-0.847,-0.896,-1 +1.23,-0.785,1 +-0.843,-1.14,-1 +0.357,0.647,-1 +-1.36,-0.258,1 +0.977,0.597,-1 +1.05,-0.467,1 +-0.717,1.23,-1 +0.955,-0.0158,-1 +1.6,-0.183,-1 +-1.21,-0.733,1 +-0.593,1.28,-1 +1.14,0.814,-1 +-0.597,0.232,1 +0.575,-0.19,1 +-1.34,-0.31,1 +-0.129,-0.061,1 +0.447,0.92,-1 +0.377,-0.643,1 +-1.54,0.643,-1 +-1.25,-0.689,1 +2.34,1.18,1 +-0.341,1.55,-1 +1.27,0.0842,-1 +-1.73,0.562,-1 +0.957,0.0986,-1 +-0.639,-1.04,-1 +0.303,-1.35,-1 +-0.439,1.37,-1 +-0.287,-1.07,-1 +1.46,0.651,-1 +0.957,-0.786,1 +-0.0107,-0.96,-1 +1.21,0.0512,-1 +0.105,-1.67,-1 +0.786,0.859,-1 +-1.08,0.356,1 +0.0909,-1.39,-1 +-1.34,0.805,-1 +-0.647,1.08,-1 +1.17,-1.4,1 +-0.811,-1.35,-1 +-0.961,-0.102,1 +-0.968,-0.226,1 +-0.322,0.535,1 +-1.26,-0.515,1 +0.62,-0.986,1 +-2.32,0.0861,-1 +1.21,1.31,1 +0.196,1.08,-1 +1.45,1.33,1 +0.137,-0.247,1 +-0.974,-1.43,-1 +-0.335,-1.69,-1 +-0.0999,1.97,1 +0.272,0.813,-1 +0.56,1.18,-1 +-0.0527,-0.746,-1 +-0.188,-0.998,-1 +-0.369,0.692,1 +-0.226,-1.06,-1 +0.458,-0.709,1 +-1.26,-0.912,-1 +-1.33,-0.392,1 +-0.621,1.52,-1 +-0.753,1.1,-1 +-0.435,-1.08,-1 +0.328,1.28,-1 +0.227,1.71,1 +-0.715,0.0859,1 +0.81,1.93,-1 +0.364,1.98,1 +0.624,1.1,-1 +0.922,0.93,-1 +-0.416,-0.941,-1 +-1.37,0.968,-1 +0.269,1.62,1 +0.353,1.58,-1 +0.11,0.553,1 +1.49,0.385,-1 +0.571,-1.53,-1 +0.171,1.59,1 +0.409,-0.0572,1 +1.93,-0.842,-1 
+0.907,0.191,-1 +0.0413,0.345,1 +0.484,-1.3,-1 +-0.0305,-1.09,-1 +1,-0.0897,-1 +1.09,0.299,-1 +-0.543,-0.0204,1 +0.659,0.913,-1 +-0.542,-0.739,-1 +-1.02,1.53,-1 +-1.63,0.293,-1 +1.49,0.627,-1 +-0.583,-1.05,-1 +-0.807,1.12,-1 +-0.683,1.38,-1 +0.0734,-0.162,1 +0.388,1.64,-1 +0.874,1.79,-1 +0.584,0.679,-1 +1.42,0.374,-1 +-0.779,0.676,1 +0.678,0.335,-1 +0.204,-1.06,-1 +1.43,0.498,-1 +-0.779,-1.2,-1 +-1.3,0.577,-1 +1.03,0.0331,-1 +-0.532,-1.17,-1 +0.119,-1.28,-1 +1.53,-1.05,1 +-0.481,-0.904,-1 +0.858,0.399,-1 +0.126,0.819,-1 +-0.88,-1.27,-1 +0.145,-0.626,1 +0.534,1.86,1 +2.03,1.1,1 +-0.546,0.442,1 +1.42,1.8,1 +1.41,-2.09,1 +-0.566,-0.563,1 +-0.973,0.495,1 +0.932,1.43,1 +0.472,1.37,-1 +0.74,0.053,1 +-0.214,1.76,-1 +-0.53,-0.944,-1 +-0.936,-1.5,-1 +0.37,-0.0513,1 +0.695,0.651,-1 +0.88,0.275,-1 +-0.563,-0.164,1 +1.27,-0.675,1 +-1.32,-0.146,1 +1.7,-0.489,-1 +0.832,0.442,-1 +0.561,1.74,1 +0.617,1.11,-1 +-1.41,1.14,-1 +1.71,-0.225,-1 +0.473,0.339,1 +-0.855,1.44,-1 +-1.72,-0.41,1 +0.18,-0.0627,1 +0.712,0.837,-1 +0.211,1.71,1 +-0.755,-0.759,1 +0.661,1.2,-1 +0.902,0.947,-1 +0.544,-0.842,1 +1.67,0.974,1 +-0.0571,-1.5,-1 +-1.61,-0.39,1 +-0.261,-1.27,-1 +1.8,1.53,1 +0.134,-1.18,-1 +-0.279,-1.06,-1 +0.405,-0.555,1 +-1.64,-0.562,1 +-0.146,1.83,1 +0.499,-1.17,-1 +-0.171,-1.09,-1 +-0.175,-0.946,-1 +1.54,-0.336,-1 +1.33,0.406,-1 +0.833,0.653,-1 +0.69,1.57,-1 +-0.542,0.918,-1 +1.19,-0.319,1 +1.73,1.26,1 +-2.01,0.439,-1 +0.316,1.43,1 +0.933,0.711,-1 +2.43,0.869,1 +-0.472,-1.16,-1 +-1.17,0.126,1 +2.22,-2.04,-1 +1.07,0.438,-1 +-0.125,0.521,1 +0.857,0.875,-1 +0.569,-0.858,-1 +0.646,-0.932,1 +-0.983,1.1,-1 +-0.829,1.06,-1 +0.22,-0.867,-1 +-2.09,-1.22,1 +-1.64,-0.962,1 +0.0604,1.64,-1 +1.62,-1.16,1 +-0.021,-1.09,-1 +-1.11,-0.653,1 +1.13,-0.0116,-1 +-0.838,-1.17,-1 +0.102,-1.03,-1 +0.576,-1.05,1 +0.635,1.67,1 +0.336,0.611,1 +0.32,-0.998,-1 +-1.58,-1.61,1 +-0.129,-1.69,-1 +-0.249,0.505,1 +-0.986,-1.27,-1 +0.335,-0.483,1 +0.133,-0.568,1 +1.08,-1.02,1 +-0.486,-0.887,-1 +2.08,1.35,1 +0.638,1.11,-1 
+0.0965,-1.1,-1 +1.03,-0.382,1 +-0.705,0.129,1 +1.35,1.1,1 +-0.1,-1.05,-1 +0.431,-1.56,-1 +-1.37,-0.123,1 +-0.995,-0.832,-1 +-0.981,-1.04,-1 +-1.9,-1.32,1 +0.982,-0.452,1 +-0.494,-0.719,-1 +0.995,0.502,-1 +-0.507,0.357,1 +0.0853,0.789,1 +-0.0659,-1.33,-1 +-1.51,-0.276,1 +-1.03,0.681,-1 +1.19,-1.25,1 +-1.49,0.228,1 +0.409,1.76,-1 +-0.606,-0.129,1 +1.18,0.597,-1 +0.108,-0.304,1 +0.418,-0.897,1 +0.381,1.55,-1 +-0.951,-1.3,-1 +-0.119,2.06,1 +1.14,0.148,-1 +1.31,1.62,1 +0.724,0.417,-1 +-0.107,-0.232,1 +-1.57,0.905,-1 +-0.214,1.58,-1 +-0.212,-0.155,1 +-0.0683,-0.404,1 +-0.951,-1.33,-1 +0.533,0.541,1 +-1.36,0.887,-1 +0.98,-0.832,1 +-1.29,1.52,-1 +1.08,-8.17E-04,-1 +-0.147,1.85,-1 +-0.135,0.0707,1 +0.318,-0.87,-1 +0.632,1.74,-1 +-1.43,1.25,-1 +-0.499,-1.31,-1 +0.296,1.51,-1 +0.416,0.1,1 +-0.337,1.48,-1 +-1.3,-0.829,1 +-0.0299,0.617,1 +-1.78,-0.843,1 +0.314,-1.25,-1 +-0.801,1.78,-1 +0.0984,0.629,1 +-1.68,-0.528,1 +0.423,0.598,1 +0.00295,-0.125,1 +-0.528,-1.23,-1 +0.17,-0.719,-1 +0.202,-1.37,-1 +0.535,1.99,-1 +-0.491,1.22,-1 +-1.33,-0.276,1 +0.595,0.546,1 +-0.219,0.524,1 +0.0182,1.68,-1 +-0.907,0.416,1 +1.86,-0.36,-1 +-0.733,-1.34,-1 +-1.39,-0.322,1 +0.0578,-1.32,-1 +-1.7,0.869,-1 +-0.166,0.165,1 +-0.8,-1.38,-1 +-1.02,-0.146,1 +-0.145,-1.39,-1 +-0.439,0.331,1 +0.136,1.46,1 +-0.669,0.51,1 +-1.49,-0.0266,1 +0.386,-0.997,-1 +0.812,-0.15,1 +-0.303,-0.883,-1 +-0.621,1.16,-1 +-2.04,-0.978,1 +-0.772,0.933,-1 +-1.06,-0.86,1 +0.0914,0.493,1 +-1.17,0.222,1 +0.553,-0.159,1 +0.759,0.0328,1 +-1.04,-1.24,-1 +0.974,-0.951,1 +0.33,-1.48,-1 +0.105,1.77,-1 +-1.02,-0.305,1 +-0.635,1.94,1 +0.44,1.16,-1 +-0.97,-1.6,-1 +2.22,1.31,1 +-1.18,-0.122,1 +1.46,0.0665,-1 +0.219,1.59,1 +1.09,0.812,-1 +1.15,1.68,1 +0.955,-0.23,1 +-0.103,0.0369,1 +0.619,1.65,-1 +-1.43,-0.357,1 +-0.765,-1.13,-1 +-0.664,1.18,-1 +-1.09,0.289,1 +-0.766,1.06,-1 +-0.436,0.821,1 +0.6,-0.38,1 +-0.124,0.181,1 +1.03,-0.0835,-1 +-1.71,-1.27,1 +-1.46,-0.179,1 +1.34,1.37,1 +1.47,0.574,-1 +0.716,0.487,-1 +0.0337,-1.13,-1 +-0.432,-1.34,-1 
+0.344,1.49,-1 +1.29,-0.151,-1 +1.85,-0.558,-1 +0.0319,1.39,-1 +1.95,1.09,1 +-0.839,0.111,1 +-0.956,-0.77,-1 +1.08,0.839,-1 +0.0624,-1.05,-1 +-1.06,-1.24,-1 +-0.366,-1.52,-1 +-1.63,-0.363,1 +1.05,1.07,-1 +-0.47,-1.12,-1 +-1.45,0.377,-1 +-1.56,-1.05,1 +0.912,2.1,-1 +1.61,0.582,-1 +0.208,-1.21,-1 +0.929,-0.902,1 +-1.66,-1.52,1 +-1.85,-0.347,1 +-1.02,-0.0308,1 +0.569,-0.538,1 +1.43,-0.366,-1 +1.47,-0.0578,-1 +0.154,0.629,1 +-1.59,0.176,-1 +0.801,1.14,-1 +-1.5,-0.829,1 +0.271,0.0595,1 +0.188,0.914,-1 +-0.386,1.5,-1 +1.02,0.0541,-1 +0.82,-0.514,1 +0.804,0.6,-1 +-1.38,-1.11,1 +-1.01,-0.155,1 +0.683,1.6,1 +-1.62,-0.896,1 +-0.00515,1.75,-1 +-1.42,-1.28,1 +-1.8,-1.26,1 +-1.76,-0.632,1 +-0.0958,-1.66,-1 +-0.473,-1.55,-1 +-0.316,-1.36,-1 +-0.899,-1.25,-1 +-0.236,-1.54,-1 +0.546,-1.12,-1 +1.16,-1.3,1 +0.00921,-0.00839,1 +1.63,-0.43,-1 +-0.826,0.682,-1 +0.371,-1.23,-1 +-0.821,-0.855,1 +0.309,-1.03,-1 +-0.912,-0.484,1 +0.365,0.632,-1 +1.15,-1.69,1 +-0.714,-1.36,-1 +-0.467,1.3,-1 +0.177,-1.07,-1 +0.589,0.557,1 +-1.85,-0.35,1 +0.676,0.892,-1 +-1.49,-0.498,1 +-1.28,0.72,-1 +0.166,-1.25,-1 +0.448,0.71,-1 +-1.9,-1.46,1 +-0.922,0.533,-1 +0.307,0.12,1 +0.328,0.553,-1 +-0.847,-0.871,-1 +1.08,0.437,-1 +0.173,-0.428,1 +-0.818,-0.69,1 +-0.583,-0.743,-1 +-0.768,-0.0166,1 +-2.04,0.131,-1 +-1.16,-1.01,-1 +0.995,0.131,1 +1.32,-0.616,1 +0.993,0.397,1 +-0.606,3.19,-1 +0.811,0.00911,1 +-0.462,-0.707,-1 +0.304,-0.00393,1 +-1.94,0.598,-1 +-0.372,1.54,-1 +0.341,-1.43,-1 +-0.78,-0.43,1 +0.247,-0.233,1 +-0.561,0.473,1 +-0.524,-1.09,-1 +-1.99,-0.912,1 +-2,-0.0624,-1 +0.554,1.19,-1 +0.899,1.4,1 +0.794,1.01,-1 +0.417,0.348,1 +-0.597,-1.6,-1 +1.71,-0.289,-1 +-0.461,0.343,1 +-1.02,-0.469,1 +-0.434,1.98,-1 +-0.299,-1.4,-1 +-1.67,-0.875,1 +-0.192,1.04,-1 +0.108,-0.769,-1 +-0.58,1.34,-1 +-1.17,-0.95,1 +-0.124,-0.95,-1 +0.283,-1.56,-1 +0.418,-0.711,1 +1.2,0.286,-1 +0.295,-0.273,1 +-1.37,-0.904,1 +0.0175,1.47,-1 +1.64,0.513,-1 +0.557,1.38,-1 +-0.878,0.659,-1 +0.254,2.42,-1 +0.961,0.454,-1 +-0.994,0.192,1 
+-1.07,-0.622,1 +1.95,1.53,1 +0.281,-0.4,1 +1.29,0.283,-1 +0.586,-0.273,1 +-0.0272,1.86,1 +-1.65,-0.0692,1 +1.22,-0.345,-1 +-1.53,0.132,1 +-1.51,-0.248,1 +-1.2,-1.04,-1 +0.299,1.52,-1 +-0.687,1.01,-1 +0.479,-0.821,-1 +1.08,-0.133,-1 +-0.888,-1.59,-1 +-1.52,0.497,-1 +0.198,-1.41,-1 +-0.211,-0.492,1 +0.0647,-1.22,-1 +-1.28,-1.07,1 +-0.0779,-0.967,-1 +0.369,1.69,-1 +-0.826,-0.966,-1 +-0.394,1.6,-1 +1.2,-0.68,1 +0.942,1.89,1 +-0.817,-1.11,-1 +0.405,1.93,-1 +0.666,-1.52,-1 +0.152,-0.964,-1 +-0.82,-0.76,1 +0.72,-0.316,1 +-0.201,0.596,1 +-0.594,-1.21,-1 +-1.07,0.0456,1 +1.05,0.397,-1 +-1.12,-1.2,-1 +1.5,-0.38,-1 +-2.06,-1.23,1 +0.785,0.158,1 +0.867,-1.19,1 +0.624,-1.31,-1 +-0.509,-0.932,-1 +-0.565,-0.853,-1 +0.0662,1.62,-1 +0.0852,-0.852,-1 +0.399,-1.48,-1 +1.27,0.433,-1 +-0.441,-1.45,-1 +1,0.285,1 +-1.01,-1.47,-1 +0.873,-1.03,1 +-0.144,-1.03,-1 +1.25,-1.62,1 +-0.149,0.816,1 +0.893,0.691,-1 +-0.136,-1.41,-1 +0.569,1.44,-1 +-1.93,-0.0856,1 +-0.625,1.65,-1 +-1.16,-0.285,1 +-0.903,-0.988,-1 +0.486,1.05,-1 +-0.0145,-1.15,-1 +-0.678,-1.32,-1 +1.29,-0.499,-1 +-0.116,1.26,-1 +1.1,-0.374,1 +1.06,0.913,-1 +0.19,0.969,1 +-0.487,-1.19,-1 +-1.79,-1.46,1 +-0.0388,-0.984,-1 +0.893,-0.327,1 +1.38,-0.244,-1 +1.01,-1.12,1 +-1.58,-0.352,1 +1.52,-1.12,1 +0.136,-1.49,-1 +0.538,-1.39,-1 +-0.503,1.35,-1 +-0.991,0.114,1 +0.675,-1.53,-1 +-0.581,-1.34,-1 +-1.38,-1.08,1 +-0.298,-0.823,-1 +0.0919,-0.154,1 +-1.01,1.21,-1 +-1.89,-0.418,1 +2.22,1.18,1 +0.0608,-1.23,-1 +0.677,1.03,-1 +0.641,1.72,1 +-0.539,0.648,1 +0.934,0.358,-1 +0.0105,-1.26,-1 +-0.86,-1.27,-1 +0.883,-1,1 +2.32,1.3,1 +-0.566,-1.62,-1 +-1.25,-1.14,1 +2.69,1.12,1 +-0.529,-1.22,-1 +1.39,1.52,1 +1.21,-0.132,-1 +0.126,-1.08,-1 +-0.967,1.22,-1 +-0.121,-0.498,1 +1.4,0.43,-1 +0.994,0.348,-1 +-0.193,-0.288,1 +-0.545,-1.31,-1 +1.06,-0.232,-1 +1.06,0.763,-1 +0.646,1.23,-1 +0.673,1.07,-1 +-0.512,-0.954,-1 +-0.701,1.17,-1 +-0.957,0.848,1 +-0.676,0.797,-1 +1.47,-0.0839,-1 +0.514,0.715,-1 +-0.451,-1.34,-1 +-0.484,0.615,1 +1.17,0.868,-1 +1.2,0.318,-1 
+-0.795,0.276,1 +-6.89E-04,2,1 +-2.06,-0.332,-1 +1.42,-1.1,1 +1.07,0.646,-1 +-0.285,1.39,-1 +0.0524,-0.853,-1 +1.81,-0.0547,-1 +-0.379,0.638,1 +-1.14,1.16,-1 +0.878,-0.856,1 +-1.08,1.19,-1 +0.13,-0.469,1 +0.224,1.57,1 +2.13,1.16,1 +-0.604,-1.2,-1 +-0.109,-1.32,-1 +0.0803,-1.15,-1 +0.383,-0.987,-1 +0.799,-0.834,1 +0.866,0.612,-1 +0.897,0.294,-1 +2.46,1.21,1 +-0.267,1.37,-1 +2.04,-0.462,-1 +-1.17,-0.556,1 +-1.49,0.555,-1 +1.08,-1.76,1 +2.03,0.984,1 +1.17,1.28,-1 +-0.448,0.91,-1 +1.07,0.573,-1 +1.05,0.776,-1 +1.2,0.0691,-1 +-0.973,0.712,-1 +0.0399,-1.28,-1 +-0.522,0.206,1 +1.46,-1.61,1 +-0.912,1.34,-1 +-0.375,-1.39,-1 +-0.117,1.47,-1 +0.388,0.863,-1 +-1.53,0.0251,1 +1.38,1.27,-1 +0.237,-1.26,-1 +1.63,-2.16,1 +-0.148,1.6,-1 +0.159,0.586,1 +0.262,-0.538,1 +-1.83,-0.964,1 +-1.65,-0.731,1 +0.737,-0.183,1 +0.711,0.787,-1 +0.953,1.14,1 +-0.284,-1.02,-1 +1.47,-1.29,1 +-0.89,-1.18,1 +1.7,0.157,-1 +-0.451,0.723,1 +-0.499,-0.858,-1 +-0.515,0.625,1 +1.16,0.776,-1 +-1.55,-0.132,1 +-0.325,0.388,1 +0.372,-0.809,-1 +0.335,1.99,-1 +-0.595,0.625,1 +-1.03,-0.95,1 +0.452,1.95,1 +0.656,1.25,-1 +-0.872,-0.461,1 +0.425,0.0454,1 +-1.24,0.788,-1 +0.524,0.283,1 +0.83,0.692,-1 +0.347,-0.242,1 +1.29,0.622,-1 +-0.106,0.552,1 +-0.353,-0.836,-1 +-1.28,-0.00494,1 +-0.172,-0.676,1 +0.303,-1.18,-1 +0.455,-0.869,1 +-0.964,-1.17,-1 +-1.85,-0.467,1 +0.632,1.45,1 +0.552,-0.199,1 +2.14,-0.453,-1 +-0.688,0.709,1 +-0.0708,0.439,1 +0.177,-1.37,-1 +-0.135,0.136,1 +-0.288,0.385,1 +-0.974,-0.818,1 +-0.511,-0.734,-1 +0.449,0.648,-1 +-1.51,-0.193,1 +0.565,1.56,1 +1.19,0.19,-1 +-1.13,-0.622,1 +0.651,-1.08,-1 +-0.542,1.12,-1 +1.57,0.171,-1 +1.09,-0.538,1 +-0.399,1.3,-1 +1.36,-1.06,1 +0.614,-0.705,1 +0.823,-0.248,1 +0.894,-0.992,1 +-0.731,-0.906,-1 +0.017,-1.26,-1 +-0.487,-1.13,-1 +-1.03,0.277,1 +0.816,0.762,-1 +1.03,1.61,-1 +0.889,-0.566,1 +1.2,-0.944,1 +-0.375,1.36,-1 +1.46,1.69,1 +0.587,0.322,1 +0.985,1.16,-1 +1.11,-0.476,1 +1.18,-0.789,-1 +0.386,1.92,-1 +-1.29,-1.03,1 +0.921,0.3,-1 +-0.835,-1.04,-1 
+-1.04,-1.42,-1 +-0.356,-1.16,-1 +0.74,0.203,1 +-0.879,-0.975,-1 +-0.603,1.3,-1 +-1.05,-0.4,1 +-0.501,-1.43,-1 +0.501,0.0251,1 +-0.55,-1.07,-1 +0.133,2.04,1 +1.5,0.541,-1 +-0.741,0.979,-1 +0.764,0.381,1 +1.02,-0.549,1 +1.23,2.19,-1 +0.4,-1.39,-1 +-0.167,-0.633,1 +0.995,-1.63,1 +0.422,2.33,-1 +0.45,1.6,-1 +0.115,1.41,1 +0.00914,0.431,1 +-0.992,1.4,-1 +-0.95,0.144,1 +0.297,-1.11,1 +0.953,0.719,-1 +-0.683,0.419,1 +0.44,-0.778,-1 +-0.785,-1.11,-1 +0.182,-0.0823,1 +1.32,-1.23,1 +-0.762,-1.11,-1 +0.977,-1.01,1 +0.395,-1.04,-1 +-1.16,1.03,-1 +0.786,0.945,-1 +-0.0267,-1.09,-1 +-1.39,1.09,-1 +1.88,-0.141,-1 +-1.81,0.0758,1 +-0.0931,1.52,-1 +-0.0677,-0.394,1 +-0.544,-0.773,-1 +0.175,-0.345,1 +-1.07,-0.456,1 +0.534,-0.763,1 +-0.737,-0.617,-1 +-0.291,-0.858,-1 +-0.753,-1.25,-1 +0.875,1.82,1 +0.518,1.41,-1 +0.0526,-1.31,-1 +-0.0198,-1.22,-1 +0.28,0.429,1 +1.19,-1.33,1 +-0.926,0.694,-1 +1.3,-1.28,1 +1.46,-1.32,1 +0.475,-0.381,1 +0.685,0.546,-1 +-1.33,-0.0457,1 +-0.943,-1.26,-1 +0.226,-1.02,-1 +0.971,0.64,-1 +0.49,-0.0477,1 +-0.343,-1.39,-1 +0.301,-0.928,-1 +0.343,-1.14,-1 +-0.625,-1.14,-1 +2.2,0.935,1 +-0.429,1.13,-1 +-0.761,-1.44,-1 +-0.409,0.349,1 +1.07,-1.38,1 +0.209,-0.455,1 +0.521,0.375,1 +0.575,1.42,1 +-1.23,-1.15,1 +1.11,0.989,-1 +-0.595,1.09,-1 +1.2,0.361,-1 +-0.629,0.809,1 +1.15,-1.01,1 +0.238,-0.551,1 +-1.02,0.409,1 +-0.0743,-1.38,-1 +0.393,1.64,-1 +-0.126,-1.67,-1 +1.14,-1.57,1 +-1.82,-0.665,1 +0.636,0.756,-1 +-1.14,0.915,-1 +-1.2,0.4,1 +1.57,-0.0786,-1 +0.196,-1.71,-1 +-1.42,0.275,-1 +1.89,-0.243,-1 +-1.64,0.767,-1 +1.1,-0.364,1 +-0.113,0.626,1 +-0.48,0.0507,1 +-0.415,-1.3,-1 +1.28,-0.341,1 +0.399,-0.594,1 +1.01,-0.205,1 +-0.4,-1.37,-1 +-0.239,-0.619,-1 +-1.53,-1.72,1 +0.0745,-0.607,1 +1.53,0.0838,-1 +2.75,1.47,1 +0.26,-1.17,-1 +1.08,1.03,-1 +-0.239,-1.17,-1 +1.09,1.21,-1 +-1.19,-1,1 +1.29,-0.12,-1 +0.844,-0.402,1 +0.224,1.75,1 +-1.42,1.13,-1 +1.31,-1.08,1 +-0.275,1.52,1 +0.948,1.41,-1 +-0.999,0.906,-1 +-1.63,0.279,1 +1.13,1.51,1 +1.24,0.809,-1 +0.0941,1.44,-1 
+-0.636,1.72,-1 +-1.26,-0.457,1 +0.249,-1.46,-1 +-1.02,0.675,-1 +-1.77,1.01,-1 +-0.292,1.32,-1 +0.391,-1.42,-1 +0.377,1.72,1 +-0.826,-1.67,-1 +-0.528,0.296,1 +-0.00835,1.76,-1 +1.64,-0.152,-1 +0.317,-1.13,-1 +1,-0.0347,-1 +-2,-2.1,1 +0.0394,-1.35,-1 +-0.633,-1.18,-1 +0.0927,-1.02,-1 +-0.389,-1.32,-1 +0.83,1.67,1 +1.12,1.01,-1 +0.176,1.23,-1 +0.991,0.594,-1 +-0.0545,-1.45,-1 +0.927,-1.12,1 +-1.78,0.819,-1 +-0.0207,-1.14,-1 +0.915,0.611,-1 +-2.98,-0.16,-1 +-1.81,-1.21,1 +-0.0982,0.395,1 +0.98,0.766,-1 +0.47,-0.765,1 +1.41,0.9,-1 +-1.55,1.05,-1 +-1.53,0.39,-1 +0.621,1.48,1 +0.47,1.21,-1 +0.805,1.21,-1 +-1.4,-0.386,1 +-2.14,-1.59,1 +0.974,0.898,-1 +-1.16,-0.641,-1 +0.947,-0.888,1 +1.52,-0.85,1 +0.39,-1.2,-1 +-1.82,-0.899,1 +-1.42,-0.617,1 +-0.927,-0.406,1 +0.64,1.35,-1 +-0.2,-1.02,-1 +-0.0611,-1.4,-1 +-1.06,0.181,1 +1.22,0.541,-1 +-0.56,1.06,-1 +1.79,-0.834,-1 +0.362,1.59,-1 +2.55,1.4,1 +-0.248,0.514,1 +-0.908,1.26,-1 +1.91,1.37,1 +-1.35,-0.688,1 +-0.0906,2.01,-1 +0.409,0.212,1 +-0.69,-1.33,-1 +-1.51,0.213,1 +0.923,1.54,1 +-0.687,-0.458,1 +-0.928,1.55,-1 +1.01,-0.935,1 +-0.44,1.22,-1 +0.921,1.12,1 +-1.65,-2.05,1 +-1.57,-0.772,1 +2.32,1.32,1 +-0.64,-0.238,1 +1.21,-0.271,-1 +0.484,1.04,-1 +1.26,-0.25,-1 +-0.659,-1.27,-1 +0.0558,1.88,-1 +-1.7,-0.087,-1 +0.739,0.673,-1 +0.67,0.784,-1 +0.592,0.235,1 +-1.64,-1.91,1 +-1.74,-0.521,1 +-2.1,-1.06,1 +0.487,1.33,-1 +-0.772,-1.38,-1 +-0.856,-0.664,1 +0.0844,1.72,1 +-0.792,-0.132,1 +-0.954,0.848,-1 +-1.39,-1.35,1 +1.4,0.855,-1 +1.3,1.04,-1 +0.299,-0.819,-1 +-1.1,-1.13,-1 +1.03,-1.12,1 +-1.3,-0.811,1 +-0.178,-1.15,-1 +2.05,-0.105,-1 +-1.38,0.0973,1 +-1.01,-1.23,-1 +0.534,2.15,-1 +0.388,-0.0851,1 +0.484,1.04,-1 +0.646,0.778,-1 +-1.3,0.867,-1 +-1.38,-0.149,1 +-1.02,0.849,-1 +0.0971,1.58,1 +0.606,1.77,1 +0.129,1.52,-1 +0.689,-0.506,1 +-0.811,-1.15,-1 +0.318,1.82,1 +1.89,1.34,1 +0.326,1.45,-1 +-0.655,-1.44,-1 +1.85,1.61,1 +1.15,1.49,1 +0.261,0.214,1 +-0.108,-0.793,1 +-1.08,-1.39,-1 +0.342,-0.701,-1 +-0.0615,0.299,1 +0.905,-0.994,1 
+0.318,0.621,1 +0.851,0.598,-1 +-0.66,1.41,-1 +0.908,0.127,-1 +1.18,1.36,1 +0.999,0.811,-1 +-1.04,-0.0359,1 +-0.492,0.124,1 +0.409,-1.15,-1 +1.31,0.936,-1 +1.23,-1.4,1 +-1.82,-1.78,1 +-0.0723,-0.428,1 +-0.0681,-1.24,-1 +-0.96,0.644,-1 +0.974,-1.47,1 +0.0292,-1.21,-1 +-0.852,1.32,-1 +-0.777,1.22,-1 +-0.379,-1.6,-1 +0.896,0.363,1 +2.59,1.16,1 +-0.489,-1.13,-1 +-1.12,-0.297,1 +1.03,0.766,-1 +-0.621,0.425,1 +-0.537,-0.223,1 +1.85,-0.377,-1 +-0.0434,-0.442,1 +0.856,0.669,-1 +-0.219,0.128,1 +1.28,-0.588,-1 +-0.999,1.2,-1 +-0.834,-1.36,-1 +0.3,-1.18,-1 +1.05,-1.05,1 +-0.0203,-1.42,-1 +-0.157,1.84,-1 +0.208,-0.427,1 +-0.21,1.35,-1 +-1.51,-0.196,1 +1.46,-0.14,-1 +-0.78,-1.52,-1 +-0.00816,-1.38,-1 +-0.591,-1.08,-1 +-0.0202,-1.26,-1 +0.288,1.62,-1 +-1.51,0.384,-1 +-1.15,-0.508,1 +0.245,-0.681,-1 +0.757,-0.208,1 +-0.242,-1.47,-1 +1.01,1.21,1 +0.651,0.744,-1 +1.4,0.746,-1 +0.0426,-1.18,-1 +-1.67,0.809,-1 +-0.232,0.626,1 +-1.74,-1.83,1 +-0.31,-1.27,-1 +0.0968,0.589,1 +-0.202,1.64,-1 +0.15,0.442,1 +-0.82,-1.04,-1 +-0.804,0.687,-1 +0.148,1.26,-1 +0.832,-1.29,1 +0.577,0.628,-1 +-1.19,-1.14,1 +1.63,-0.539,-1 +0.289,0.598,1 +-0.679,0.536,1 +-1.78,-1.07,1 +0.277,-0.324,1 +-0.128,-1.14,-1 +0.613,-0.211,1 +0.468,0.986,-1 +0.295,-1.36,-1 +0.563,0.661,-1 +0.407,1.8,-1 +-0.744,-0.415,1 +0.203,0.617,1 +0.0169,-1.22,-1 +-0.295,-1.22,-1 +-1.01,-1.65,-1 +0.851,0.95,-1 +1.49,1.59,1 +-0.658,0.554,1 +-1.29,-1.14,1 +-1.61,-0.364,1 +-0.738,1.01,-1 +-1.45,-0.0697,1 +-1.5,-0.39,1 +-1.1,-1.24,-1 +-0.921,0.894,1 +-0.0633,-0.699,-1 +0.31,0.57,1 +1.12,-0.422,-1 +-1.08,0.989,-1 +0.686,1.2,-1 +-0.135,-1.12,-1 +-0.243,1.76,1 +0.812,-0.751,1 +0.477,0.791,-1 +-0.838,0.644,1 +-0.711,-1.13,-1 +-0.269,-1.47,-1 +1.18,-0.917,1 +1.03,-0.209,-1 +-0.599,-0.521,1 +1.26,-1.57,1 +-1.2,0.92,-1 +0.0538,-1.01,-1 +0.247,-1.28,-1 +-1.37,0.15,1 +1.21,0.197,-1 +-0.431,-0.346,1 +1.93,0.165,-1 +0.883,0.106,1 +-0.206,-0.0207,1 +1.24,0.608,-1 +0.262,-1.16,-1 +1.02,0.72,-1 +-0.384,-0.397,1 +-0.803,-1.21,-1 +0.677,0.964,-1 
+-0.148,-1.37,-1 +0.355,2.08,-1 +-0.811,-1.21,-1 +1.16,0.598,-1 +-1.05,-0.832,1 +-0.326,0.168,1 +-0.372,-1.38,-1 +1.13,1.38,1 +0.331,0.185,1 +1.67,-1.77,1 +-0.00621,1.56,-1 +0.166,-0.33,1 +0.847,-0.61,1 +0.68,0.739,-1 +-0.411,-1.41,-1 +-0.917,-1.08,-1 +0.956,0.558,1 +-2.11,-1.51,1 +-0.0791,-1.38,-1 +0.329,1.11,-1 +0.0545,2,-1 +0.451,-1.23,-1 +-1.44,-0.775,1 +0.841,-0.0776,1 +0.829,1.56,-1 +-1.64,-0.121,1 +0.0291,-1.12,-1 +0.823,1.04,-1 +-1.55,-0.744,1 +0.196,-0.779,-1 +1.16,0.654,-1 +0.35,1.22,-1 +0.487,1.45,-1 +0.811,-1.65,1 +-1.53,-0.665,1 +-1.39,-0.118,1 +-2.13,0.678,-1 +-1.8,-1.62,1 +-0.182,-1.54,-1 +0.569,0.481,1 +-0.463,-1.01,-1 +0.311,1.68,-1 +0.923,-1.02,1 +0.388,1.72,-1 +1.52,0.356,-1 +-1.03,1.31,-1 +0.637,1.55,1 +-0.122,-1.14,-1 +0.258,-0.85,-1 +-0.581,1.36,-1 +-1.78,-0.582,1 +1.09,0.6,-1 +1.17,0.426,-1 +-0.419,1.4,-1 +0.932,0.501,-1 +-0.132,0.954,-1 +-2.04,-0.584,1 +0.218,1.32,-1 +0.747,-1.05,1 +1.32,-0.311,1 +-0.72,-1.39,-1 +1.44,-0.0447,-1 +2.03,1.32,1 +-0.654,-1.02,-1 +-1.66,0.835,-1 +-0.515,0.976,-1 +-0.851,0.405,1 +-0.231,-0.75,-1 +0.271,-1.28,-1 +-0.335,-0.81,-1 +0.981,0.26,1 +-1.25,-0.576,1 +-0.849,-0.187,1 +-0.483,1.16,-1 +0.472,0.0979,1 +0.247,-0.9,-1 +-0.651,-0.159,1 +-0.278,-1.32,-1 +1.43,0.57,-1 +-0.473,0.085,1 +0.251,0.171,1 +-1.77,0.457,-1 +1.03,0.252,1 +-0.552,-1.18,-1 +0.91,0.665,-1 +-0.431,1.21,-1 +1.41,0.258,-1 +-0.579,0.79,-1 +-0.383,-1.04,-1 +-0.833,-1.23,-1 +-2.13,-0.347,1 +-0.975,0.725,-1 +0.139,-0.762,-1 +-0.847,-0.213,1 +-0.106,-1.14,-1 +1.22,-0.106,-1 +-0.555,-1.02,-1 +-0.851,0.681,-1 +-0.401,-1.17,-1 +-0.11,-0.372,1 +-0.425,0.996,1 +1.22,-0.607,1 +0.707,0.948,-1 +0.101,1.93,1 +-0.316,0.177,1 +-1.06,0.665,-1 +-0.896,0.207,1 +-0.381,1.41,-1 +0.915,-1.27,1 +1.24,0.31,-1 +2.44,1.37,1 +-0.0819,1.64,-1 +-1.48,0.74,-1 +0.4,1.68,1 +1.25,1.2,1 +-0.957,1,-1 +-0.153,-1.15,-1 +-1.98,0.345,-1 +-0.675,1.18,-1 +-1.39,-0.474,1 +-1.07,0.256,1 +-0.357,-0.944,-1 +-0.35,-1.49,-1 +-0.747,-0.72,-1 +0.84,-0.176,1 +-1.5,-1.22,1 +-0.294,-1.54,-1 
+1.22,-1.58,1 +2.25,1.21,1 +0.75,1.92,1 +-0.472,-0.0198,1 +0.833,-0.593,1 +0.423,1.74,1 +-1.49,0.738,-1 +-0.855,-1.33,-1 +0.325,-0.0325,1 +0.0526,1.42,-1 +1.72,-0.793,-1 +-1.43,-0.546,1 +0.427,1.99,-1 +-1.48,-1.23,1 +-1.19,-0.638,1 +-1.07,0.00513,1 +1.98,0.417,-1 +-0.264,0.0571,1 +-0.0143,1.55,-1 +0.408,0.0179,1 +-0.952,4.32E-04,1 +0.667,1.92,1 +-0.945,-0.639,1 +-0.387,0.104,1 +-0.633,-0.698,1 +-0.583,-1.35,-1 +-0.692,0.759,1 +-0.935,0.321,1 +0.544,1.22,-1 +0.0745,1.78,-1 +0.465,0.283,1 +-0.987,-0.814,1 +1.24,0.79,-1 +-0.427,-0.485,1 +0.708,0.961,-1 +-0.502,1.84,1 +1.32,-0.331,1 +0.454,1.48,1 +0.321,0.299,1 +-1.92,-1.5,1 +-1.33,0.719,-1 +-0.803,1.25,-1 +-0.265,1.28,-1 +-1.59,0.913,-1 +-1.23,-0.703,1 +-1.46,1.2,-1 +1.44,0.358,-1 +1.07,0.441,-1 +-0.139,-1.26,-1 +-0.632,1.35,-1 +-0.949,1,-1 +2.34,1.16,1 +-0.0753,0.267,1 +-0.179,-0.0492,1 +1.8,1.32,1 +-0.142,0.231,1 +1.19,-0.278,-1 +-0.924,-0.102,1 +1.35,0.472,-1 +0.151,0.322,1 +-0.616,0.413,1 +-0.779,-1.04,-1 +0.347,0.414,1 +0.745,0.366,-1 +-0.0187,-1.55,-1 +0.168,-1.06,-1 +-2.6,0.217,-1 +-0.228,-1.06,-1 +-1.06,-0.881,-1 +0.325,0.213,1 +0.926,-1.17,1 +0.479,1.48,-1 +0.0109,-0.378,1 +-0.445,-1.24,-1 +0.89,0.864,-1 +-0.115,-0.719,-1 +1.05,-0.96,1 +1.21,0.227,-1 +0.105,-0.208,1 +0.514,1.52,-1 +-0.644,-0.78,-1 +0.63,-1.23,-1 +0.558,-1.06,1 +-1.57,0.679,-1 +0.962,0.851,-1 +-0.588,-1.35,-1 +1.08,0.378,-1 +1.96,1.31,1 +-0.0685,0.931,-1 +-0.0253,0.0742,1 +0.552,-0.244,1 +0.95,-0.0297,1 +-0.298,0.158,1 +-0.279,-1.25,-1 +1.05,-0.23,-1 +1.27,-0.573,1 +-0.292,-0.224,1 +-0.174,-0.478,1 +1.72,-0.471,-1 +-0.862,1.01,-1 +-0.383,-1.26,-1 +-0.953,1.06,-1 +-1.24,1.14,-1 +-1.11,1.1,-1 +-1.58,0.272,-1 +-1.66,0.75,-1 +-0.364,1.7,1 +-1.4,-0.107,1 +0.171,2.11,1 +-1.75,0.318,-1 +-0.99,0.231,1 +1.2,-0.778,1 +0.57,1.42,-1 +0.802,1.21,-1 +1.25,-0.202,-1 +0.941,1.16,-1 +-0.163,-0.759,-1 +1.36,0.86,-1 +1.25,0.555,-1 +1.18,0.204,-1 +-0.459,-1.11,-1 +1.66,0.239,-1 +-1.05,0.966,-1 +-0.334,1.63,-1 +-0.593,-1.58,-1 +-0.974,1.05,-1 +-1.96,0.445,-1 
+0.361,0.931,-1 +1.35,-0.461,-1 +0.0727,-1.46,-1 +-0.956,-1.61,-1 +2.34,1.64,1 +-1.45,-0.167,1 +-1.83,-0.746,1 +-0.971,-0.906,1 +-0.813,-1.23,-1 +0.482,-0.653,1 +-0.14,-0.694,1 +0.56,-1.02,-1 +-0.693,1.28,-1 +0.358,-0.887,1 +1.8,1.36,1 +-0.666,-1.1,-1 +-0.239,-1.42,-1 +1.14,-0.286,1 +-0.315,-1.3,-1 +-0.153,1.97,1 +0.589,0.714,-1 +2.47,1.12,1 +0.605,1.28,-1 +-1.21,1.19,-1 +-0.704,1.46,-1 +-0.724,-0.973,-1 +-1.67,0.672,-1 +-0.0939,1.68,-1 +0.097,-0.593,1 +-1.8,-1.71,1 +-1.49,0.415,-1 +-0.722,1.19,-1 +0.407,-0.94,-1 +0.463,-1.28,-1 +0.185,0.261,1 +1.97,1.41,1 +-0.246,-0.739,-1 +-0.889,-0.417,1 +1.19,0.554,-1 +-1.8,0.676,-1 +0.291,1.34,-1 +-0.267,-0.896,-1 +-0.59,-0.328,1 +0.0885,-1.22,-1 +-1.42,0.624,-1 +0.66,0.835,-1 +2.43,1.31,1 +0.217,-0.961,-1 +-0.877,0.263,1 +-0.899,0.0621,1 +-0.141,-0.797,-1 +-0.784,1.81,-1 +0.104,-1.04,-1 +-0.973,-0.126,1 +-0.278,-1.09,-1 +-0.914,-0.522,1 +0.238,1.74,-1 +-1.84,0.0932,-1 +1.64,1.53,1 +-0.19,-1.42,-1 +-0.508,-1.7,-1 +0.187,-1.02,-1 +0.16,0.189,1 +-1.71,-0.319,1 +0.793,0.836,-1 +0.383,-0.824,1 +-0.0697,1.56,-1 +-1.64,-1.05,1 +-0.739,-0.066,1 +1.22,-1.46,1 +1.48,1.01,1 +-0.876,0.232,1 +1.75,0.016,-1 +-0.048,-1.06,-1 +0.316,-0.601,1 +-1.87,-1,1 +-0.769,-1.46,-1 +-1.53,0.656,-1 +1.29,0.337,-1 +0.403,0.0297,1 +-1.31,-1.11,1 +1.31,-1.81,1 +0.617,-0.585,1 +0.713,0.948,-1 +0.994,0.0734,-1 +-0.345,0.376,1 +-0.84,-1.16,-1 +0.515,2.21,-1 +1.37,-0.8,1 +-0.197,0.544,1 +0.913,-0.374,1 +1.18,-0.277,-1 +0.834,-0.177,1 +-0.874,0.719,1 +1.09,-0.384,1 +-0.972,-1.08,1 +-0.85,0.963,-1 +1.21,0.306,-1 +0.933,0.716,-1 +1.66,0.243,-1 +1.75,1.33,1 +1.58,0.206,-1 +0.415,0.986,-1 +0.0128,1.29,-1 +-1.45,-0.0461,1 +0.928,-0.102,1 +0.834,0.925,-1 +0.824,-0.673,1 +-0.0275,-0.582,1 +1.19,-1.67,1 +1.34,0.32,-1 +-0.0839,1,1 +2.42,1.5,1 +0.39,1.27,-1 +-0.341,0.0621,1 +0.265,-1.06,-1 +-1.78,-0.572,1 +0.2,-0.609,1 +-1.51,0.348,1 +0.0311,-0.978,-1 +0.586,0.402,-1 +-0.0264,-0.952,-1 +-1.19,1.23,-1 +-1.19,-1.32,-1 +-0.534,1.15,-1 +0.418,0.616,1 +0.0194,-1.13,-1 
+0.83,-0.939,1 +-1.37,-0.213,1 +-0.652,-1.54,-1 +-0.453,-0.0737,1 +-0.835,0.671,-1 +-3.09,-0.832,-1 +-0.598,-1.38,-1 +-0.0215,0.704,1 +-0.00124,-0.938,1 +-0.145,0.471,1 +0.633,0.901,-1 +-0.77,0.637,-1 +-0.756,-0.943,-1 +-1.02,0.968,-1 +-0.637,0.942,1 +-0.0254,-0.79,1 +-0.494,-1.22,-1 +1.09,-0.643,1 +0.366,0.164,1 +0.595,0.677,-1 +-0.132,1.68,1 +1.32,0.376,-1 +1.09,0.614,-1 +-0.26,1.36,-1 +-0.467,-0.945,-1 +-1.6,-0.0414,1 +1.23,-2.2,1 +1.93,1.41,1 +1.87,1.13,1 +0.785,-0.601,1 +1.8,0.757,-1 +-0.369,-1.14,-1 +-0.376,1.33,-1 +-1.43,0.245,-1 +-0.96,1.34,-1 +1.22,0.423,-1 +0.0951,-1.39,-1 +0.439,-1.09,-1 +0.886,-0.292,1 +1.22,0.834,-1 +1.12,-1.72,1 +-1.3,1.16,-1 +0.675,0.667,-1 +0.456,0.278,1 +-0.815,1.18,-1 +1.72,-0.259,-1 +-0.497,-1.29,-1 +-1.17,0.564,1 +0.0786,1.63,-1 +-0.64,-0.939,-1 +0.264,-0.605,1 +-0.222,0.156,1 +7.25E-04,-0.0366,1 +1.1,1.38,1 +-0.865,-1.1,-1 +1.32,-1.48,1 +-1.51,0.481,-1 +0.518,1.7,-1 +-0.845,-0.00258,1 +0.00273,-0.973,-1 +0.635,0.559,1 +1.27,-0.685,-1 +1.03,0.28,-1 +-0.964,0.108,1 +-0.132,-1.37,-1 +-0.186,-0.0925,1 +0.0585,-0.378,1 +-0.0876,0.255,1 +-0.56,-1.12,-1 +1.47,-2.05,1 +0.113,0.462,1 +-0.0189,-1.28,-1 +0.378,0.987,-1 +-1.32,0.516,1 +-1.07,-0.306,1 +1.49,-0.886,-1 +-0.138,-1.37,-1 +-1.7,-1.16,1 +0.598,0.133,1 +1.74,0.577,-1 +1.71,0.569,-1 +-0.973,0.107,1 +-1.48,-0.297,1 +1.19,0.812,-1 +-0.607,-0.894,-1 +-1.02,-1.29,-1 +-1.31,-0.627,1 +-1.02,0.521,1 +0.752,1.58,-1 +-0.0666,0.0656,1 +0.615,0.375,-1 +-0.142,1.65,-1 +0.335,1.95,-1 +1.22,0.247,-1 +-0.245,-1.18,-1 +0.409,-1.51,-1 +-1.3,-1.08,1 +0.505,1.45,-1 +-1.19,-0.976,-1 +0.388,0.484,1 +1.06,-0.453,1 +-0.911,-0.908,-1 +1.11,0.428,-1 +0.903,-0.339,1 +-0.815,0.334,1 +1.35,-0.387,1 +0.185,0.617,-1 +-1.96,-0.942,1 +-1.08,-0.952,-1 +-1.67,-0.266,1 +-0.856,-0.235,1 +-0.918,1.44,-1 +1.24,1.45,1 +1.41,-0.447,-1 +0.177,0.731,1 +0.0886,-0.844,-1 +0.604,0.555,-1 +-0.394,-0.253,1 +0.319,1.61,1 +-0.808,-1.5,-1 +-0.44,0.298,1 +-0.634,1.08,-1 +0.0116,-1.09,-1 +-0.651,-0.103,1 +0.0605,2.06,1 
+-1.04,0.937,-1 +2.62,1.12,1 +-0.499,-0.241,1 +-1.46,-0.108,1 +1.01,0.677,-1 +0.627,0.234,-1 +-0.436,1.45,-1 +0.0165,-0.639,1 +1.42,-0.656,-1 +-1.23,-1.3,1 +1.35,-0.177,-1 +1.77,1.59,1 +-1.79,-0.0728,1 +-0.26,-1.23,-1 +-0.259,1.52,-1 +1.27,-0.747,-1 +-2.07,-0.521,1 +0.477,1.83,-1 +-0.195,1.31,-1 +1.22,1.04,-1 +-1.44,0.0919,1 +-0.784,-0.00647,1 +-1.55,0.121,1 +-0.236,1.44,-1 +1.24,-0.687,1 +-2.84,-0.177,-1 +-0.767,-1.3,-1 +1.14,0.376,-1 +1.05,-0.723,1 +0.814,0.817,-1 +-1.26,-0.119,1 +1.41,1.63,1 +0.933,-1.24,1 +-1.14,0.0337,1 +0.85,0.726,-1 +0.624,0.621,-1 +1.59,-0.0963,-1 +-1.8,-1.25,1 +0.00356,1.87,-1 +0.178,-1.24,-1 +1.18,0.334,-1 +0.91,0.864,-1 +0.145,-0.415,1 +1.26,-0.553,1 +-0.187,-0.267,1 +0.769,0.855,-1 +-1.31,-1.16,1 +1.08,0.148,1 +-1.25,0.857,-1 +-0.0656,1.24,-1 +-1.82,-0.705,1 +1.53,-1.81,1 +-0.177,-1.12,-1 +-1.03,-0.357,1 +-1.68,0.398,-1 +0.389,-0.804,1 +0.272,-1.12,-1 +0.846,1.87,-1 +-0.0686,1.72,-1 +-0.102,-1.31,-1 +0.168,1.97,1 +0.538,-1.25,-1 +0.677,0.108,1 +1.23,-0.146,1 +0.73,-0.99,1 +1.7,-0.0336,-1 +-0.968,0.348,1 +2.51,0.901,1 +0.643,-0.403,1 +-0.877,-1.08,-1 +-0.975,-0.273,1 +0.321,0.393,1 +-1.24,-0.0104,1 +-0.509,-0.824,-1 +0.0817,0.418,1 +1.95,0.902,1 +1.68,-0.32,-1 +-0.67,-0.55,1 +-0.33,0.301,1 +-0.736,0.683,1 +0.235,-1.06,-1 +-0.59,-1.23,-1 +1.08,0.72,-1 +0.527,-1.13,-1 +0.71,1.7,1 +0.0243,-0.719,-1 +0.207,0.65,1 +1.57,0.483,-1 +1.02,-0.351,1 +-0.21,0.171,1 +0.88,-0.241,-1 +-1.08,-0.868,1 +-1.52,0.246,1 +-0.834,-1.19,-1 +1.29,0.23,-1 +1.02,0.976,-1 +-0.241,0.774,1 +-0.451,1.7,-1 +-0.506,-1.2,-1 +1.35,0.657,-1 +-1.93,-0.315,1 +1.21,-0.774,1 +0.82,1.83,-1 +-2.05,-1.93,1 +1.3,0.739,-1 +0.832,1.33,-1 +-0.959,0.41,1 +0.432,0.749,-1 +2.12,0.978,1 +1.54,-0.193,-1 +-0.0454,1.75,-1 +-1.86,-1.3,1 +0.464,-0.792,1 +-0.539,-1.39,-1 +1.21,-0.873,1 +1.56,-0.177,-1 +0.292,-0.159,1 +0.445,-1.38,-1 +2.68,1.17,1 +-0.463,0.0231,1 +-0.951,1.39,-1 +0.572,-0.812,1 +-0.114,-0.147,1 +0.477,-1.24,-1 +-0.254,-1.27,-1 +-0.449,1.04,-1 +-0.311,-1.07,-1 +0.566,-0.977,1 
+1.47,0.486,-1 +1.37,1.45,1 +-0.543,7.62E-05,1 +-1.4,0.261,1 +-1.53,-0.466,1 +0.26,-0.54,1 +1.96,1.28,1 +-0.707,-1.43,-1 +-2.1,-0.604,1 +0.721,0.731,-1 +-0.234,-0.974,-1 +1.26,0.462,-1 +-0.527,0.887,-1 +1.03,0.805,-1 +0.276,-0.218,1 +-1.07,-0.039,1 +1.35,-1.51,1 +0.507,1.02,-1 +0.306,-1.13,-1 +-1.21,-0.392,1 +-0.793,0.905,1 +0.204,-1.29,-1 +1.39,-0.631,1 +-0.218,-1.63,-1 +-1.34,0.626,-1 +-0.35,2.07,-1 +0.021,-0.905,-1 +-0.614,-1.22,-1 +0.753,0.781,-1 +1.23,0.0999,1 +0.445,0.0484,1 +-0.959,1.07,-1 +-0.204,-0.181,1 +-2.18,-1.88,1 +-0.474,0.264,1 +0.904,1.15,-1 +-0.876,0.92,-1 +1.1,-0.313,1 +1.26,-1.03,1 +0.961,-1.33,1 +-0.224,-1.02,-1 +-0.558,-1.73,-1 +-0.18,-0.639,-1 +0.326,0.00626,1 +0.279,0.149,1 +-1.43,0.0281,1 +0.449,-0.0489,1 +0.465,0.842,-1 +-0.49,1.25,-1 +0.368,-0.856,-1 +-0.0242,-1.28,-1 +0.42,2.1,-1 +-1.09,-0.463,1 +-0.391,-0.547,1 +0.703,-0.583,1 +-1.3,0.469,-1 +-1.44,-0.505,1 +1.01,0.865,-1 +1.55,-0.649,-1 +0.447,-1.18,-1 +-0.0433,0.605,1 +-1.56,-0.329,1 +-1.37,0.808,-1 +-0.448,-0.354,1 +1.53,0.368,-1 +-0.672,1.35,-1 +0.883,0.598,-1 +0.491,0.194,1 +-0.11,-1.21,-1 +-0.436,1.86,-1 +0.924,0.701,-1 +1.24,0.908,-1 +-0.058,-0.904,-1 +0.195,-1.27,-1 +2,1.28,1 +0.562,0.172,1 +0.278,0.383,1 +-0.711,0.465,1 +-0.605,-1.52,-1 +1.39,0.155,-1 +0.38,-1.35,-1 +-1.07,-0.709,1 +-0.958,0.812,-1 +-0.296,0.538,1 +0.952,0.836,-1 +0.917,0.231,-1 +-1.72,0.142,1 +-1.52,-0.368,1 +-1.53,0.257,-1 +1.38,-0.645,1 +0.102,-1.36,-1 +1.45,-1.25,1 +-1.12,0.589,1 +0.613,-0.0651,1 +2.44,-0.448,-1 +1.33,-1.71,1 +0.289,1.01,-1 +-1.05,0.219,1 +-0.405,-1.09,-1 +0.819,0.546,-1 +1.58,-0.468,-1 +1.07,1.53,-1 +0.434,0.295,1 +-0.385,-0.734,1 +0.85,-0.829,1 +-0.901,-1.68,-1 +-0.0677,1.74,-1 +0.244,1.84,1 +-1.84,-0.958,1 +1.21,0.00827,-1 +-1.13,-1.06,-1 +0.0114,1.66,-1 +1.89,-0.633,-1 +0.367,-0.451,1 +0.344,0.908,-1 +-0.416,0.246,1 +-0.716,-1.19,-1 +-1.07,-0.552,1 +-1.94,0.832,-1 +0.21,2.18,-1 +-0.932,-1.15,-1 +0.23,-1.15,-1 +1.55,1.1,1 +0.497,1.27,-1 +-0.599,1.44,-1 +0.928,1.45,1 +1.38,1.25,1 
+-0.866,-0.197,1 +-0.817,-0.783,-1 +0.221,0.147,1 +-0.314,-0.919,-1 +0.802,0.852,-1 +1.25,-1.38,1 +0.991,-1.1,1 +-0.927,-0.595,1 +0.517,1.71,-1 +0.952,-0.782,1 +-0.215,-0.217,1 +-0.207,-1.37,-1 +-0.223,1.65,1 +-0.882,-0.726,-1 +-0.747,-0.509,1 +1.27,-0.519,-1 +0.009,0.928,1 +-1.17,0.586,-1 +0.521,0.56,1 +-1.01,0.768,-1 +0.605,0.378,1 +-0.582,-0.587,1 +0.125,-1.24,-1 +-0.471,-0.919,-1 +1.4,0.541,-1 +1.49,-1.53,1 +0.459,1.69,1 +0.183,-1.16,-1 +-0.104,0.48,1 +-0.0541,-1.31,-1 +0.728,0.752,-1 +-1.13,0.654,-1 +-0.265,1.71,-1 +-1.96,-0.442,1 +0.675,0.886,-1 +-1.76,-1.33,1 +-1.92,-1.17,1 +-1.42,-1.4,-1 +-0.39,-1.01,-1 +-0.634,1.21,-1 +-0.218,-1.05,-1 +0.332,-0.527,1 +0.171,-0.502,1 +-2.13,-1.85,1 +0.0273,-0.683,-1 +-0.0897,-0.115,1 +0.603,1.8,1 +-0.0232,-1.24,-1 +-1.48,0.944,-1 +-0.0127,-1.12,-1 +-0.587,1.01,-1 +0.163,-1.13,-1 +0.93,0.65,-1 +1.5,-0.0608,-1 +-0.0911,-0.0844,1 +1.64,-0.308,-1 +0.404,-1.1,-1 +1.37,1.24,1 +-1.22,-0.107,1 +-0.642,-1.35,-1 +-0.181,-1.1,-1 +-0.218,-0.882,-1 +-0.683,0.139,1 +-0.305,1.18,-1 +-1.19,0.814,-1 +-0.758,0.794,-1 +0.811,1.07,-1 +1.45,-1.03,1 +-0.169,-0.601,1 +0.462,1.89,-1 +-1.89,-1.16,1 +-0.0199,-1.32,-1 +-0.396,-1.45,-1 +-1.08,0.89,-1 +0.624,-0.285,1 +-0.177,1.51,-1 +-0.437,-0.789,-1 +1.22,1.48,1 +-0.961,0.755,-1 +-1.64,0.572,-1 +1.34,0.274,-1 +1.16,-0.254,-1 +-0.38,-1.34,-1 +0.918,1.72,1 +-1.14,-0.927,1 +-0.516,1.46,-1 +0.188,1.59,1 +-1.51,-0.27,1 +1.21,0.151,-1 +1.05,-1.11,1 +0.385,-1.07,1 +-0.139,-0.363,1 +0.953,-1.3,1 +0.725,1.33,-1 +0.0966,-1.06,-1 +-0.347,-1.15,-1 +-0.0605,1.65,-1 +-1.88,-1.26,1 +0.484,-0.427,1 +-0.618,-0.506,1 +-0.121,-1.05,-1 +-0.169,-0.135,1 +-1.28,-0.542,1 +1.62,-0.131,-1 +-0.446,-1.53,-1 +1.16,-0.357,1 +0.298,-0.878,-1 +-1.46,1.11,-1 +-0.764,0.218,1 +0.901,-0.952,1 +0.955,0.142,1 +-1.12,-1.17,-1 +-0.0845,1.15,-1 +-0.844,0.371,1 +-1.43,0.0911,1 +-0.978,-1.05,1 +-0.899,-1.38,-1 +0.942,1.83,1 +1.35,0.677,-1 +-0.678,1.6,-1 +-0.145,1.65,-1 +1.35,-0.518,-1 +-1.54,-0.598,1 +1.12,1.14,-1 +-0.715,-1.49,-1 
+-1,-0.885,-1 +-0.357,1.51,-1 +-0.859,1.26,-1 +0.8,1.56,-1 +-1.53,0.795,-1 +0.707,-0.778,1 +-1.62,0.213,-1 +-0.836,-1.36,-1 +-1.84,-0.284,1 +-0.685,0.727,1 +0.875,-1.47,1 +-0.079,0.227,1 +0.731,1.74,1 +-1.15,-0.659,1 +-1.6,0.788,-1 +-0.114,0.846,1 +-1.14,1.08,-1 +0.143,0.845,-1 +0.678,0.961,-1 +-0.397,0.563,1 +-0.872,0.663,-1 +-0.262,1.71,1 +-1.2,-1.4,-1 +1.08,0.088,-1 +0.523,-1.38,-1 +1.23,-0.0874,-1 +1.13,-1.75,1 +-0.466,-0.851,-1 +-0.336,0.182,1 +1.04,0.0909,1 +0.553,-0.172,1 +0.134,-1.06,-1 +-0.882,-0.608,1 +1.24,0.441,-1 +-0.714,-1.65,-1 +0.547,1.15,-1 +-1.47,0.96,-1 +0.437,2.35,-1 +-0.539,0.292,1 +0.543,0.913,-1 +-1.34,-1.05,1 +-0.326,0.0769,1 +0.843,0.634,-1 +0.452,-0.204,1 +0.974,0.877,-1 +1.97,0.041,-1 +0.554,-0.454,1 +-1.29,-0.264,1 +-1.18,-0.898,1 +1.08,1.37,1 +-1.04,-1.07,-1 +-0.325,0.969,1 +0.72,0.677,-1 +0.803,0.349,-1 +-1.58,-0.798,1 +1.3,-0.346,-1 +0.332,-0.69,1 +-0.0128,-1.27,-1 +-1.83,0.379,-1 +-1.21,1,-1 +-0.76,-1.26,-1 +-0.665,1.61,-1 +1.12,-0.447,1 +-0.23,2,1 +-0.176,-0.207,1 +-1.75,-1.92,1 +-0.599,1.25,-1 +0.101,0.881,1 +-0.298,-1.16,-1 +1.01,0.257,-1 +-0.479,0.91,-1 +-0.428,-0.509,1 +-1.74,0.367,-1 +0.637,1.25,-1 +0.00879,1.47,-1 +-0.302,-0.116,1 +0.157,0.956,-1 +-0.617,1.61,-1 +0.934,0.32,-1 +-0.226,-1.45,-1 +-0.667,-0.895,-1 +-0.0337,-1.36,-1 +-0.0411,0.32,1 +-1.78,-0.607,1 +-0.495,1.31,-1 +-0.823,0.729,-1 +0.916,-0.0255,1 +1.7,0.0023,-1 +-0.563,1.29,-1 +-0.995,-0.507,1 +-1.39,-0.943,1 +-1.75,-1.63,1 +0.742,0.744,-1 +-0.435,-1.53,-1 +2.27,0.284,-1 +0.175,-0.887,-1 +1.17,1.37,-1 +-1.75,-1.27,1 +-1.33,0.878,-1 +-0.238,1.74,1 +0.99,1.02,-1 +-1.9,0.856,-1 +-1.64,-1.16,1 +-0.415,0.463,1 +0.435,0.397,1 +-0.306,2.28,-1 +0.569,1.15,-1 +1.04,-1.17,1 +-1.28,-1.12,1 +-1.07,1.3,-1 +-0.211,-0.637,-1 +-0.104,-1.15,-1 +-0.0169,-0.939,-1 +-1.4,0.247,1 +-0.897,-0.972,-1 +-1.68,-0.472,1 +0.655,1.72,1 +-0.58,-0.579,1 +0.688,-0.878,1 +1.55,-0.00546,-1 +-2.25,0.167,-1 +0.252,0.258,1 +1.54,-0.749,-1 +-1.5,0.0847,1 +-0.851,-1.26,-1 +-0.403,-0.636,1 
+-0.138,-1.03,-1 +1.94,1.25,1 +-0.651,1.12,-1 +-0.562,-1.12,-1 +1.66,-0.402,-1 +-1.31,0.239,1 +-0.0113,-0.78,-1 +0.0821,-0.398,1 +-0.639,0.68,1 +-1.49,-0.221,1 +-0.006,-0.32,1 +0.782,-0.139,1 +-1.33,0.897,-1 +-1.19,-0.774,1 +0.127,1.94,-1 +-1.29,-0.23,1 +0.76,-1.16,1 +-0.992,-0.0156,1 +-0.396,1.21,-1 +-0.665,-1.04,-1 +1.24,-1.33,1 +-0.22,-1.12,-1 +-1.54,0.562,-1 +-0.159,-0.12,1 +1.03,-0.805,1 +-0.625,1.38,-1 +-1.79,-0.124,1 +-0.0656,-1.57,-1 +1.12,-0.908,1 +-1.28,-0.259,1 +-0.309,1.16,-1 +-0.625,-0.675,-1 +-0.752,-0.117,1 +-1.1,0.322,1 +1.09,1.49,1 +-0.867,0.214,1 +0.596,-0.172,1 +-0.704,0.71,-1 +0.672,1.57,-1 +1.38,0.698,-1 +-1.34,0.54,-1 +0.429,1.94,1 +0.381,1.61,1 +0.129,-0.889,-1 +-1.01,-0.0908,1 +-0.651,-0.528,1 +0.0439,2.03,1 +-2.37,-0.01,-1 +-0.553,-0.687,1 +-1.47,-0.817,1 +1.36,-0.412,-1 +1.92,0.0429,-1 +-0.673,-0.177,1 +1.32,0.218,-1 +0.221,0.946,1 +1.05,0.899,-1 +0.942,0.248,-1 +-1.7,-1.79,1 +0.359,1.52,-1 +-0.977,0.639,1 +-1.02,-1.19,-1 +-0.496,1.41,-1 +1.45,0.273,-1 +1.16,-1.1,1 +-0.878,-1.46,-1 +0.983,0.203,-1 +0.167,-0.793,1 +-1.67,1.1,-1 +-1.28,-0.471,1 +1.1,0.74,-1 +0.435,1.31,1 +-1.41,-0.357,1 +-0.00145,-0.7,1 +1.5,-0.993,-1 +-0.248,-1.69,-1 +-0.819,-1.21,-1 +1.78,0.272,-1 +-1.04,1.13,-1 +-0.536,-0.814,-1 +-0.663,-1.47,-1 +0.355,-0.222,1 +0.486,0.991,-1 +-1.33,-0.12,1 +0.975,-1.34,1 +0.431,1.64,1 +-0.516,-0.92,1 +0.0479,-1.06,-1 +-1.04,-1.08,-1 +-0.767,-1.2,-1 +0.613,1.35,-1 +-0.834,-0.258,1 +0.473,-1.11,1 +-0.734,1.25,-1 +2.2,1.14,1 +-0.443,1.02,-1 +-0.568,-0.873,-1 +-0.452,-0.717,1 +0.14,1.25,-1 +1.22,0.325,-1 +2.18,1.13,1 +1.52,0.315,-1 +-0.276,-1.19,-1 +-0.29,0.7,1 +-1.22,0.177,1 +0.799,-0.99,1 +-0.0163,-1.07,-1 +1.51,-0.355,-1 +0.431,-1.46,-1 +-0.264,-1.14,-1 +1.14,0.607,-1 +-0.508,-0.694,1 +1.53,0.228,-1 +2.07,1.37,1 +1.78,1.01,1 +-0.0175,-1.21,-1 +-0.295,-1.27,-1 +-0.743,0.685,1 +0.405,-1.2,-1 +0.351,-0.964,-1 +0.192,1.94,1 +-0.867,1.09,-1 +1.21,0.752,-1 +0.545,1.28,-1 +0.985,0.114,-1 +-0.544,-0.831,-1 +1.12,0.293,-1 +-1.44,-0.47,1 
+-0.077,1.87,1 +-0.956,1.11,-1 +1.35,-0.962,1 +0.234,-0.413,1 +-0.861,-1.21,-1 +0.599,1.14,-1 +-1.38,-0.826,1 +-1.37,0.928,-1 +0.395,1.21,-1 +1.01,-0.437,1 +1.1,-1.17,1 +0.999,-1.12,1 +-0.381,-0.917,-1 +0.817,-0.941,1 +0.184,0.558,1 +-0.334,-1.34,-1 +0.974,-1.32,1 +0.655,1.14,-1 +-0.209,-0.356,1 +-0.511,1.53,-1 +-0.891,-0.617,1 +0.0545,-1.32,-1 +0.545,-0.165,1 +1.29,0.488,-1 +0.364,-0.269,1 +-1.11,0.308,1 +-0.421,-1.09,-1 +0.849,0.907,-1 +-1.17,1.08,-1 +0.629,-0.617,1 +0.424,0.0533,1 +0.179,1.87,-1 +1.36,-0.117,-1 +1.02,0.729,-1 +0.252,-0.389,1 +-0.549,1.03,-1 +1.27,0.775,-1 +-0.0192,-1.03,-1 +-1.31,0.0708,1 +-0.719,0.242,1 +-0.633,0.257,1 +-0.491,-1.55,-1 +-1.84,-0.366,1 +-0.457,-0.983,-1 +-0.565,-0.0822,1 +0.401,-1.31,-1 +-0.561,-0.787,1 +-1.74,0.697,-1 +0.745,0.877,-1 +0.617,1.34,1 +-1.77,-1.19,1 +1.88,0.325,-1 +1.09,0.155,-1 +0.0899,-1.09,-1 +0.229,0.613,1 +1.05,-0.655,1 +-1.15,-0.545,1 +-0.555,0.908,-1 +1.45,0.211,-1 +0.132,-1.49,-1 +1,0.654,-1 +-0.487,-0.797,-1 +0.643,-0.351,1 +-0.0744,-0.463,1 +-0.0476,-1.24,-1 +0.478,0.46,1 +-0.398,-1.28,-1 +0.335,1.15,-1 +-0.397,-1.1,-1 +0.625,1.38,1 +-0.648,-0.713,1 +-0.4,-1.15,-1 +0.762,-0.334,1 +0.137,1.94,1 +-0.726,-1.17,-1 +-1.13,1.01,-1 +1.34,-0.632,1 +-0.0261,-1.38,-1 +-1.21,-1.22,-1 +1.55,0.482,-1 +-0.794,1.24,-1 +0.517,1.46,-1 +-0.865,1.6,-1 +-1.57,-0.561,1 +0.329,0.606,-1 +0.497,2.23,-1 +-0.529,0.365,1 +-1.82,0.474,-1 +0.948,0.985,-1 +-0.488,-1.31,-1 +1.46,-1.06,1 +-1.67,0.88,-1 +2.4,1.2,1 +1.57,-0.909,1 +-0.144,1.68,-1 +0.123,-0.283,1 +-1.23,-0.977,-1 +-1.69,0.614,-1 +1.71,-0.945,-1 +-0.135,-1.1,-1 +-1.35,-0.57,1 +-0.0117,-1.34,-1 +1.31,0.151,-1 +0.983,0.611,-1 +-1.52,-0.00335,1 +0.976,1.24,-1 +0.0271,1.83,1 +-1.4,0.98,-1 +-0.45,-0.928,-1 +-2.09,0.604,-1 +0.455,0.877,-1 +0.193,1.72,-1 +-0.304,1.5,-1 +-1.98,-0.655,1 +-0.0237,-1,-1 +-0.184,-1.4,-1 +0.458,-0.157,1 +-1.45,-1.22,1 +-0.152,-1.11,-1 +-0.0208,1.76,1 +1.31,0.208,-1 +-1.06,-1.54,-1 +-1.24,0.1,1 +0.715,1.3,-1 +-1.77,-0.455,1 +0.75,1.63,1 +1.12,0.422,-1 
+1.88,-1.13,-1 +0.894,-0.742,1 +0.756,-0.0709,1 +-0.734,0.881,-1 +0.301,-1.29,-1 +0.426,0.194,1 +-1.93,0.96,-1 +0.258,0.482,1 +0.272,-1.6,-1 +0.194,-1.27,-1 +-2.28,-1.02,1 +-1.55,-0.61,1 +-1.36,-0.324,1 +0.869,0.576,-1 +1.59,-0.111,-1 +-0.424,0.0207,1 +0.621,1.32,1 +-0.0367,-0.144,1 +0.735,0.932,-1 +1.74,0.195,-1 +-0.482,0.0903,1 +-0.201,1.35,-1 +-0.0882,1.17,-1 +0.831,1.35,-1 +0.762,0.344,-1 +0.777,0.312,-1 +0.813,1.06,-1 +-0.507,1.89,-1 +0.163,-1.08,-1 +-0.9,0.858,-1 +1.88,-0.289,-1 +0.273,1.79,-1 +1.47,-0.531,-1 +-0.814,0.23,1 +-0.422,-0.241,1 +0.996,1.46,1 +0.0432,-1.39,-1 +0.591,-0.427,1 +-0.698,-0.726,1 +-0.536,1.19,-1 +-0.53,0.101,1 +0.494,1.18,-1 +-0.305,0.194,1 +0.0878,-1.08,-1 +-0.752,0.342,1 +0.135,0.0532,1 +-0.0461,1.65,-1 +1.29,0.0437,-1 +-0.534,-1.23,-1 +-0.269,1.37,-1 +-0.803,-1.09,-1 +0.915,1.15,-1 +0.676,0.925,-1 +1.28,0.373,-1 +-0.699,-0.359,1 +-0.222,0.259,1 +-1.6,-1.44,1 +-0.665,0.357,1 +-0.615,-0.97,-1 +0.938,-0.255,1 +-0.926,0.916,-1 +-0.854,-0.379,1 +-0.611,-1.28,-1 +-1.01,0.357,1 +-1.19,1.14,-1 +-0.728,-0.76,1 +-0.768,-0.909,-1 +-0.173,1.59,-1 +0.229,1.63,-1 +0.513,0.741,-1 +-0.0773,-1.22,-1 +-0.114,-1.5,-1 +-1.3,0.651,1 +0.5,0.762,1 +-1.15,-0.562,1 +0.884,-0.766,1 +-0.83,-1.08,-1 +-1.6,-1.32,1 +0.781,0.911,-1 +-1.32,0.0399,1 +0.673,2.52,-1 +-0.855,1.22,-1 +0.0893,-1.1,-1 +-1.52,-0.993,1 +-0.355,1.28,-1 +0.273,-1.45,-1 +-1.27,-0.688,1 +0.157,-0.652,1 +-1.18,1.17,-1 +-0.736,-1.14,-1 +0.603,0.145,1 +0.722,0.368,-1 +-0.38,-1.27,-1 +1.66,-0.384,-1 +-0.118,-1.26,-1 +0.689,-0.187,1 +-0.886,-1.27,-1 +-1.47,0.52,-1 +1.07,0.668,-1 +-1.94,-0.761,1 +0.41,1.01,-1 +-0.78,-1.37,-1 +0.344,1.38,1 +0.22,-1.36,-1 +-0.621,1.11,-1 +0.707,1.37,-1 +0.196,1.76,-1 +0.44,-0.936,-1 +1.37,0.219,-1 +0.284,1.86,-1 +1.45,0.169,-1 +0.941,1.46,-1 +-0.275,-1.6,-1 +-1.5,0.282,1 +0.121,0.219,1 +-1.87,0.373,-1 +-1.51,-0.626,1 +1.41,0.319,-1 +0.919,0.798,-1 +0.533,1.76,-1 +0.845,0.00714,1 +-0.203,1.26,-1 +-0.52,1.35,-1 +-0.0696,-0.896,-1 +-0.914,0.317,1 +-0.688,-1.14,-1 
+1.07,0.399,-1 +-0.411,-1.34,-1 +0.924,0.861,-1 +-0.901,-1.13,-1 +1.55,-0.0305,-1 +1.15,0.565,-1 +-0.572,1.16,-1 +0.155,1.88,-1 +1.43,-0.816,1 +-0.858,-1.2,-1 +-0.0652,0.631,1 +0.16,-0.616,1 +-1.53,0.501,-1 +-1.45,-1.14,1 +-0.113,2.13,1 +-0.241,-0.374,1 +-0.49,-1.45,-1 +-0.861,-0.972,-1 +1.14,0.212,-1 +-0.44,-1.3,-1 +-0.98,-1.49,-1 +-1.69,-0.176,1 +0.84,1.42,-1 +-0.972,0.473,1 +-0.668,0.919,-1 +0.732,0.493,-1 +0.535,1.46,1 +-1.4,-0.183,1 +1.26,0.305,-1 +-0.996,-0.984,1 +-0.534,-1.4,-1 +0.733,0.926,-1 +0.587,0.0331,1 +0.0125,-0.665,-1 +0.835,1.24,-1 +-0.942,-1.15,-1 +0.0368,-0.239,1 +1.5,1.5,1 +-0.744,-1.39,-1 +-0.472,0.267,1 +0.304,1.61,-1 +-0.0439,0.305,1 +-1.96,-1.09,1 +1.31,0.17,-1 +-1.78,-0.253,1 +-1.21,0.918,-1 +0.457,0.191,1 +0.998,0.37,-1 +0.513,1.5,-1 +-0.714,-0.161,1 +0.0722,1.8,-1 +1.33,-0.0673,-1 +0.508,-0.331,1 +0.862,-1.24,1 +0.65,1.86,-1 +1.74,-0.143,-1 +0.417,-1.54,-1 +0.466,0.281,1 +1.73,0.979,1 +-0.314,-0.72,-1 +0.0981,0.465,1 +-1.08,1.01,-1 +2.37,1.07,1 +0.143,-0.786,-1 +0.032,-1.5,-1 +0.739,2.14,-1 +0.396,1.81,1 +0.538,1.38,-1 +0.0325,-1.46,-1 +-0.391,1.32,-1 +-0.695,1.08,-1 +-1.84,-0.846,1 +-0.824,1.1,-1 +1.16,-1.24,1 +0.7,1.14,-1 +-0.17,1.18,-1 +-0.0932,0.472,1 +0.164,-1.18,-1 +0.443,1.54,-1 +-1.22,0.0836,1 +-0.188,-1.27,-1 +0.109,-0.522,1 +1.62,1.19,1 +-0.853,0.358,1 +-1.69,-1.14,1 +0.725,-0.99,1 +0.25,-1.1,-1 +-1.22,0.569,1 +0.627,0.125,-1 +-0.0699,-1.2,-1 +0.594,1.8,-1 +-0.901,-1.56,-1 +1.02,0.0975,-1 +-0.149,-0.0185,1 +-0.226,-1.09,-1 +1.11,0.685,-1 +-0.301,1.69,-1 +0.471,-0.445,1 +-0.827,1.23,-1 +-0.672,-0.0414,1 +0.606,-0.877,-1 +-0.64,0.705,1 +0.14,-0.8,-1 +0.425,-1.21,-1 +0.966,-0.199,1 +1.06,0.629,-1 +0.839,-1.15,1 +0.379,1.74,-1 +0.603,-1,-1 +-0.463,-1.2,-1 +1.18,1.42,1 +1.09,-1.12,1 +0.9,-1.36,1 +1.11,-1.1,1 +1.46,-0.111,-1 +0.242,0.347,1 +0.669,0.98,-1 +-1.2,-0.826,1 +-1.13,0.928,-1 +-0.799,1.11,-1 +-0.108,1.7,-1 +-1.18,0.692,-1 +-0.787,-0.113,1 +-0.412,-0.689,-1 +-0.452,0.524,1 +0.29,-0.515,1 +-0.518,-0.914,-1 +-1.1,0.419,1 
+1.34,1.44,1 +-1.46,0.594,-1 +-0.613,1.1,-1 +1.5,0.0442,-1 +1.34,-0.0272,-1 +-0.146,-0.869,-1 +0.0726,-1.31,-1 +-0.198,-0.661,1 +-0.0776,-1.35,-1 +-1.22,-0.68,1 +1.17,0.407,-1 +1.34,0.0998,-1 +1.25,-1.25,1 +0.712,1.49,1 +-0.516,-0.915,-1 +-1.63,0.728,-1 +-0.514,-1.21,-1 +1.12,0.822,-1 +-0.076,-1.12,-1 +-0.678,-0.417,1 +-0.897,-1.04,-1 +1.12,0.375,-1 +-1.94,-1.61,1 +-0.756,-0.679,-1 +0.825,-1.28,1 +1.8,0.0607,-1 +0.108,-1.21,-1 +2.18,1.21,1 +0.634,0.366,1 +1.26,0.799,-1 +-0.702,-0.811,-1 +-0.336,-0.0523,1 +-0.729,1.5,-1 +1.3,0.749,-1 +0.283,0.394,1 +0.247,1.65,1 +0.212,1.81,-1 +1.08,-0.628,-1 +-0.852,-0.298,1 +-1.77,-0.592,1 +-0.251,0.327,1 +0.417,-1.28,-1 +-0.12,-1.22,-1 +-2.19,-1.3,1 +2.11,0.942,1 +-0.214,-0.235,1 +1.05,0.219,-1 +-0.863,0.174,1 +0.244,1.42,-1 +0.0725,0.596,1 +-1.34,0.869,-1 +-0.0874,-1.07,-1 +-0.452,-1.22,-1 +0.973,0.653,-1 +0.911,-0.832,1 +0.303,-0.94,-1 +0.282,-0.761,1 +1.7,-0.554,-1 +0.659,1.28,-1 +-0.192,0.791,1 +0.424,1.64,1 +-0.59,-1.04,-1 +2.04,1.36,1 +-1.11,-0.351,1 +-1.86,-1.78,1 +-1.12,-0.315,1 +-0.932,-1.02,-1 +1.71,-0.424,-1 +0.726,-0.626,1 +-1.46,0.657,-1 +-0.347,-1.13,-1 +-1.21,-1.16,1 +0.541,1.41,-1 +0.334,1.04,-1 +1.31,0.118,-1 +0.673,0.172,1 +1.14,0.866,-1 +-0.372,-1.11,-1 +1.29,0.644,-1 +1.26,-1.25,1 +0.408,-0.664,1 +0.339,0.881,-1 +0.00275,-0.0555,1 +-0.836,-0.65,1 +0.838,1.5,-1 +-1.4,-1.54,1 +0.786,1.31,-1 +-0.88,0.407,1 +0.987,0.258,-1 +1.86,-0.213,-1 +-1.53,0.679,-1 +-1.12,0.699,-1 +-1.19,-0.328,1 +0.829,-0.305,-1 +1.16,0.847,-1 +-0.434,-1.1,-1 +-0.177,-1.45,-1 +-0.131,1.76,1 +1.19,0.309,-1 +0.189,1.35,-1 +0.0963,-1.38,-1 +-0.989,0.48,-1 +-0.532,0.181,1 +0.704,-0.91,1 +-0.301,1.82,1 +0.0561,-0.479,1 +1.13,-1.27,1 +-0.206,-1.15,-1 +-0.413,1.17,-1 +-1.21,-0.854,1 +1.01,0.555,-1 +0.879,1.03,-1 +0.724,0.706,-1 +-2.03,-1.23,1 +0.414,0.141,1 +0.466,0.322,1 +1.44,0.276,-1 +0.97,1.15,-1 +1.1,1.58,1 +0.719,0.759,-1 +1.07,0.512,-1 +-0.753,-1.08,-1 +-0.451,1.72,-1 +-1.24,-0.706,1 +0.179,-1.04,-1 +0.214,-0.45,1 +0.364,-0.858,-1 
+-1.04,-0.131,1 +0.273,1.68,-1 +1.19,0.289,-1 +-0.436,-0.954,-1 +-0.403,-0.488,1 +-0.8,1.66,-1 +0.00854,1.31,-1 +1.47,-1.44,1 +1.13,1.07,-1 +-1.1,0.737,-1 +-0.711,0.96,-1 +1.81,-0.551,-1 +-0.381,-1.08,-1 +2,1.71,1 +-1.34,0.824,-1 +-0.184,-0.751,1 +0.608,-1.18,1 +0.957,-0.572,1 +-0.243,-1.24,-1 +0.914,0.26,-1 +0.701,0.564,-1 +1.35,1.33,1 +0.387,-0.798,-1 +-1.02,-1.43,-1 +1.71,1.17,1 +-0.178,-0.551,1 +-0.434,-1.16,-1 +0.512,-0.49,1 +-0.434,2.24,-1 +-2.17,-0.523,1 +1.18,0.766,-1 +-0.00948,-0.336,1 +0.142,-0.698,1 +-1.28,0.559,-1 +-1.01,-0.363,1 +-1.74,0.975,-1 +1.34,-1.07,1 +1.28,1.35,1 +-0.274,-0.99,-1 +1.39,0.0609,-1 +-1.36,0.976,-1 +0.99,0.771,-1 +-1.82,-0.341,1 +0.423,1.74,-1 +1.42,-0.208,1 +-0.261,0.204,1 +-1.69,0.706,-1 +-1.24,-0.497,1 +-0.074,1.97,1 +1.4,-1.13,1 +0.411,1.47,-1 +-1.51,-0.285,1 +1.12,-1.22,1 +-0.652,1.14,-1 +0.608,0.0135,1 +0.0285,1.72,1 +-0.315,-1.09,-1 +0.222,1.47,1 +-0.376,-0.975,-1 +-0.356,-0.0513,1 +-0.42,1.11,-1 +-1.17,-0.073,1 +-0.816,0.562,-1 +1.02,0.44,-1 +-0.828,-0.82,-1 +-0.183,1.15,-1 +-0.768,0.209,1 +-0.101,-0.122,1 +-1.55,0.378,-1 +-0.825,0.481,1 +1.23,-1.55,1 +1.61,-0.387,-1 +-0.785,-0.361,1 +-1.53,-1.01,1 +0.327,0.17,1 +1.35,-1.23,1 +-1.04,0.621,-1 +0.424,0.471,-1 +-0.0567,-1.41,-1 +1.63,0.0545,-1 +-0.522,0.877,1 +-0.391,1.03,-1 +-1.54,-0.641,1 +0.0114,-1.46,-1 +1.16,0.151,1 +-0.17,-0.346,1 +0.809,0.942,-1 +0.602,1.06,-1 +-0.879,0.499,1 +1.18,0.507,-1 +-1.06,-0.937,-1 +2.37,1.14,1 +-1.42,0.592,1 +1.88,0.116,-1 +-1.06,0.817,-1 +0.809,0.741,-1 +1.22,0.217,-1 +0.612,1.8,1 +1.25,-0.146,1 +1.28,-0.236,-1 +-0.298,-1.26,-1 +0.93,-0.375,1 +-0.626,-1.18,-1 +-0.102,1.57,1 +-0.918,-0.578,1 +1.01,1.53,1 +0.805,0.42,-1 +-0.958,0.536,-1 +-0.051,-0.972,-1 +-0.0443,-0.335,1 +0.822,0.501,-1 +-0.337,1.66,-1 +-0.957,0.039,1 +-0.0406,2.09,-1 +-0.266,-1.2,-1 +-0.216,-0.353,1 +-1.33,-0.861,1 +-1.87,-1.32,1 +-1.1,0.817,-1 +0.568,0.244,-1 +-0.672,-0.601,-1 +0.847,1.4,-1 +-0.125,-1.48,-1 +0.504,0.0752,-1 +0.257,0.154,1 +-0.269,1.6,-1 +0.29,-0.481,1 
+-2.06,0.392,-1 +-1.14,0.849,-1 +-1.54,0.516,1 +1.55,-0.466,-1 +0.209,0.755,1 +-1.66,-0.56,1 +0.299,1.56,-1 +-0.418,-1.26,-1 +-0.0554,-1.28,-1 +-1.1,0.641,1 +-0.016,-0.448,1 +0.463,0.498,-1 +-1.56,-1.1,1 +0.287,-1.45,-1 +-1.03,1.01,-1 +0.826,0.84,-1 +-1.31,-0.354,1 +-0.634,-1.15,-1 +1.26,0.144,-1 +-0.162,1.84,-1 +-0.532,0.978,-1 +0.527,-1.02,-1 +-0.208,-0.646,1 +-0.58,-1.09,-1 +-0.071,1.63,-1 +-0.622,0.804,1 +-0.25,-1.23,-1 +0.504,1.78,-1 +2.29,1.22,1 +0.625,1.67,-1 +-0.165,-0.697,-1 +-0.317,1.77,-1 +0.309,-0.689,1 +1.14,0.647,-1 +-1.37,-0.177,1 +-0.988,-0.191,1 +-0.0273,0.739,1 +0.611,0.978,-1 +-1.89,-1.7,1 +-0.936,1.81,-1 +1.25,0.794,-1 +-1.34,0.595,-1 +0.469,1.91,-1 +-1.57,0.629,-1 +-1.61,-0.451,1 +-1.81,-0.488,1 +1.08,-0.996,1 +0.445,0.135,1 +0.764,0.469,-1 +0.369,0.428,1 +-0.367,-1.04,-1 +-0.139,0.132,1 +-0.987,-0.863,1 +-1.54,0.503,-1 +0.434,-0.189,1 +-1.57,-1.47,1 +1.62,-0.0187,-1 +0.659,-0.985,1 +0.196,1.27,-1 +-1.42,-0.327,1 +1.82,1.43,1 +-0.487,-0.536,1 +0.235,1.48,1 +1.38,0.131,-1 +-1.44,-0.0746,1 +-0.163,-0.398,1 +1.25,-0.322,-1 +1.33,-0.0802,-1 +-0.18,0.595,1 +-1.29,-0.754,1 +0.409,1.45,-1 +-1.45,-1.25,1 +-1.32,0.995,-1 +1.02,-0.728,1 +1.72,0.0291,-1 +-1.78,-0.687,1 +-0.966,-0.628,1 +-1.57,0.378,-1 +1.12,-0.659,1 +0.0235,-0.18,1 +-1.17,0.156,1 +-0.105,-0.442,1 +-0.256,-0.115,1 +0.414,1.16,-1 +0.617,-0.0096,1 +-1.37,0.648,-1 +0.774,0.617,-1 +2.29,1.32,1 +0.501,0.0553,1 +0.981,0.532,-1 +0.141,0.0947,1 +0.0436,0.0126,1 +-1.84,-1.64,1 +-1.22,-0.119,1 +-1.16,0.41,-1 +1.25,-0.00263,-1 +1.03,1.29,-1 +0.773,-0.747,1 +-0.0297,-1.58,-1 +0.217,-1.21,-1 +-1.06,1.03,-1 +-1.93,-0.92,1 +0.598,0.855,-1 +-1.69,-1.27,1 +0.822,0.192,-1 +-0.539,1.41,-1 +0.00168,0.262,1 +-0.742,-1.02,-1 +-1.31,-0.903,-1 +0.878,-1.38,1 +0.465,-1.26,-1 +-1.11,-0.821,1 +2.41,1.26,1 +-1.56,-0.172,1 +-0.855,-1.05,-1 +5.19E-04,0.282,1 +-1.88,-0.979,1 +-0.895,-1.07,1 +0.235,0.772,1 +1.87,-1.16,-1 +0.735,1.63,1 +0.0737,0.124,1 +-0.337,1.85,-1 +-0.2,1.74,1 +1.2,-0.213,-1 +0.382,0.175,1 
+1.03,-0.191,-1 +-0.739,1.32,-1 +0.952,0.616,-1 +-1.57,-0.205,1 +0.774,-1.22,1 +1.21,-1.24,1 +1.1,0.131,-1 +0.397,0.513,1 +1.07,-1.38,1 +-1.37,-0.205,1 +-0.33,0.0829,1 +-0.718,-1.25,-1 +-0.676,1.3,-1 +0.434,2.19,-1 +-0.882,-0.582,1 +1.16,-0.768,1 +0.572,-1.24,-1 +-0.757,0.298,1 +0.67,1.15,-1 +0.0301,-0.894,-1 +1.22,1.35,-1 +-0.685,0.989,-1 +-0.377,-0.617,1 +0.13,1.41,-1 +-1.09,1.39,-1 +1.15,-1.14,1 +0.648,1.41,-1 +0.981,0.0921,-1 +0.318,1.4,-1 +-0.501,0.0787,1 +-1.12,0.494,-1 +-1.81,-0.994,1 +-0.748,-0.034,1 +0.544,1.41,-1 +-0.944,1.02,-1 +-0.533,1.22,-1 +-0.446,-1.28,-1 +0.498,-1.02,-1 +-0.954,-1.19,-1 +1.65,0.0404,-1 +-2.03,0.359,-1 +-0.235,-1.22,-1 +0.627,-0.0629,1 +-1.93,0.525,-1 +2.35,1.14,1 +0.233,-0.275,1 +-0.781,-0.113,1 +-0.327,0.612,1 +-1.41,1.09,-1 +0.0489,-1.36,-1 +0.253,-1.3,-1 +-1.25,-0.656,1 +-1.59,0.322,-1 +1.99,1.18,1 +2.2,1.26,1 +-0.172,-1.31,-1 +0.276,0.397,1 +0.949,0.422,-1 +1.06,0.842,-1 +1.98,1.27,1 +-0.279,-0.0928,1 +0.275,1.18,-1 +-0.318,0.317,1 +-1.32,-0.088,1 +-0.695,-0.736,-1 +0.916,1.63,-1 +0.974,0.512,-1 +-0.567,-1.7,-1 +1.38,0.749,-1 +0.521,-1.04,-1 +1.13,0.429,-1 +-0.511,1.5,-1 +1.46,-0.91,1 +1.67,0.0877,-1 +-0.965,-0.978,-1 +-2,-1.72,1 +-1.43,-1.02,1 +0.739,0.139,1 +0.369,0.49,1 +-1.64,-1.8,1 +-1.31,-0.498,1 +0.44,0.9,-1 +-1.59,-1.36,1 +-0.724,0.215,1 +1.33,1.43,1 +-0.91,-0.607,1 +1.19,0.997,-1 +-0.914,0.425,1 +1.11,0.375,-1 +-1.46,-0.298,1 +-0.328,0.232,1 +0.941,-0.789,1 +-1.82,0.885,-1 +0.0949,-1.56,-1 +0.874,-0.769,1 +-0.967,0.592,-1 +0.803,1.35,-1 +-0.521,0.726,1 +-0.76,-0.698,1 +-2.25,-1.96,1 +-0.749,0.0101,1 +-0.00626,-0.959,-1 +1.01,-0.984,1 +0.198,-1.19,-1 +0.544,1.63,1 +-0.213,0.259,1 +-0.729,1.37,-1 +0.112,0.0285,1 +-0.968,-0.912,-1 +-0.566,-1.33,-1 +-0.271,1.59,-1 +1.56,-0.367,1 +0.897,1.07,-1 +1.39,-0.498,-1 +0.0968,-0.225,1 +0.374,0.966,-1 +-0.791,-1.26,-1 +0.615,0.558,-1 +-0.391,0.181,1 +-0.153,-1.2,-1 +0.298,0.509,1 +0.557,-1.04,1 +-0.109,-1.07,-1 +0.412,-0.124,1 +-1.55,-1.47,1 +1.9,1.27,1 +0.838,-0.115,1 
+0.243,1.61,-1 +0.48,-1.1,-1 +0.854,-0.537,1 +-0.00294,0.513,1 +1.34,0.878,-1 +0.00865,-0.918,-1 +1.82,-0.643,-1 +-0.212,-1.18,-1 +0.823,-1.21,-1 +-0.835,-1.06,-1 +-0.536,1.05,-1 +-1.53,-1.21,1 +0.352,0.333,1 +-1.09,0.531,-1 +0.709,0.872,-1 +1.54,1.75,1 +-1.87,-0.555,1 +-0.282,1.78,1 +0.428,-0.494,1 +0.923,0.933,-1 +-1.21,-0.409,1 +-1.31,0.175,1 +-0.753,-0.321,1 +0.614,-1.46,-1 +-0.598,-1.35,-1 +-1.15,-0.0498,1 +1.6,-1.36,1 +-0.902,-0.36,1 +-0.66,-0.0708,1 +2.57,0.989,1 +-1.13,-0.0634,1 +-1.01,-0.397,1 +1.43,-1.21,1 +1,0.254,-1 +-0.103,-0.414,1 +-0.969,1.02,-1 +1.09,1.03,-1 +1.08,0.0163,-1 +1.44,-1.7,1 +-2.02,-0.792,1 +1.17,0.511,-1 +0.0403,-1.51,-1 +0.931,0.783,-1 +0.824,0.587,1 +-0.0957,-1.06,-1 +1.05,0.472,-1 +-0.268,-1.52,-1 +1.35,0.229,-1 +-1.4,0.475,-1 +-1.34,-0.419,1 +-0.523,-1.31,-1 +1.18,1.49,-1 +0.566,0.0815,1 +0.34,-1.34,-1 +0.578,-0.622,1 +0.753,1.89,1 +-0.184,-0.0314,1 +0.56,1.7,1 +1.07,0.457,-1 +-0.613,0.775,1 +0.819,0.0462,1 +1.11,-0.0628,-1 +-1.61,-1.13,1 +1.06,-0.0531,1 +0.764,-0.804,1 +0.226,-1.38,-1 +-1.19,1.01,-1 +1.25,-0.118,-1 +-1.76,-1.17,1 +-0.234,-0.656,1 +-1.45,-1.29,1 +0.234,-0.822,1 +-1.84,-0.865,1 +-1.63,-1.32,1 +-0.715,-0.969,-1 +0.0973,-1.33,-1 +-0.551,0.288,1 +1.39,-0.013,-1 +0.215,-1.66,-1 +0.0224,1.43,1 +-0.338,-1.33,-1 +-0.693,-0.113,1 +0.0533,-1.02,-1 +-0.153,-1.22,-1 +-1.18,0.0302,1 +1.15,0.329,-1 +1.31,-1.4,1 +-0.285,1.98,-1 +1.06,-0.0553,1 +-0.904,-0.203,1 +0.661,-0.199,1 +0.896,-1.27,1 +-1.54,-0.618,1 +1.29,-0.657,1 +-1.67,-1.5,1 +-1.36,-0.848,1 +1.98,-0.0252,-1 +0.485,0.635,-1 +-0.674,0.842,-1 +0.722,-0.975,1 +1.02,-0.625,1 +1.26,-0.181,1 +-0.658,-1.31,-1 +-0.405,-1.25,-1 +1.27,0.493,-1 +0.933,-0.277,1 +-0.815,-0.682,1 +-1.51,-0.769,1 +0.117,-1.07,-1 +0.384,2.15,-1 +-1.32,1.1,-1 +1.2,-1.65,1 +0.0529,1.52,-1 +1.46,0.0373,-1 +-0.607,-1.6,-1 +-1.06,-0.549,1 +-1.39,0.862,-1 +0.527,0.51,1 +1.15,-0.451,1 +-0.171,0.0105,1 +-1.63,0.524,-1 +2.2,-0.665,-1 +0.826,1.42,-1 +-0.138,-0.0524,1 +-0.754,1.19,-1 +0.0574,-0.134,1 
+0.877,0.161,-1 +1.17,0.446,-1 +0.647,0.317,-1 +-1.73,-0.242,1 +0.631,-0.674,1 +-0.973,-0.047,1 +-0.4,0.00463,1 +-1.14,-1.27,1 +0.42,1.1,-1 +0.43,-0.654,1 +1.49,0.365,-1 +-0.643,1.68,-1 +0.698,-0.655,1 +-0.222,2.47,-1 +-1.22,-0.0961,1 +-0.276,1.05,-1 +-0.253,-0.406,1 +-1.38,0.678,-1 +-1.13,-0.443,1 +1.3,0.175,-1 +1.64,1.46,1 +1.48,1.78,1 +-1.61,-0.363,1 +-1.52,-1.23,1 +-0.0657,-1.19,-1 +-0.653,0.433,1 +2.48,1.21,1 +-0.959,1.08,-1 +0.842,-0.372,1 +-0.402,-1.11,-1 +-0.291,0.561,1 +-1.02,-0.224,1 +0.45,-0.104,1 +-1.88,-0.454,1 +-0.582,-0.752,1 +-0.594,1.24,-1 +-1.49,-0.00325,1 +0.552,1.04,-1 +1.06,0.443,-1 +0.406,-0.707,1 +1.15,0.0436,1 +0.612,1.37,-1 +1.1,0.508,-1 +1.1,0.604,-1 +-0.753,1.18,-1 +-1.13,0.411,1 +-0.872,0.26,1 +-0.415,0.256,1 +0.801,-0.639,1 +0.762,0.125,1 +0.0343,-1.18,-1 +0.524,-0.231,1 +-0.0262,-1.18,-1 +-1.47,-0.0358,1 +1.29,0.152,-1 +-0.851,0.189,1 +-0.961,1,-1 +-0.776,0.127,1 +-0.674,-0.62,1 +0.244,-0.69,1 +-0.974,0.511,1 +0.37,1.03,-1 +-1.98,-0.88,1 +0.308,0.117,1 +-1.62,0.468,-1 +-0.843,-1.08,-1 +1,1.17,-1 +-0.954,0.687,1 +0.768,-0.966,1 +1.64,1.12,1 +-1.7,-0.554,1 +0.404,0.00999,1 +0.704,-0.906,1 +0.335,1.39,1 +-1.7,-0.569,1 +2.64,1.14,1 +0.769,0.772,-1 +-0.255,-0.142,1 diff --git a/newt/data/coal.txt b/newt/data/coal.txt new file mode 100644 index 0000000..053b28c --- /dev/null +++ b/newt/data/coal.txt @@ -0,0 +1,191 @@ + 1.8512026e+03 + 1.8516324e+03 + 1.8519692e+03 + 1.8519747e+03 + 1.8523142e+03 + 1.8523470e+03 + 1.8523580e+03 + 1.8523854e+03 + 1.8529767e+03 + 1.8531958e+03 + 1.8532286e+03 + 1.8533190e+03 + 1.8534997e+03 + 1.8541348e+03 + 1.8563963e+03 + 1.8565058e+03 + 1.8565387e+03 + 1.8566181e+03 + 1.8571383e+03 + 1.8574038e+03 + 1.8575818e+03 + 1.8580910e+03 + 1.8581540e+03 + 1.8584059e+03 + 1.8589452e+03 + 1.8601253e+03 + 1.8601691e+03 + 1.8605907e+03 + 1.8608508e+03 + 1.8609192e+03 + 1.8609713e+03 + 1.8611848e+03 + 1.8617379e+03 + 1.8618364e+03 + 1.8621376e+03 + 1.8628932e+03 + 1.8629370e+03 + 1.8631780e+03 + 1.8637940e+03 + 
1.8639391e+03 + 1.8639856e+03 + 1.8654586e+03 + 1.8659706e+03 + 1.8660637e+03 + 1.8663402e+03 + 1.8664524e+03 + 1.8668330e+03 + 1.8669480e+03 + 1.8669507e+03 + 1.8676352e+03 + 1.8678542e+03 + 1.8678624e+03 + 1.8687495e+03 + 1.8689028e+03 + 1.8689877e+03 + 1.8692505e+03 + 1.8694422e+03 + 1.8695544e+03 + 1.8698090e+03 + 1.8698747e+03 + 1.8701239e+03 + 1.8705154e+03 + 1.8705592e+03 + 1.8706331e+03 + 1.8710274e+03 + 1.8711506e+03 + 1.8711670e+03 + 1.8717365e+03 + 1.8718159e+03 + 1.8721225e+03 + 1.8722402e+03 + 1.8727687e+03 + 1.8731355e+03 + 1.8742854e+03 + 1.8745455e+03 + 1.8748877e+03 + 1.8749808e+03 + 1.8753285e+03 + 1.8759254e+03 + 1.8759309e+03 + 1.8759309e+03 + 1.8769658e+03 + 1.8770643e+03 + 1.8771054e+03 + 1.8771903e+03 + 1.8777789e+03 + 1.8778090e+03 + 1.8781841e+03 + 1.8781951e+03 + 1.8782361e+03 + 1.8784333e+03 + 1.8786961e+03 + 1.8790356e+03 + 1.8791725e+03 + 1.8795010e+03 + 1.8800568e+03 + 1.8805387e+03 + 1.8806893e+03 + 1.8809439e+03 + 1.8811054e+03 + 1.8819678e+03 + 1.8821294e+03 + 1.8822964e+03 + 1.8822991e+03 + 1.8823347e+03 + 1.8828522e+03 + 1.8837967e+03 + 1.8838515e+03 + 1.8840732e+03 + 1.8848563e+03 + 1.8851684e+03 + 1.8854641e+03 + 1.8859788e+03 + 1.8866167e+03 + 1.8866934e+03 + 1.8867536e+03 + 1.8869206e+03 + 1.8871342e+03 + 1.8874052e+03 + 1.8882977e+03 + 1.8890507e+03 + 1.8891985e+03 + 1.8897926e+03 + 1.8901020e+03 + 1.8901896e+03 + 1.8912519e+03 + 1.8916653e+03 + 1.8926537e+03 + 1.8935079e+03 + 1.8944771e+03 + 1.8953176e+03 + 1.8960705e+03 + 1.8962841e+03 + 1.8963306e+03 + 1.8996297e+03 + 1.9013929e+03 + 1.9026715e+03 + 1.9050561e+03 + 1.9051875e+03 + 1.9055243e+03 + 1.9067728e+03 + 1.9081362e+03 + 1.9082704e+03 + 1.9086290e+03 + 1.9091273e+03 + 1.9098255e+03 + 1.9103566e+03 + 1.9109699e+03 + 1.9125195e+03 + 1.9137844e+03 + 1.9144086e+03 + 1.9166153e+03 + 1.9180308e+03 + 1.9225291e+03 + 1.9226769e+03 + 1.9235695e+03 + 1.9271615e+03 + 1.9281143e+03 + 1.9301540e+03 + 1.9307481e+03 + 1.9310767e+03 + 1.9318296e+03 + 1.9318843e+03 + 1.9320650e+03 + 
1.9328645e+03 + 1.9328754e+03 + 1.9338830e+03 + 1.9347235e+03 + 1.9356434e+03 + 1.9356954e+03 + 1.9365962e+03 + 1.9374997e+03 + 1.9383539e+03 + 1.9398214e+03 + 1.9402183e+03 + 1.9404237e+03 + 1.9414203e+03 + 1.9415216e+03 + 1.9415736e+03 + 1.9420007e+03 + 1.9421294e+03 + 1.9424825e+03 + 1.9469452e+03 + 1.9470246e+03 + 1.9476188e+03 + 1.9476379e+03 + 1.9476872e+03 + 1.9514052e+03 + 1.9578830e+03 + 1.9604894e+03 + 1.9622197e+03 diff --git a/newt/data/mcycle.csv b/newt/data/mcycle.csv new file mode 100644 index 0000000..b61aa73 --- /dev/null +++ b/newt/data/mcycle.csv @@ -0,0 +1,133 @@ +1,2.4,0 +2,2.6,-1.3 +3,3.2,-2.7 +4,3.6,0 +5,4,-2.7 +6,6.2,-2.7 +7,6.6,-2.7 +8,6.8,-1.3 +9,7.8,-2.7 +10,8.2,-2.7 +11,8.8,-1.3 +12,8.8,-2.7 +13,9.6,-2.7 +14,10,-2.7 +15,10.2,-5.4 +16,10.6,-2.7 +17,11,-5.4 +18,11.4,0 +19,13.2,-2.7 +20,13.6,-2.7 +21,13.8,0 +22,14.6,-13.3 +23,14.6,-5.4 +24,14.6,-5.4 +25,14.6,-9.3 +26,14.6,-16 +27,14.6,-22.8 +28,14.8,-2.7 +29,15.4,-22.8 +30,15.4,-32.1 +31,15.4,-53.5 +32,15.4,-54.9 +33,15.6,-40.2 +34,15.6,-21.5 +35,15.8,-21.5 +36,15.8,-50.8 +37,16,-42.9 +38,16,-26.8 +39,16.2,-21.5 +40,16.2,-50.8 +41,16.2,-61.7 +42,16.4,-5.4 +43,16.4,-80.4 +44,16.6,-59 +45,16.8,-71 +46,16.8,-91.1 +47,16.8,-77.7 +48,17.6,-37.5 +49,17.6,-85.6 +50,17.6,-123.1 +51,17.6,-101.9 +52,17.8,-99.1 +53,17.8,-104.4 +54,18.6,-112.5 +55,18.6,-50.8 +56,19.2,-123.1 +57,19.4,-85.6 +58,19.4,-72.3 +59,19.6,-127.2 +60,20.2,-123.1 +61,20.4,-117.9 +62,21.2,-134 +63,21.4,-101.9 +64,21.8,-108.4 +65,22,-123.1 +66,23.2,-123.1 +67,23.4,-128.5 +68,24,-112.5 +69,24.2,-95.1 +70,24.2,-81.8 +71,24.6,-53.5 +72,25,-64.4 +73,25,-57.6 +74,25.4,-72.3 +75,25.4,-44.3 +76,25.6,-26.8 +77,26,-5.4 +78,26.2,-107.1 +79,26.2,-21.5 +80,26.4,-65.6 +81,27,-16 +82,27.2,-45.6 +83,27.2,-24.2 +84,27.2,9.5 +85,27.6,4 +86,28.2,12 +87,28.4,-21.5 +88,28.4,37.5 +89,28.6,46.9 +90,29.4,-17.4 +91,30.2,36.2 +92,31,75 +93,31.2,8.1 +94,32,54.9 +95,32,48.2 +96,32.8,46.9 +97,33.4,16 +98,33.8,45.6 +99,34.4,1.3 +100,34.8,75 +101,35.2,-16 
+102,35.2,-54.9 +103,35.4,69.6 +104,35.6,34.8 +105,35.6,32.1 +106,36.2,-37.5 +107,36.2,22.8 +108,38,46.9 +109,38,10.7 +110,39.2,5.4 +111,39.4,-1.3 +112,40,-21.5 +113,40.4,-13.3 +114,41.6,30.8 +115,41.6,-10.7 +116,42.4,29.4 +117,42.8,0 +118,42.8,-10.7 +119,43,14.7 +120,44,-1.3 +121,44.4,0 +122,45,10.7 +123,46.6,10.7 +124,47.8,-26.8 +125,47.8,-14.7 +126,48.8,-13.3 +127,50.6,0 +128,52,10.7 +129,53.2,-14.7 +130,55,-2.7 +131,55,10.7 +132,55.4,-2.7 +133,57.6,10.7 diff --git a/newt/experiments/aircraft/aircraft.py b/newt/experiments/aircraft/aircraft.py new file mode 100644 index 0000000..146a790 --- /dev/null +++ b/newt/experiments/aircraft/aircraft.py @@ -0,0 +1,147 @@ +import sys +import newt +import objax +import numpy as np +import pandas as pd +import matplotlib.pyplot as plt +import time +from datetime import date +import pickle + +plot_final = False +plot_intermediate = False + +print('loading data ...') +aircraft_accidents = pd.read_csv('aircraft_accidents.txt', sep='-', header=None).values + +num_data = aircraft_accidents.shape[0] +xx = np.zeros([num_data, 1]) +for j in range(num_data): + xx[j] = date.toordinal(date(aircraft_accidents[j, 0], aircraft_accidents[j, 1], aircraft_accidents[j, 2])) + 366 + +BIN_WIDTH = 1 +# Discretize the data +x_min = np.floor(np.min(xx)) +x_max = np.ceil(np.max(xx)) +x_max_int = x_max-np.mod(x_max-x_min, BIN_WIDTH) +x = np.linspace(x_min, x_max_int, num=int((x_max_int-x_min)/BIN_WIDTH+1)) +x = np.concatenate([np.min(x)-np.linspace(61, 1, num=61), x]) # pad with zeros to reduce strange edge effects +y, _ = np.histogram(xx, np.concatenate([[-1e10], x[1:]-np.diff(x)/2, [1e10]])) +N = y.shape[0] +print('N =', N) + +scale = 1 # scale inputs for stability +x = scale * x + +np.random.seed(123) +ind_shuffled = np.random.permutation(N) +ind_split = np.stack(np.split(ind_shuffled, 10)) # 10 random batches of data indices + +np.random.seed(123) +# meanval = np.log(len(disaster_timings)/num_time_bins) # TODO: incorporate mean + +if 
len(sys.argv) > 1: + method = int(sys.argv[1]) + fold = int(sys.argv[2]) +else: + method = 2 + fold = 0 + +print('method number', method) +print('batch number', fold) + +# Get training and test indices +ind_test = ind_split[fold] # np.sort(ind_shuffled[:N//10]) +ind_train = np.concatenate(ind_split[np.arange(10) != fold]) +x_train = x[ind_train] # 90/10 train/test split +x_test = x[ind_test] +y_train = y[ind_train] +y_test = y[ind_test] +# N_batch = 2000 +M = 4000 +# z = np.linspace(701050, 737050, M) +z = np.linspace(x[0], x[-1], M) + +if len(sys.argv) > 3: + baseline = int(sys.argv[3]) +else: + baseline = 0 + +kern_1 = newt.kernels.Matern52(variance=2., lengthscale=scale*5.5e4) +kern_2 = newt.kernels.QuasiPeriodicMatern12(variance=1., + lengthscale_periodic=scale*2., + period=scale*365., + lengthscale_matern=scale*1.5e4) +kern_3 = newt.kernels.QuasiPeriodicMatern12(variance=1., + lengthscale_periodic=scale*2., + period=scale*7., + lengthscale_matern=scale*30*365.) + +kern = newt.kernels.Sum([kern_1, kern_2, kern_3]) +lik = newt.likelihoods.Poisson(binsize=scale*BIN_WIDTH) + +if method == 0: + inf = newt.inference.Taylor() +elif method == 1: + inf = newt.inference.PosteriorLinearisation() +elif method == 2: + inf = newt.inference.ExpectationPropagation(power=1) +elif method == 3: + inf = newt.inference.ExpectationPropagation(power=0.5) +elif method == 4: + inf = newt.inference.ExpectationPropagation(power=0.01) +elif method == 5: + inf = newt.inference.VariationalInference() + +if baseline: + model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train) +else: + model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train, Z=z) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.1 +lr_newton = 0.1 +iters = 200 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = 
energy(model) # compute energy and its gradients w.r.t. hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +# calculate posterior predictive distribution via filtering and smoothing at train & test locations: +print('calculating the posterior predictive distribution ...') +t0 = time.time() +nlpd = model.negative_log_predictive_density(X=x_test, Y=y_test) +t1 = time.time() +print('NLPD: %1.2f' % nlpd) + +if baseline: + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) +else: + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) + +# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: +# nlpd_show = pickle.load(fp) +# print(nlpd_show) + +# plt.figure(1) +# plt.plot(t_test, mu, 'b-') +# plt.plot(z, inducing_mean[..., 0], 'b.', label='inducing mean', markersize=8) +# plt.show() diff --git a/newt/experiments/aircraft/aircraft.slrm b/newt/experiments/aircraft/aircraft.slrm new file mode 100644 index 0000000..4a4a79f --- /dev/null +++ b/newt/experiments/aircraft/aircraft.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 48:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=3000 +#SBATCH --array=0-5 +#SBATCH -o aircraft-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. 
+for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python aircraft.py $SLURM_ARRAY_TASK_ID $run +done \ No newline at end of file diff --git a/newt/experiments/aircraft/aircraft_accidents.txt b/newt/experiments/aircraft/aircraft_accidents.txt new file mode 100644 index 0000000..e69de25 --- /dev/null +++ b/newt/experiments/aircraft/aircraft_accidents.txt @@ -0,0 +1,1210 @@ +1919-07-21 +1919-08-02 +1920-12-14 +1922-04-07 +1923-05-14 +1923-08-27 +1923-09-14 +1924-12-24 +1926-08-18 +1926-10-02 +1927-08-22 +1928-07-13 +1929-06-17 +1929-11-06 +1930-02-10 +1930-10-05 +1931-03-21 +1931-03-31 +1933-03-28 +1933-10-10 +1933-12-30 +1934-05-09 +1934-07-27 +1934-10-02 +1935-05-06 +1935-10-07 +1935-12-10 +1936-01-14 +1936-04-07 +1936-06-16 +1936-08-05 +1936-12-09 +1936-12-27 +1937-01-12 +1937-02-19 +1937-05-06 +1937-11-16 +1938-01-10 +1938-01-11 +1938-03-01 +1938-07-28 +1938-10-25 +1938-11-04 +1939-01-13 +1939-01-21 +1939-08-13 +1940-06-14 +1940-08-31 +1940-11-08 +1941-02-26 +1941-10-30 +1942-01-16 +1942-01-30 +1942-03-03 +1942-10-23 +1943-01-21 +1943-06-01 +1943-07-28 +1943-10-15 +1944-02-10 +1944-06-20 +1945-01-08 +1945-01-31 +1945-07-12 +1945-10-05 +1945-11-03 +1946-01-06 +1946-03-10 +1946-07-11 +1946-08-07 +1946-10-03 +1946-11-14 +1946-12-28 +1947-01-11 +1947-01-12 +1947-01-25 +1947-01-26 +1947-02-15 +1947-05-29 +1947-05-30 +1947-06-13 +1947-06-19 +1947-08-02 +1947-08-28 +1947-10-24 +1947-10-26 +1947-12-27 +1948-01-30 +1948-03-12 +1948-04-05 +1948-04-15 +1948-04-21 +1948-06-17 +1948-07-04 +1948-07-17 +1948-08-01 +1948-08-29 +1948-09-02 +1948-10-02 +1948-10-20 +1948-12-28 +1949-01-17 +1949-02-19 +1949-03-10 +1949-05-04 +1949-06-07 +1949-07-02 +1949-07-12 +1949-08-19 +1949-09-09 +1949-10-28 +1949-11-01 +1949-11-20 +1949-11-29 +1950-01-05 +1950-03-07 +1950-03-12 +1950-06-12 +1950-06-14 +1950-06-24 +1950-06-26 +1950-08-31 +1950-10-31 +1950-11-03 +1950-11-13 +1951-04-25 +1951-06-22 +1951-06-30 +1951-07-21 
+1951-08-24 +1951-11-15 +1951-12-16 +1951-12-22 +1951-12-29 +1952-01-22 +1952-02-11 +1952-03-03 +1952-04-11 +1952-04-28 +1952-06-28 +1952-08-12 +1952-12-06 +1953-01-05 +1953-02-02 +1953-07-12 +1953-08-03 +1953-09-01 +1953-09-16 +1953-10-29 +1954-01-10 +1954-03-13 +1954-04-08 +1954-07-23 +1954-09-05 +1954-12-25 +1955-01-12 +1955-02-19 +1955-03-26 +1955-04-04 +1955-04-11 +1955-07-27 +1955-10-06 +1955-11-01 +1956-02-18 +1956-04-01 +1956-04-02 +1956-06-20 +1956-06-24 +1956-06-30 +1956-07-09 +1956-10-16 +1956-11-07 +1956-11-27 +1956-12-09 +1957-02-01 +1957-03-14 +1957-05-01 +1957-07-16 +1957-08-11 +1957-11-08 +1957-11-15 +1958-02-06 +1958-02-27 +1958-04-06 +1958-04-21 +1958-05-25 +1958-08-09 +1958-08-14 +1958-08-15 +1958-08-15 +1958-09-02 +1958-10-22 +1958-12-04 +1958-12-24 +1959-01-08 +1959-01-11 +1959-02-03 +1959-02-17 +1959-04-23 +1959-05-12 +1959-06-26 +1959-08-19 +1959-09-24 +1959-09-29 +1959-10-30 +1959-11-16 +1959-11-21 +1960-01-06 +1960-01-18 +1960-01-19 +1960-01-21 +1960-02-25 +1960-03-17 +1960-06-10 +1960-07-15 +1960-09-26 +1960-10-04 +1960-10-29 +1960-12-16 +1961-01-03 +1961-02-15 +1961-03-28 +1961-04-03 +1961-05-10 +1961-05-30 +1961-06-12 +1961-07-11 +1961-07-19 +1961-08-09 +1961-09-01 +1961-09-11 +1961-09-12 +1961-09-17 +1961-09-18 +1961-09-23 +1961-10-07 +1961-11-08 +1961-11-23 +1961-11-30 +1962-02-25 +1962-03-01 +1962-03-04 +1962-03-08 +1962-03-16 +1962-05-06 +1962-05-22 +1962-06-03 +1962-06-22 +1962-06-30 +1962-07-07 +1962-07-19 +1962-07-22 +1962-07-28 +1962-09-03 +1962-11-23 +1962-11-27 +1962-11-30 +1962-12-19 +1963-02-02 +1963-02-12 +1963-04-04 +1963-07-02 +1963-07-03 +1963-07-27 +1963-08-21 +1963-09-04 +1963-11-08 +1963-11-29 +1963-12-08 +1964-02-25 +1964-02-29 +1964-05-07 +1964-06-20 +1964-07-09 +1964-09-02 +1964-11-15 +1964-11-20 +1964-11-23 +1964-12-24 +1965-01-04 +1965-02-06 +1965-02-08 +1965-04-14 +1965-05-05 +1965-05-20 +1965-07-01 +1965-07-08 +1965-07-10 +1965-08-16 +1965-09-17 +1965-11-08 +1965-11-11 +1965-11-11 +1965-12-04 +1966-01-15 
+1966-01-24 +1966-01-28 +1966-02-02 +1966-02-04 +1966-02-17 +1966-03-04 +1966-03-05 +1966-03-18 +1966-04-22 +1966-04-27 +1966-08-06 +1966-09-01 +1966-09-22 +1966-10-01 +1966-11-13 +1966-11-15 +1966-11-24 +1967-02-16 +1967-03-05 +1967-03-05 +1967-03-09 +1967-03-13 +1967-04-20 +1967-06-04 +1967-06-23 +1967-06-30 +1967-07-19 +1967-09-05 +1967-10-12 +1967-11-04 +1967-11-06 +1967-11-16 +1967-11-20 +1968-03-06 +1968-03-24 +1968-04-08 +1968-04-20 +1968-05-03 +1968-05-22 +1968-07-01 +1968-07-03 +1968-07-23 +1968-08-14 +1968-09-11 +1968-10-25 +1968-11-22 +1968-12-02 +1968-12-12 +1968-12-24 +1968-12-27 +1968-12-31 +1969-01-05 +1969-01-06 +1969-01-13 +1969-01-18 +1969-02-18 +1969-02-24 +1969-03-05 +1969-03-16 +1969-03-20 +1969-04-02 +1969-04-28 +1969-06-04 +1969-06-23 +1969-08-29 +1969-09-09 +1969-09-12 +1969-11-19 +1969-11-20 +1969-12-03 +1969-12-08 +1970-01-05 +1970-02-04 +1970-02-15 +1970-02-21 +1970-03-31 +1970-04-21 +1970-05-02 +1970-07-03 +1970-07-05 +1970-08-09 +1970-08-12 +1970-09-06 +1970-10-02 +1970-11-14 +1971-01-22 +1971-01-31 +1971-05-23 +1971-06-06 +1971-06-07 +1971-07-03 +1971-07-25 +1971-07-30 +1971-07-30 +1971-09-04 +1971-09-06 +1971-10-02 +1971-11-10 +1971-11-24 +1971-12-24 +1972-01-07 +1972-01-26 +1972-02-22 +1972-03-03 +1972-03-14 +1972-03-19 +1972-05-05 +1972-05-18 +1972-06-12 +1972-06-14 +1972-06-15 +1972-06-18 +1972-06-24 +1972-06-29 +1972-07-02 +1972-08-14 +1972-08-16 +1972-08-31 +1972-09-24 +1972-10-01 +1972-10-13 +1972-10-13 +1972-10-29 +1972-11-15 +1972-12-03 +1972-12-08 +1972-12-08 +1972-12-20 +1972-12-23 +1972-12-29 +1973-01-22 +1973-01-29 +1973-02-19 +1973-02-21 +1973-02-24 +1973-03-03 +1973-03-05 +1973-04-10 +1973-05-18 +1973-05-31 +1973-06-20 +1973-07-11 +1973-07-22 +1973-07-23 +1973-07-23 +1973-07-31 +1973-08-13 +1973-09-11 +1973-09-27 +1973-09-30 +1973-10-13 +1973-11-03 +1973-12-17 +1974-01-26 +1974-01-30 +1974-03-03 +1974-03-15 +1974-04-18 +1974-04-22 +1974-09-08 +1974-09-11 +1974-09-15 +1974-11-20 +1974-12-01 +1974-12-04 +1974-12-22 
+1975-01-09 +1975-01-30 +1975-03-31 +1975-06-24 +1975-08-03 +1975-08-20 +1975-08-30 +1975-09-01 +1975-09-24 +1975-09-30 +1975-10-30 +1976-01-01 +1976-01-03 +1976-01-15 +1976-02-09 +1976-03-06 +1976-04-27 +1976-05-15 +1976-06-01 +1976-06-04 +1976-06-06 +1976-06-27 +1976-07-28 +1976-08-15 +1976-09-09 +1976-09-10 +1976-09-19 +1976-10-06 +1976-10-12 +1976-11-23 +1976-11-28 +1976-12-25 +1977-01-13 +1977-01-15 +1977-02-15 +1977-03-27 +1977-04-04 +1977-04-27 +1977-05-14 +1977-05-27 +1977-09-27 +1977-09-28 +1977-10-13 +1977-10-20 +1977-11-19 +1977-12-02 +1977-12-04 +1977-12-13 +1977-12-17 +1977-12-18 +1978-01-01 +1978-02-11 +1978-03-01 +1978-03-03 +1978-03-16 +1978-04-20 +1978-05-08 +1978-06-26 +1978-06-26 +1978-08-30 +1978-09-03 +1978-09-25 +1978-11-15 +1978-12-23 +1978-12-28 +1979-01-30 +1979-02-12 +1979-02-17 +1979-03-13 +1979-03-17 +1979-03-29 +1979-04-04 +1979-05-25 +1979-05-30 +1979-06-17 +1979-07-11 +1979-07-31 +1979-08-11 +1979-08-29 +1979-10-08 +1979-10-31 +1979-11-15 +1979-11-26 +1979-11-28 +1979-12-23 +1980-01-21 +1980-02-21 +1980-03-14 +1980-04-12 +1980-04-25 +1980-04-27 +1980-06-20 +1980-06-27 +1980-07-08 +1980-08-19 +1980-09-12 +1980-11-03 +1980-11-19 +1980-11-21 +1980-12-22 +1981-03-28 +1981-05-02 +1981-05-07 +1981-06-14 +1981-06-26 +1981-07-20 +1981-07-27 +1981-08-13 +1981-08-22 +1981-08-24 +1981-08-26 +1981-10-06 +1981-11-17 +1981-12-01 +1982-01-13 +1982-01-23 +1982-02-09 +1982-03-11 +1982-03-20 +1982-03-26 +1982-04-26 +1982-06-08 +1982-06-12 +1982-06-21 +1982-06-24 +1982-06-28 +1982-07-06 +1982-07-09 +1982-08-11 +1982-09-01 +1982-09-13 +1982-09-29 +1982-12-09 +1982-12-24 +1983-01-11 +1983-01-16 +1983-03-11 +1983-05-05 +1983-06-02 +1983-06-08 +1983-07-11 +1983-07-16 +1983-07-23 +1983-08-30 +1983-09-01 +1983-09-14 +1983-09-23 +1983-10-11 +1983-11-08 +1983-11-18 +1983-11-27 +1983-12-07 +1983-12-18 +1983-12-20 +1983-12-24 +1984-01-10 +1984-02-28 +1984-03-22 +1984-08-05 +1984-08-30 +1984-09-18 +1984-10-11 +1984-12-06 +1984-12-23 +1985-01-01 +1985-01-21 
+1985-02-01 +1985-02-19 +1985-02-19 +1985-05-03 +1985-06-14 +1985-06-21 +1985-06-23 +1985-07-10 +1985-08-02 +1985-08-12 +1985-08-22 +1985-08-25 +1985-09-04 +1985-09-06 +1985-09-23 +1985-11-23 +1985-12-12 +1985-12-19 +1986-01-18 +1986-03-31 +1986-04-02 +1986-05-03 +1986-05-03 +1986-06-18 +1986-07-02 +1986-08-03 +1986-08-31 +1986-09-05 +1986-10-20 +1986-11-06 +1986-12-12 +1986-12-25 +1987-01-03 +1987-01-15 +1987-01-16 +1987-03-04 +1987-04-04 +1987-05-08 +1987-05-09 +1987-06-19 +1987-06-27 +1987-07-30 +1987-08-16 +1987-08-31 +1987-10-15 +1987-11-15 +1987-11-28 +1987-11-29 +1987-12-07 +1987-12-13 +1988-01-02 +1988-01-18 +1988-01-19 +1988-02-08 +1988-02-19 +1988-03-04 +1988-03-17 +1988-04-05 +1988-04-28 +1988-05-06 +1988-05-24 +1988-06-12 +1988-06-26 +1988-07-03 +1988-07-13 +1988-08-31 +1988-08-31 +1988-09-09 +1988-09-15 +1988-10-17 +1988-10-19 +1988-11-02 +1988-12-21 +1989-01-08 +1989-02-08 +1989-02-19 +1989-02-24 +1989-03-10 +1989-06-07 +1989-07-19 +1989-07-27 +1989-08-03 +1989-08-25 +1989-09-03 +1989-09-03 +1989-09-08 +1989-09-19 +1989-09-20 +1989-10-21 +1989-10-26 +1989-11-27 +1989-12-15 +1989-12-26 +1990-01-04 +1990-01-16 +1990-01-25 +1990-02-14 +1990-04-09 +1990-04-12 +1990-05-11 +1990-06-10 +1990-10-02 +1990-11-14 +1990-12-03 +1991-02-01 +1991-03-03 +1991-03-05 +1991-03-26 +1991-04-05 +1991-05-26 +1991-07-10 +1991-07-11 +1991-08-16 +1991-09-11 +1991-12-27 +1991-12-29 +1992-01-03 +1992-01-20 +1992-03-22 +1992-06-06 +1992-06-07 +1992-06-08 +1992-07-24 +1992-07-30 +1992-07-31 +1992-07-31 +1992-08-27 +1992-09-28 +1992-10-04 +1992-10-18 +1992-11-14 +1992-11-24 +1992-12-21 +1992-12-22 +1993-01-06 +1993-02-08 +1993-02-11 +1993-03-05 +1993-04-06 +1993-04-18 +1993-04-26 +1993-05-19 +1993-07-01 +1993-07-23 +1993-07-26 +1993-08-28 +1993-09-14 +1993-09-21 +1993-09-22 +1993-10-26 +1993-10-27 +1993-11-04 +1993-11-13 +1993-11-20 +1993-12-01 +1994-01-03 +1994-01-07 +1994-02-24 +1994-03-23 +1994-04-04 +1994-04-07 +1994-04-26 +1994-06-06 +1994-07-01 +1994-07-02 +1994-07-19 
+1994-08-21 +1994-09-08 +1994-09-26 +1994-10-12 +1994-10-31 +1994-11-22 +1994-12-11 +1994-12-21 +1994-12-24 +1994-12-29 +1995-01-11 +1995-01-19 +1995-03-31 +1995-05-24 +1995-06-09 +1995-08-09 +1995-08-21 +1995-09-15 +1995-11-12 +1995-11-13 +1995-12-03 +1995-12-05 +1995-12-07 +1995-12-07 +1995-12-13 +1995-12-18 +1995-12-20 +1996-01-08 +1996-02-06 +1996-02-29 +1996-05-11 +1996-06-09 +1996-06-13 +1996-07-06 +1996-07-17 +1996-08-19 +1996-08-29 +1996-09-03 +1996-10-02 +1996-10-31 +1996-11-07 +1996-11-12 +1996-11-19 +1996-11-23 +1996-12-07 +1997-01-09 +1997-03-18 +1997-04-19 +1997-05-08 +1997-07-31 +1997-07-17 +1997-08-06 +1997-08-10 +1997-09-03 +1997-09-06 +1997-09-26 +1997-10-10 +1997-12-15 +1997-12-17 +1997-12-19 +1997-12-28 +1998-02-02 +1998-02-16 +1998-03-19 +1998-03-22 +1998-04-20 +1998-05-05 +1998-05-25 +1998-05-26 +1998-06-18 +1998-07-30 +1998-08-24 +1998-08-29 +1998-09-02 +1998-09-25 +1998-09-29 +1998-10-10 +1998-12-11 +1999-02-24 +1999-04-07 +1999-04-15 +1999-06-01 +1999-07-24 +1999-07-24 +1999-08-07 +1999-08-22 +1999-08-31 +1999-09-05 +1999-09-14 +1999-09-23 +1999-10-31 +1999-11-09 +1999-11-12 +1999-12-07 +1999-12-11 +1999-12-21 +1999-12-22 +1999-12-24 +1999-12-25 +2000-01-10 +2000-01-13 +2000-01-30 +2000-01-31 +2000-02-16 +2000-03-05 +2000-04-19 +2000-06-22 +2000-07-04 +2000-07-08 +2000-07-12 +2000-07-17 +2000-07-25 +2000-08-23 +2000-10-31 +2000-11-15 +2000-11-18 +2000-12-20 +2001-01-23 +2001-01-25 +2001-01-31 +2001-02-27 +2001-03-03 +2001-03-24 +2001-03-29 +2001-07-04 +2001-08-24 +2001-08-29 +2001-09-11 +2001-09-11 +2001-09-11 +2001-09-11 +2001-10-04 +2001-10-08 +2001-11-12 +2001-11-24 +2001-12-22 +2002-01-14 +2002-01-16 +2002-01-28 +2002-02-12 +2002-04-15 +2002-05-04 +2002-05-07 +2002-05-07 +2002-05-25 +2002-07-01 +2002-07-04 +2002-07-10 +2002-08-22 +2002-08-30 +2002-10-09 +2002-11-06 +2002-11-11 +2003-01-08 +2003-01-08 +2003-01-09 +2003-03-06 +2003-05-08 +2003-05-26 +2003-05-29 +2003-07-08 +2003-11-22 +2003-12-18 +2003-12-25 +2004-01-03 +2004-01-13 
+2004-02-10 +2004-05-14 +2004-06-08 +2004-08-13 +2004-08-24 +2004-08-24 +2004-10-14 +2004-10-14 +2004-10-19 +2004-11-21 +2004-11-30 +2005-02-03 +2005-02-20 +2005-03-06 +2005-03-16 +2005-06-09 +2005-07-16 +2005-08-02 +2005-08-06 +2005-08-10 +2005-08-14 +2005-08-16 +2005-08-23 +2005-09-05 +2005-09-05 +2005-09-09 +2005-09-21 +2005-10-22 +2005-12-08 +2005-12-10 +2005-12-19 +2005-12-23 +2006-03-31 +2006-05-03 +2006-06-21 +2006-07-09 +2006-07-10 +2006-08-13 +2006-08-22 +2006-08-27 +2006-09-01 +2006-09-29 +2006-10-03 +2006-10-10 +2006-10-29 +2007-01-01 +2007-01-09 +2007-01-24 +2007-02-21 +2007-03-07 +2007-03-17 +2007-03-23 +2007-05-05 +2007-06-03 +2007-06-21 +2007-06-25 +2007-06-28 +2007-07-17 +2007-08-09 +2007-08-20 +2007-08-26 +2007-09-09 +2007-09-12 +2007-10-27 +2007-09-16 +2007-10-04 +2007-11-30 +2007-12-30 +2008-01-04 +2008-01-17 +2008-02-08 +2008-02-14 +2008-02-21 +2008-04-03 +2008-04-11 +2008-04-15 +2008-05-26 +2008-05-30 +2008-06-10 +2008-07-25 +2008-08-20 +2008-08-24 +2008-08-24 +2008-08-27 +2008-09-14 +2008-10-07 +2008-10-08 +2008-11-10 +2008-12-20 +2009-01-15 +2009-02-07 +2009-02-12 +2009-02-25 +2009-03-12 +2009-03-20 +2009-03-23 +2009-04-01 +2009-04-19 +2009-06-01 +2009-06-30 +2009-07-13 +2009-07-15 +2009-07-24 +2009-08-02 +2009-08-04 +2009-08-11 +2009-09-09 +2009-09-24 +2009-10-21 +2009-10-22 +2009-11-12 +2009-11-28 +2009-12-22 +2009-12-25 +2010-01-24 +2010-01-25 +2010-03-22 +2010-04-13 +2010-04-13 +2010-04-13 +2010-05-12 +2010-05-17 +2010-05-22 +2010-06-20 +2010-07-27 +2010-07-28 +2010-08-03 +2010-08-16 +2010-08-24 +2010-08-24 +2010-08-25 +2010-09-03 +2010-09-07 +2010-09-13 +2010-11-04 +2010-11-04 +2010-11-05 +2010-11-11 +2010-11-28 +2010-12-04 +2010-12-15 +2011-01-01 +2011-01-09 +2011-02-10 +2011-02-14 +2011-03-21 +2011-04-01 +2011-04-04 +2011-05-07 +2011-05-18 +2011-06-20 +2011-07-04 +2011-07-06 +2011-07-08 +2011-07-11 +2011-07-13 +2011-07-28 +2011-07-29 +2011-07-30 +2011-08-09 +2011-08-20 +2011-09-06 +2011-09-07 +2011-09-25 +2011-09-29 +2011-10-13 
+2011-10-18 +2011-11-01 +2012-04-02 +2012-04-20 +2012-05-09 +2012-05-14 +2012-06-02 +2012-06-03 +2012-06-29 +2012-09-12 +2012-09-28 +2012-10-07 +2012-11-30 +2012-12-17 +2012-12-25 +2012-12-29 +2013-01-29 +2013-02-13 +2013-03-08 +2013-04-13 +2013-04-29 +2013-05-16 +2013-06-10 +2013-07-06 +2013-07-07 +2013-08-14 +2013-10-03 +2013-10-16 +2013-11-17 +2013-11-29 +2014-02-16 +2014-02-17 +2014-03-08 +2014-07-17 +2014-07-23 +2014-07-24 +2014-08-10 +2014-10-29 +2014-12-28 +2015-02-04 +2015-03-05 +2015-03-24 +2015-03-29 +2015-04-13 +2015-04-14 +2015-04-25 +2015-08-16 +2015-09-05 +2015-09-08 +2015-10-02 +2015-10-29 +2015-10-31 +2015-11-04 +2016-01-08 +2016-02-02 +2016-02-24 +2016-02-26 +2016-03-19 +2016-03-29 +2016-04-04 +2016-04-13 +2016-04-29 +2016-05-18 +2016-05-19 +2016-05-27 +2016-06-27 +2016-08-03 +2016-08-05 +2016-08-27 +2016-10-28 +2016-10-28 +2016-11-28 +2016-12-07 +2016-12-20 +2016-12-23 +2017-01-16 +2017-03-20 +2017-03-28 +2017-04-29 +2017-05-27 +2017-07-07 +2017-09-30 +2017-11-15 +2017-12-13 +2017-12-31 diff --git a/newt/experiments/aircraft/aircraft_baselines.slrm b/newt/experiments/aircraft/aircraft_baselines.slrm new file mode 100644 index 0000000..5d2443b --- /dev/null +++ b/newt/experiments/aircraft/aircraft_baselines.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 48:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=3000 +#SBATCH --array=0-5 +#SBATCH -o aircraft-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. 
+for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python aircraft.py $SLURM_ARRAY_TASK_ID $run 1 +done \ No newline at end of file diff --git a/newt/experiments/aircraft/aircraft_test.py b/newt/experiments/aircraft/aircraft_test.py new file mode 100644 index 0000000..165b4c1 --- /dev/null +++ b/newt/experiments/aircraft/aircraft_test.py @@ -0,0 +1,155 @@ +import sys +sys.path.insert(0, '../../') +import numpy as np +from jax.experimental import optimizers +import matplotlib.pyplot as plt +import time +import pandas as pd +from sde_gp import SDEGP +import approximate_inference as approx_inf +import priors +import likelihoods +from utils import softplus_list, plot +from datetime import date +import pickle + +plot_final = False +plot_intermediate = False + +print('loading data ...') +aircraft_accidents = pd.read_csv('aircraft_accidents.txt', sep='-', header=None).values + +num_data = aircraft_accidents.shape[0] +xx = np.zeros([num_data, 1]) +for j in range(num_data): + xx[j] = date.toordinal(date(aircraft_accidents[j, 0], aircraft_accidents[j, 1], aircraft_accidents[j, 2])) + 366 + +BIN_WIDTH = 1 +# Discretize the data +x_min = np.floor(np.min(xx)) +x_max = np.ceil(np.max(xx)) +x_max_int = x_max-np.mod(x_max-x_min, BIN_WIDTH) +x = np.linspace(x_min, x_max_int, num=int((x_max_int-x_min)/BIN_WIDTH+1)) +x = np.concatenate([np.min(x)-np.linspace(61, 1, num=61), x]) # pad with zeros to reduce strange edge effects +y, _ = np.histogram(xx, np.concatenate([[-1e10], x[1:]-np.diff(x)/2, [1e10]])) +N = y.shape[0] +print('N =', N) + +np.random.seed(123) +ind_shuffled = np.random.permutation(N) +ind_split = np.stack(np.split(ind_shuffled, 10)) # 10 random batches of data indices + +np.random.seed(123) +# meanval = np.log(len(disaster_timings)/num_time_bins) # TODO: incorporate mean + +if len(sys.argv) > 1: + method = int(sys.argv[1]) + fold = int(sys.argv[2]) +else: + method = 0 + fold = 0 + +print('method 
number', method) +print('batch number', fold) + +# Get training and test indices +ind_test = ind_split[fold] # np.sort(ind_shuffled[:N//10]) +ind_train = np.concatenate(ind_split[np.arange(10) != fold]) +x_train = x # [ind_train] # 90/10 train/test split +x_test = x # [ind_test] +y_train = y # [ind_train] +y_test = y # [ind_test] +N_batch = 5000 +M = 5000 +# z = np.linspace(701050, 737050, M) +z = np.linspace(x[0], x[-1], M) + +prior_1 = priors.Matern52(variance=2., lengthscale=5.5e4) +prior_2 = priors.QuasiPeriodicMatern32(variance=1., lengthscale_periodic=2., period=365., lengthscale_matern=1.5e4) +prior_3 = priors.QuasiPeriodicMatern32(variance=1., lengthscale_periodic=2., period=7., lengthscale_matern=30*365.) + +prior = priors.Sum([prior_1, prior_2, prior_3]) +lik = likelihoods.Poisson() + +if method == 0: + inf_method = approx_inf.EKS(damping=.5) +elif method == 1: + inf_method = approx_inf.UKS(damping=.5) +elif method == 2: + inf_method = approx_inf.GHKS(damping=.5) +elif method == 3: + inf_method = approx_inf.EP(power=1, intmethod='GH', damping=.5) +elif method == 4: + inf_method = approx_inf.EP(power=0.5, intmethod='GH', damping=.5) +elif method == 5: + inf_method = approx_inf.EP(power=0.01, intmethod='GH', damping=.5) +elif method == 6: + inf_method = approx_inf.VI(intmethod='GH', damping=.5) + +model = SDEGP(prior=prior, likelihood=lik, t=x_train, y=y_train, t_test=x_test, y_test=y_test, + approx_inf=inf_method, z=z) + +opt_init, opt_update, get_params = optimizers.adam(step_size=1e-1) +# parameters should be a 2-element list [param_prior, param_likelihood] +opt_state = opt_init([model.prior.hyp, model.likelihood.hyp]) + + +def gradient_step(i, state, mod): + params = get_params(state) + mod.prior.hyp = params[0] + mod.likelihood.hyp = params[1] + + # grad(Filter) + Smoother: + batch_ind = np.random.permutation(N)[:N_batch] + + # grad(Filter) + Smoother: + neg_log_marg_lik, gradients = mod.run(batch_ind=batch_ind) + nlml = neg_log_marg_lik * N / N_batch 
+ + print('iter %2d: nlml=%2.2f' % + (i, nlml)) + + if plot_intermediate: + plot(mod, i) + + return opt_update(i, gradients, state) + + +print('optimising the hyperparameters ...') +t0 = time.time() +for j in range(2): + opt_state = gradient_step(j, opt_state, model) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +# calculate posterior predictive distribution via filtering and smoothing at train & test locations: +print('calculating the posterior predictive distribution ...') +t0 = time.time() +posterior_mean, posterior_cov, inducing_mean, inducing_cov, nlpd = model.predict() +mu = np.squeeze(posterior_mean) +t1 = time.time() +print('prediction time: %2.2f secs' % (t1-t0)) +print('NLPD: %1.2f' % nlpd) + +# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: +# pickle.dump(nlpd, fp) + +# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: +# nlpd_show = pickle.load(fp) +# print(nlpd_show) + +test_id = model.test_id +t_test = model.t_all[test_id] +link_fn = model.likelihood.link_fn +scale = N / (max(t_test) - min(t_test)) +post_mean_lgcp = link_fn(posterior_mean[:, 0, 0] + posterior_cov[:, 0, 0] / 2) * scale +lb_lgcp = link_fn(posterior_mean[:, 0, 0] - np.sqrt(posterior_cov[:, 0, 0]) * 1.645) * scale +ub_lgcp = link_fn(posterior_mean[:, 0, 0] + np.sqrt(posterior_cov[:, 0, 0]) * 1.645) * scale +inducing_mean_lgcp = link_fn(inducing_mean[:, 0, 0] + inducing_cov[:, 0, 0] / 2) * scale + +plt.figure(1) +# plt.plot(t_test, mu, 'b-') +plt.plot(t_test, post_mean_lgcp, 'g', label='posterior mean') +plt.fill_between(t_test[..., 0], lb_lgcp, ub_lgcp, color='g', alpha=0.05, label='95% confidence') +plt.plot(z, inducing_mean_lgcp, 'g.', label='inducing mean', markersize=8) +plt.show() diff --git a/newt/experiments/aircraft/results.py b/newt/experiments/aircraft/results.py new file mode 100644 index 0000000..caca00b --- /dev/null +++ b/newt/experiments/aircraft/results.py @@ -0,0 +1,25 @@ +import pickle 
+import numpy as np + +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) + +print('baselines:') +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) diff --git a/newt/experiments/audio/audio.py b/newt/experiments/audio/audio.py new file mode 100644 index 0000000..dca17d2 --- /dev/null +++ b/newt/experiments/audio/audio.py @@ -0,0 +1,181 @@ +import sys +import newt +import objax +from newt.cubature import Unscented +import numpy as np +import matplotlib.pyplot as plt +import time +from scipy.io import loadmat +import pickle + +print('loading data ...') +y = loadmat('speech_female')['y'] +fs = 44100 # sampling rate (Hz) +scale = 1000 # convert to milliseconds + +normaliser = 0.5 * np.sqrt(np.var(y)) +yTrain = y / normaliser # rescale the data + +N = y.shape[0] +x = np.linspace(0., N, num=N) / fs * scale # arbitrary evenly spaced inputs inputs +# batch_size = 20000 +M = 3000 +z = np.linspace(x[0], x[-1], num=M) + +np.random.seed(123) +# 10-fold cross-validation setup +ind_shuffled = np.random.permutation(N) +ind_split = np.stack(np.split(ind_shuffled, 10)) # 10 random batches of data indices + +if len(sys.argv) > 1: + method = int(sys.argv[1]) + fold = int(sys.argv[2]) + plot_final = False + save_result = True + iters = 500 +else: + method = 3 + fold = 0 + plot_final = True + save_result = False + iters = 500 + +if len(sys.argv) > 3: + baseline = int(sys.argv[3]) 
+else: + baseline = 0 + +print('method number', method) +print('batch number', fold) + +# Get training and test indices +ind_test = ind_split[fold] # np.sort(ind_shuffled[:N//10]) +ind_train = np.concatenate(ind_split[np.arange(10) != fold]) +x_train = x[ind_train] # 90/10 train/test split +x_test = x[ind_test] +y_train = y[ind_train] +y_test = y[ind_test] + +fundamental_freq = 220 # Hz +radial_freq = 2 * np.pi * fundamental_freq / scale # radial freq = 2pi * f / scale +sub1 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=radial_freq, fix_variance=True) +# 1st harmonic +sub2 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=2 * radial_freq, fix_variance=True) +# 2nd harmonic +sub3 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=3 * radial_freq, fix_variance=True) +mod1 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) +mod2 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) +mod3 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) + +kern = newt.kernels.Independent([sub1, sub2, sub3, mod1, mod2, mod3]) + +lik = newt.likelihoods.AudioAmplitudeDemodulation(variance=0.3) + +if method == 0: + inf = newt.inference.Taylor( + cubature=Unscented(), + energy_function=newt.inference.ExpectationPropagation(power=0.5, cubature=Unscented()).energy + ) +elif method == 1: + inf = newt.inference.PosteriorLinearisation( + cubature=Unscented(), + energy_function=newt.inference.ExpectationPropagation(power=0.5, cubature=Unscented()).energy + ) +elif method == 2: + inf = newt.inference.ExpectationPropagation(power=0.9, cubature=Unscented()) # power=1 unstable in sparse case +elif method == 3: + inf = newt.inference.ExpectationPropagation(power=0.5, cubature=Unscented()) +elif method == 4: + inf = newt.inference.ExpectationPropagation(power=0.01, cubature=Unscented()) +elif method == 5: + inf = 
newt.inference.VariationalInference(cubature=Unscented()) + +if baseline: + model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train) +else: + model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train, Z=z) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.05 +lr_newton = 0.05 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + # batch = np.random.permutation(N)[:batch_size] + # inf(model, lr=lr_newton, batch_ind=batch) # perform inference and update variational params + inf(model, lr=lr_newton) # perform inference and update variational params + # dE, E = energy(model, batch_ind=batch) # compute energy and its gradients w.r.t. hypers + dE, E = energy(model) # compute energy and its gradients w.r.t. hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +# calculate posterior predictive distribution via filtering and smoothing at train & test locations: +print('calculating the posterior predictive distribution ...') +t0 = time.time() +nlpd = model.negative_log_predictive_density(X=x_test, Y=y_test) +t1 = time.time() +print('NLPD: %1.2f' % nlpd) +print('prediction time: %2.2f secs' % (t1-t0)) + +if save_result: + if baseline: + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) + else: + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) + + # with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + # nlpd_show = pickle.load(fp) + # print(nlpd_show) + +if plot_final: + posterior_mean, posterior_var = model.predict(X=x) + # lb = posterior_mean[:, 0] - 
np.sqrt(posterior_var[:, 0]) * 1.96 + # ub = posterior_mean[:, 0] + np.sqrt(posterior_var[:, 0]) * 1.96 + + posterior_mean_subbands = posterior_mean[:, :3] + posterior_mean_modulators = newt.utils.softplus(posterior_mean[:, 3:]) + posterior_mean_sig = np.sum(posterior_mean_subbands * posterior_mean_modulators, axis=-1) + posterior_var_subbands = posterior_var[:, :3] + posterior_var_modulators = newt.utils.softplus(posterior_var[:, 3:]) + + print('plotting ...') + plt.figure(1, figsize=(12, 5)) + plt.clf() + plt.plot(x, y, 'k', label='signal', linewidth=0.6) + plt.plot(x_test, y_test, 'g.', label='test', markersize=4) + plt.plot(x, posterior_mean_sig, 'r', label='posterior mean', linewidth=0.6) + # plt.fill_between(x_pred, lb, ub, color='r', alpha=0.05, label='95% confidence') + plt.xlim(x[0], x[-1]) + plt.legend() + plt.title('Audio Signal Processing via Kalman smoothing (human speech signal)') + plt.xlabel('time (milliseconds)') + + plt.figure(2, figsize=(12, 8)) + plt.subplot(2, 1, 1) + plt.plot(x, posterior_mean_subbands, linewidth=0.6) + plt.xlim(x[0], x[-1]) + # plt.plot(z, inducing_mean[:, :3, 0], 'r.', label='inducing mean', markersize=4) + plt.title('subbands') + plt.subplot(2, 1, 2) + plt.plot(x, posterior_mean_modulators, linewidth=0.6) + # plt.plot(z, softplus(inducing_mean[:, 3:, 0]), 'r.', label='inducing mean', markersize=4) + plt.xlim(x[0], x[-1]) + plt.xlabel('time (milliseconds)') + plt.title('amplitude modulators') + plt.show() diff --git a/newt/experiments/audio/audio.slrm b/newt/experiments/audio/audio.slrm new file mode 100644 index 0000000..686cef7 --- /dev/null +++ b/newt/experiments/audio/audio.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 48:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=5000 +#SBATCH --array=0-5 +#SBATCH -o audio-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM 
+ +# Run the loop of runs for this task. +for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python audio.py $SLURM_ARRAY_TASK_ID $run +done \ No newline at end of file diff --git a/newt/experiments/audio/audio5.py b/newt/experiments/audio/audio5.py new file mode 100644 index 0000000..b274bf6 --- /dev/null +++ b/newt/experiments/audio/audio5.py @@ -0,0 +1,240 @@ +import sys +import newt +import objax +from newt.cubature import Unscented +import numpy as np +import matplotlib.pyplot as plt +import time +from scipy.io import loadmat +import pickle + +num_components = 5 + +print('loading data ...') +y = loadmat('speech_female')['y'] +fs = 44100 # sampling rate (Hz) +scale = 1000 # convert to milliseconds + +normaliser = 0.5 * np.sqrt(np.var(y)) +yTrain = y / normaliser # rescale the data + +N = y.shape[0] +x = np.linspace(0., N, num=N) / fs * scale # arbitrary evenly spaced inputs inputs +# batch_size = 20000 +M = 3000 +z = np.linspace(x[0], x[-1], num=M) + +np.random.seed(123) +# 10-fold cross-validation setup +ind_shuffled = np.random.permutation(N) +ind_split = np.stack(np.split(ind_shuffled, 10)) # 10 random batches of data indices + +if len(sys.argv) > 1: + method = int(sys.argv[1]) + fold = int(sys.argv[2]) + plot_final = False + save_result = True + iters = 500 +else: + method = 5 + fold = 0 + plot_final = True + save_result = False + iters = 150 + +if len(sys.argv) > 3: + baseline = int(sys.argv[3]) +else: + baseline = 1 + +print('method number', method) +print('batch number', fold) + +# Get training and test indices +ind_test = ind_split[fold] # np.sort(ind_shuffled[:N//10]) +ind_train = np.concatenate(ind_split[np.arange(10) != fold]) +x_train = x[ind_train] # 90/10 train/test split +x_test = x[ind_test] +y_train = y[ind_train] +y_test = y[ind_test] + +fundamental_freq = 220 # Hz +radial_freq = 2 * np.pi * fundamental_freq / scale # radial freq = 2pi * f / scale +sub1 = 
newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=radial_freq, fix_variance=True) +# 1st harmonic +sub2 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=2 * radial_freq, fix_variance=True) +# 2nd harmonic +sub3 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=3 * radial_freq, fix_variance=True) +# 3rd harmonic +sub4 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=4 * radial_freq, fix_variance=True) +# 4th harmonic +sub5 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=5 * radial_freq, fix_variance=True) +# 5th harmonic +sub6 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=6 * radial_freq, fix_variance=True) +# 6th harmonic +sub7 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=7 * radial_freq, fix_variance=True) +mod1 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) +mod2 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) +mod3 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) +mod4 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) +mod5 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) +mod6 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) +mod7 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) + +if num_components == 3: + kern = newt.kernels.Independent([sub1, sub2, sub3, + mod1, mod2, mod3]) +elif num_components == 5: + kern = newt.kernels.Independent([sub1, sub2, sub3, sub4, sub5, + mod1, mod2, mod3, mod4, mod5]) +elif num_components == 7: + kern = newt.kernels.Independent([sub1, sub2, sub3, sub4, sub5, sub6, sub7, + mod1, mod2, mod3, mod4, mod5, mod6, mod7]) +else: + raise NotImplementedError + +lik = newt.likelihoods.AudioAmplitudeDemodulation(variance=0.3) + +if method == 0: + inf = 
newt.inference.Taylor( + cubature=Unscented(), + energy_function=newt.inference.ExpectationPropagation(power=0.5, cubature=Unscented()).energy + ) +elif method == 1: + inf = newt.inference.PosteriorLinearisation( + cubature=Unscented(), + energy_function=newt.inference.ExpectationPropagation(power=0.5, cubature=Unscented()).energy + ) +elif method == 2: + if num_components == 3: + inf = newt.inference.ExpectationPropagationPSD(power=0.9, + cubature=Unscented()) + # inf = newt.inference.ExpectationPropagationPSD(power=0.9, + # cubature=newt.cubature.GaussHermite(num_cub_points=10)) + elif num_components == 5: + inf = newt.inference.ExpectationPropagationPSD(power=0.9, + cubature=newt.cubature.GaussHermite(num_cub_points=4)) + elif num_components == 7: + inf = newt.inference.ExpectationPropagationPSD(power=0.9, + cubature=newt.cubature.GaussHermite(num_cub_points=3)) + else: + raise NotImplementedError +elif method == 3: + if num_components == 3: + inf = newt.inference.ExpectationPropagationPSD(power=0.5, + cubature=Unscented()) + # inf = newt.inference.ExpectationPropagationPSD(power=0.9, + # cubature=newt.cubature.GaussHermite(num_cub_points=10)) + elif num_components == 5: + inf = newt.inference.ExpectationPropagationPSD(power=0.5, + cubature=newt.cubature.GaussHermite(num_cub_points=4)) + elif num_components == 7: + inf = newt.inference.ExpectationPropagationPSD(power=0.5, + cubature=newt.cubature.GaussHermite(num_cub_points=3)) + else: + raise NotImplementedError +elif method == 4: + if num_components == 3: + inf = newt.inference.ExpectationPropagationPSD(power=0.01, + cubature=Unscented()) + # inf = newt.inference.ExpectationPropagationPSD(power=0.9, + # cubature=newt.cubature.GaussHermite(num_cub_points=10)) + elif num_components == 5: + inf = newt.inference.ExpectationPropagationPSD(power=0.01, + cubature=newt.cubature.GaussHermite(num_cub_points=4)) + elif num_components == 7: + inf = newt.inference.ExpectationPropagationPSD(power=0.01, 
cubature=newt.cubature.GaussHermite(num_cub_points=3)) + else: + raise NotImplementedError +elif method == 5: + if num_components == 3: + inf = newt.inference.VariationalInferencePSD(cubature=Unscented()) + elif num_components == 5: + inf = newt.inference.VariationalInferencePSD(cubature=newt.cubature.GaussHermite(num_cub_points=4)) + elif num_components == 7: + inf = newt.inference.VariationalInferencePSD(cubature=newt.cubature.GaussHermite(num_cub_points=3)) + else: + raise NotImplementedError + +if baseline: + model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train) +else: + model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train, Z=z) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.05 +lr_newton = 0.05 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = energy(model) # compute energy and its gradients w.r.t. 
hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +# calculate posterior predictive distribution via filtering and smoothing at train & test locations: +# print('calculating the posterior predictive distribution ...') +# t0 = time.time() +# nlpd = model.negative_log_predictive_density(X=x_test, Y=y_test) +# t1 = time.time() +# print('NLPD: %1.2f' % nlpd) +# print('prediction time: %2.2f secs' % (t1-t0)) + +# if save_result: +# if baseline: +# with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: +# pickle.dump(nlpd, fp) +# else: +# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: +# pickle.dump(nlpd, fp) + +if plot_final: + posterior_mean, posterior_var = model.predict(X=x) + # lb = posterior_mean[:, 0] - np.sqrt(posterior_var[:, 0]) * 1.96 + # ub = posterior_mean[:, 0] + np.sqrt(posterior_var[:, 0]) * 1.96 + + posterior_mean_subbands = posterior_mean[:, :num_components] + posterior_mean_modulators = newt.utils.softplus(posterior_mean[:, num_components:]) + posterior_mean_sig = np.sum(posterior_mean_subbands * posterior_mean_modulators, axis=-1) + posterior_var_subbands = posterior_var[:, :num_components] + posterior_var_modulators = newt.utils.softplus(posterior_var[:, num_components:]) + + print('plotting ...') + plt.figure(1, figsize=(12, 5)) + plt.clf() + plt.plot(x, y, 'k', label='signal', linewidth=0.6) + plt.plot(x_test, y_test, 'g.', label='test', markersize=4) + plt.plot(x, posterior_mean_sig, 'r', label='posterior mean', linewidth=0.6) + # plt.fill_between(x_pred, lb, ub, color='r', alpha=0.05, label='95% confidence') + plt.xlim(x[0], x[-1]) + plt.legend() + plt.title('Audio Signal Processing via Kalman smoothing (human speech signal)') + 
plt.xlabel('time (milliseconds)') + + plt.figure(2, figsize=(12, 8)) + plt.subplot(2, 1, 1) + plt.plot(x, posterior_mean_subbands, linewidth=0.6) + plt.xlim(x[0], x[-1]) + # plt.plot(z, inducing_mean[:, :3, 0], 'r.', label='inducing mean', markersize=4) + plt.title('subbands') + plt.subplot(2, 1, 2) + plt.plot(x, posterior_mean_modulators, linewidth=0.6) + # plt.plot(z, softplus(inducing_mean[:, 3:, 0]), 'r.', label='inducing mean', markersize=4) + plt.xlim(x[0], x[-1]) + plt.xlabel('time (milliseconds)') + plt.title('amplitude modulators') + plt.show() diff --git a/newt/experiments/audio/audio_baseline.slrm b/newt/experiments/audio/audio_baseline.slrm new file mode 100644 index 0000000..1401387 --- /dev/null +++ b/newt/experiments/audio/audio_baseline.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 48:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=5000 +#SBATCH --array=0-5 +#SBATCH -o audio-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. 
+for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python audio.py $SLURM_ARRAY_TASK_ID $run 1 +done \ No newline at end of file diff --git a/newt/experiments/audio/audio_varyM.py b/newt/experiments/audio/audio_varyM.py new file mode 100644 index 0000000..62fbb3a --- /dev/null +++ b/newt/experiments/audio/audio_varyM.py @@ -0,0 +1,193 @@ +import sys +import newt +import objax +from newt.cubature import Unscented +import numpy as np +import matplotlib.pyplot as plt +import time +from scipy.io import loadmat +import pickle + + +if len(sys.argv) > 1: + method = int(sys.argv[1]) + fold = int(sys.argv[2]) + plot_final = False + save_result = True + iters = 500 + M_ind = int(sys.argv[3]) + num_inducing = np.linspace(100, 2000, 20, dtype=int) + M = num_inducing[M_ind] +else: + method = 3 + fold = 0 + plot_final = True + save_result = False + iters = 50 + M_ind = 0 + num_inducing = np.linspace(100, 2000, 20, dtype=int) + M = num_inducing[M_ind] + +print('loading data ...') +y = loadmat('speech_female')['y'] +fs = 44100 # sampling rate (Hz) +scale = 1000 # convert to milliseconds + +normaliser = 0.5 * np.sqrt(np.var(y)) +yTrain = y / normaliser # rescale the data + +N = y.shape[0] +x = np.linspace(0., N, num=N) / fs * scale # arbitrary evenly spaced inputs inputs +batch_size = 20000 +# z = np.linspace(x[0], x[-1], num=M) +z_all = np.linspace(x[0], x[-1], 2000) +np.random.seed(99) +z_ind = np.random.permutation(2000) +z = z_all[np.sort(z_ind[:M])] + +np.random.seed(123) +# 10-fold cross-validation setup +ind_shuffled = np.random.permutation(N) +ind_split = np.stack(np.split(ind_shuffled, 10)) # 10 random batches of data indices + +print('method number', method) +print('batch number', fold) +print('num inducing', z.shape[0]) + +# Get training and test indices +ind_test = ind_split[fold] # np.sort(ind_shuffled[:N//10]) +ind_train = np.concatenate(ind_split[np.arange(10) != fold]) +x_train = x[ind_train] # 
90/10 train/test split +x_test = x[ind_test] +y_train = y[ind_train] +y_test = y[ind_test] + +fundamental_freq = 220 # Hz +radial_freq = 2 * np.pi * fundamental_freq / scale # radial freq = 2pi * f / scale +sub1 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=radial_freq, fix_variance=True) +# 1st harmonic +sub2 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=2 * radial_freq, fix_variance=True) +# 2nd harmonic +sub3 = newt.kernels.SubbandMatern12(variance=.1, lengthscale=75., radial_frequency=3 * radial_freq, fix_variance=True) +mod1 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) +mod2 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) +mod3 = newt.kernels.Matern52(variance=.5, lengthscale=10., fix_variance=True) + +kern = newt.kernels.Independent([sub1, sub2, sub3, mod1, mod2, mod3]) + +lik = newt.likelihoods.AudioAmplitudeDemodulation(variance=0.3) + +if method == 0: + inf = newt.inference.Taylor( + cubature=Unscented(), + energy_function=newt.inference.ExpectationPropagation(power=0.5, cubature=Unscented()).energy + ) +elif method == 1: + inf = newt.inference.PosteriorLinearisation( + cubature=Unscented(), + energy_function=newt.inference.ExpectationPropagation(power=0.5, cubature=Unscented()).energy + ) +elif method == 2: + inf = newt.inference.ExpectationPropagation(power=0.9, cubature=Unscented()) # power=1 unstable in sparse case +elif method == 3: + inf = newt.inference.ExpectationPropagation(power=0.5, cubature=Unscented()) +elif method == 4: + inf = newt.inference.ExpectationPropagation(power=0.01, cubature=Unscented()) +elif method == 5: + inf = newt.inference.VariationalInference(cubature=Unscented()) + +model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train, Z=z) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.05 +lr_newton = 0.05 +opt = 
objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = energy(model) # compute energy and its gradients w.r.t. hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +# calculate posterior predictive distribution via filtering and smoothing at train & test locations: +print('calculating the posterior predictive distribution ...') +t0 = time.time() +predict_mean, predict_var = model.predict(X=x_test) +nlpd = model.negative_log_predictive_density(X=x_test, Y=y_test) + +predict_mean_subbands = predict_mean[:, :3] +predict_mean_modulators = newt.utils.softplus(predict_mean[:, 3:]) +predict_mean_sig = np.sum(predict_mean_subbands * predict_mean_modulators, axis=-1) + +rmse = np.sqrt(np.mean((np.squeeze(predict_mean_sig) - np.squeeze(y_test)) ** 2)) +nlml = loss[0] +t1 = time.time() +print('NLML: %1.2f' % nlml) +print('NLPD: %1.2f' % nlpd) +print('RMSE: %1.2f' % rmse) +print('prediction time: %2.2f secs' % (t1-t0)) + +if save_result: + # with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + # pickle.dump(nlpd, fp) + with open("output/varyM" + str(method) + "_" + str(fold) + ".txt", "rb") as fp: + results_data = pickle.load(fp) + results_data[M_ind, 0] = nlml + results_data[M_ind, 1] = nlpd + results_data[M_ind, 2] = rmse + with open("output/varyM" + str(method) + "_" + str(fold) + ".txt", "wb") as fp: + pickle.dump(results_data, fp) + # with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + # nlpd_show = pickle.load(fp) + # print(nlpd_show) + +if plot_final: + posterior_mean, posterior_var = model.predict(X=x) + # lb = posterior_mean[:, 0] - np.sqrt(posterior_var[:, 0]) * 1.96 + # ub = 
posterior_mean[:, 0] + np.sqrt(posterior_var[:, 0]) * 1.96 + + posterior_mean_subbands = posterior_mean[:, :3] + posterior_mean_modulators = newt.utils.softplus(posterior_mean[:, 3:]) + posterior_mean_sig = np.sum(posterior_mean_subbands * posterior_mean_modulators, axis=-1) + posterior_var_subbands = posterior_var[:, :3] + posterior_var_modulators = newt.utils.softplus(posterior_var[:, 3:]) + + print('plotting ...') + plt.figure(1, figsize=(12, 5)) + plt.clf() + plt.plot(x, y, 'k', label='signal', linewidth=0.6) + plt.plot(x_test, y_test, 'g.', label='test', markersize=4) + plt.plot(x, posterior_mean_sig, 'r', label='posterior mean', linewidth=0.6) + # plt.fill_between(x_pred, lb, ub, color='r', alpha=0.05, label='95% confidence') + plt.xlim(x[0], x[-1]) + plt.legend() + plt.title('Audio Signal Processing via Kalman smoothing (human speech signal)') + plt.xlabel('time (milliseconds)') + + plt.figure(2, figsize=(12, 8)) + plt.subplot(2, 1, 1) + plt.plot(x, posterior_mean_subbands, linewidth=0.6) + plt.xlim(x[0], x[-1]) + # plt.plot(z, inducing_mean[:, :3, 0], 'r.', label='inducing mean', markersize=4) + plt.title('subbands') + plt.subplot(2, 1, 2) + plt.plot(x, posterior_mean_modulators, linewidth=0.6) + # plt.plot(z, softplus(inducing_mean[:, 3:, 0]), 'r.', label='inducing mean', markersize=4) + plt.xlim(x[0], x[-1]) + plt.xlabel('time (milliseconds)') + plt.title('amplitude modulators') + plt.show() diff --git a/newt/experiments/audio/create_txts.py b/newt/experiments/audio/create_txts.py new file mode 100644 index 0000000..a5ea3f7 --- /dev/null +++ b/newt/experiments/audio/create_txts.py @@ -0,0 +1,9 @@ +import pickle +import numpy as np + +empty_data = np.nan * np.zeros([20, 3]) + +for method in range(6): + for fold in range(10): + with open("output/varyM" + str(method) + "_" + str(fold) + ".txt", "wb") as fp: + pickle.dump(empty_data, fp) diff --git a/newt/experiments/audio/results.py b/newt/experiments/audio/results.py new file mode 100644 index 
0000000..caca00b --- /dev/null +++ b/newt/experiments/audio/results.py @@ -0,0 +1,25 @@ +import pickle +import numpy as np + +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) + +print('baselines:') +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) diff --git a/newt/experiments/audio/results_varyM.py b/newt/experiments/audio/results_varyM.py new file mode 100644 index 0000000..004d96b --- /dev/null +++ b/newt/experiments/audio/results_varyM.py @@ -0,0 +1,184 @@ +import pickle +import numpy as np +import matplotlib.pyplot as plt +from mpl_toolkits.axes_grid1.inset_locator import inset_axes, zoomed_inset_axes, mark_inset +import tikzplotlib +from matplotlib._png import read_png + +save_tikz = True + +method_nlml = np.zeros([6, 10, 20]) +method_nlpd = np.zeros([6, 10, 20]) +method_rmse = np.zeros([6, 10, 20]) +for method in range(6): + for dataset in range(10): + with open("output/varyM" + str(method) + "_" + str(dataset) + ".txt", "rb") as fp: + file = pickle.load(fp) + nlml = np.array(file[:, 0]) + nlpd = np.array(file[:, 1]) + rmse = np.array(file[:, 2]) + method_nlml[method, dataset] = nlml + method_nlpd[method, dataset] = nlpd + method_rmse[method, dataset] = rmse +# svgp_nlml = np.zeros([10, 50]) +# svgp_nlpd = np.zeros([10, 50]) +# svgp_classerror = np.zeros([10, 50]) +# for dataset in range(10): +# with open("baselines/output/varyM" + str(dataset) + 
".txt", "rb") as fp: +# file = pickle.load(fp) +# nlml = np.array(file[:, 0]) +# nlpd = np.array(file[:, 1]) +# classerror = np.array(file[:, 2]) +# svgp_nlml[dataset] = nlml +# svgp_nlpd[dataset] = nlpd +# svgp_classerror[dataset] = classerror + +# np.set_printoptions(precision=3) +# print(np.mean(method_nlml, axis=1)) +# np.set_printoptions(precision=2) +# print(np.nanstd(method_nlml, axis=1)) +lb_nlml = np.mean(method_nlml, axis=1) - 1 * np.std(method_nlml, axis=1) +ub_nlml = np.mean(method_nlml, axis=1) + 1 * np.std(method_nlml, axis=1) +# lb_nlml_svgp = np.mean(svgp_nlml, axis=0) - 0.1 * np.std(svgp_nlml, axis=0) +# ub_nlml_svgp = np.mean(svgp_nlml, axis=0) + 0.1 * np.std(svgp_nlml, axis=0) + +# np.set_printoptions(precision=3) +# print(np.nanmean(method_nlpd, axis=1)) +# np.set_printoptions(precision=2) +# print(np.nanstd(method_nlpd, axis=1)) +lb_nlpd = np.mean(method_nlpd, axis=1) - 1 * np.std(method_nlpd, axis=1) +ub_nlpd = np.mean(method_nlpd, axis=1) + 1 * np.std(method_nlpd, axis=1) +# lb_nlpd_svgp = np.mean(svgp_nlpd, axis=0) - 0.1 * np.std(svgp_nlpd, axis=0) +# ub_nlpd_svgp = np.mean(svgp_nlpd, axis=0) + 0.1 * np.std(svgp_nlpd, axis=0) + +lb_classerror = np.mean(method_rmse, axis=1) - 1 * np.std(method_rmse, axis=1) +ub_classerror = np.mean(method_rmse, axis=1) + 1 * np.std(method_rmse, axis=1) +# lb_classerror_svgp = np.mean(svgp_classerror, axis=0) - 0.1 * np.std(svgp_classerror, axis=0) +# ub_classerror_svgp = np.mean(svgp_classerror, axis=0) + 0.1 * np.std(svgp_classerror, axis=0) + +legend_entries = ['S$^2$EKS', 'S$^2$PL', 'S$^2$PEP($\\alpha=1$)', 'S$^2$PEP($\\alpha=0.5$)', 'S$^2$PEP($\\alpha=0.01$)', 'S$^2$CVI'] + +num_inducing = np.linspace(100, 2000, 20, dtype=int) +fig0, ax0 = plt.subplots() +for method in [0, 1, 2, 3, 4, 5]: + ax0.plot(num_inducing, np.mean(method_nlml, axis=1)[method].T, label=legend_entries[method], linewidth=2) + ax0.fill_between(num_inducing, lb_nlml[method], ub_nlml[method], alpha=0.05) +# plt.plot(num_inducing, 
np.mean(svgp_nlml, axis=0).T, label='VI (sparse)') +# plt.fill_between(num_inducing, lb_nlml_svgp, ub_nlml_svgp, alpha=0.05) +# plt.legend(loc=1) +# plt.ylabel('NLML') +plt.legend(loc=3) +plt.tick_params( + axis='both', # changes apply to the x-axis + which='both', # both major and minor ticks are affected + direction='in') +plt.xlabel('Number of inducing inputs, M') +plt.xlim(0, 2000) +plt.ylim(0, 30000) +plt.yticks([0, 10000, 20000]) + +# yl = ax0.get_ylim() +# x1, x2, y1, y2 = 100, 700, 18850, 43000 # specify the limits +# ypos = yl[0]+(yl[1]-yl[0])/1.7 +# print(ypos) +# ax0.text(1200, ypos, '\\tikz\\node[coordinate] (n1) {};') +# ax0.text(x1, y2, '\\tikz\\node[coordinate] (r1) {};') +# ax0.text(x2, y1, '\\tikz\\node[coordinate] (r2) {};') + +if save_tikz: + tikzplotlib.save('audio_nlml.tex', + axis_width='\\figurewidth', + axis_height='\\figureheight', + tex_relative_path_to_data='./graphs/') + +# fig1, ax1 = plt.subplots() +# for method in [0, 1, 2, 3, 4, 5]: +# ax1.plot(num_inducing, np.mean(method_nlml, axis=1)[method].T, label=legend_entries[method], linewidth=2) +# ax1.fill_between(num_inducing, lb_nlml[method], ub_nlml[method], alpha=0.05) +# ax1.set_xlim(x1, x2) +# ax1.set_ylim(y1, y2) +# ax1.set_xticks([]) +# ax1.set_yticks([]) +# ax1.text(x1, y2, '\\tikz\\node[coordinate] (z1) {};') +# ax1.text(x2, y1, '\\tikz\\node[coordinate] (z2) {};') +# +# if save_tikz: +# tikzplotlib.save('audio_nlml_zoom.tex', +# axis_width='\\figurewidth', +# axis_height='\\figureheight', +# tex_relative_path_to_data='./graphs/') + + +fig2, ax2 = plt.subplots() +for method in [0, 1, 2, 3, 4, 5]: + ax2.plot(num_inducing, np.mean(method_nlpd, axis=1)[method].T, label=legend_entries[method], linewidth=2) + ax2.fill_between(num_inducing, lb_nlpd[method], ub_nlpd[method], alpha=0.05) +# plt.plot(num_inducing, np.mean(svgp_nlpd, axis=0).T, label='VI (sparse)') +# plt.fill_between(num_inducing, lb_nlpd_svgp, ub_nlpd_svgp, alpha=0.05) +# ax2.legend(loc=1) +# plt.ylabel('NLPD') 
+plt.tick_params( + axis='both', # changes apply to the x-axis + which='both', # both major and minor ticks are affected + direction='in') +plt.xlabel('Number of inducing inputs, M') +ax2.set_xlim(0, 2000) +ax2.set_ylim(-0.8, 1.25) + +# yl = ax2.get_ylim() +# x1, x2, y1, y2 = 100, 640, 0.69, 1.75 # specify the limits +# ypos = yl[0]+(yl[1]-yl[0])/1.5 +# ax2.text(1200, ypos, '\\tikz\\node[coordinate] (n2) {};') +# ax2.text(x1, y2, '\\tikz\\node[coordinate] (r3) {};') +# ax2.text(x2, y1, '\\tikz\\node[coordinate] (r4) {};') + +if save_tikz: + tikzplotlib.save('audio_nlpd.tex', + axis_width='\\figurewidth', + axis_height='\\figureheight', + tex_relative_path_to_data='./graphs/') + +# fig3, ax3 = plt.subplots() +# for method in [0, 1, 2, 3, 4, 5]: +# ax3.plot(num_inducing, np.mean(method_nlpd, axis=1)[method].T, label=legend_entries[method], linewidth=2) +# ax3.fill_between(num_inducing, lb_nlpd[method], ub_nlpd[method], alpha=0.05) +# # plt.plot(num_inducing, np.mean(svgp_nlml, axis=0).T, label='VI (sparse)') +# # plt.fill_between(num_inducing, lb_nlml_svgp, ub_nlml_svgp, alpha=0.05) +# # plt.ylabel('NLPD') +# # plt.xlabel('Number of inducing inputs, M') +# ax3.set_xlim(x1, x2) +# ax3.set_ylim(y1, y2) +# ax3.set_xticks([]) +# ax3.set_yticks([]) +# ax3.text(x1, y2, '\\tikz\\node[coordinate] (z3) {};') +# ax3.text(x2, y1, '\\tikz\\node[coordinate] (z4) {};') +# +# if save_tikz: +# tikzplotlib.save('audio_nlpd_zoom.tex', +# axis_width='\\figurewidth', +# axis_height='\\figureheight', +# tex_relative_path_to_data='./graphs/') + +fig4, ax4 = plt.subplots() +for method in [0, 1, 2, 3, 4, 5]: + ax4.plot(num_inducing, np.mean(method_rmse, axis=1)[method].T, label=legend_entries[method], linewidth=2) + ax4.fill_between(num_inducing, lb_classerror[method], ub_classerror[method], alpha=0.05) +# ax4.plot(num_inducing, np.mean(svgp_classerror, axis=0).T, label='VI (sparse)') +# ax4.fill_between(num_inducing, lb_classerror_svgp, ub_classerror_svgp, alpha=0.05) +# plt.legend(loc=9) 
+# plt.ylabel('Classification Error') +plt.xlabel('Number of inducing inputs, M') +plt.xlim(0, 2000) +# plt.ylim(0.0235, 0.05) +plt.tick_params( + axis='both', # changes apply to the x-axis + which='both', # both major and minor ticks are affected + direction='in') + +if save_tikz: + tikzplotlib.save('audio_rmse.tex', + axis_width='\\figurewidth', + axis_height='\\figureheight', + tex_relative_path_to_data='./graphs/') + +plt.show() diff --git a/newt/experiments/audio/speech_female.mat b/newt/experiments/audio/speech_female.mat new file mode 100644 index 0000000000000000000000000000000000000000..c4f5a0d2a7c1ae3f311516f54a6f4cd470c7fdd0 GIT binary patch literal 170657 zcma%CLzpEDkeg}S=Cp0w*0gQgwmEIvwr$%sU-Pw%{r9l<&8fZ|>Qsj$^+irdMOH}o z4;ww>A2}fvIx|ZfQ`$d&Z46z^?44}6|KmiY*jZ`+h&Y)Vx|o{$vA6q=v$y*r?P~Xj znd=WTGdC+MHw)7rW=3YlKmT9+`ak@Zl0*D&#?A!!<&n+m2Fbei^YZgy3&b}~F14^s zDEz=sN)Q%#Hwl5lmMVY>6>gLWsjj|C@YGL;PvA@JF@J^=*mM5=@t)<2d*GZq^W~p= z*mWj|qri<6Ei6_}0JO#%()@(~z;6DGy=)UFj2HiqWten@#%ND8wEX;p=R2ml&sSK_ zV@KIgn7Hig9_KJy4TRqcPnaJvC6Va}ct228<+AB)%5{02HPCrnMeV=QS|DC5C9u$z z;Twv39-r3gP#An}2tBDjqQ~>aO)ZJ`K5NSxCYx`v3K6gAL8G`tY;ChAHPU=_oeU`}}pj?k>zG$|2) zQ=XXffq#JX&3Qm)O8XLOvNlL}Uq_5(1t#3y-37xEeguQUeeSPzI~cdM5Jp7fYIFVE z2Fs+9lP#xx3bl(((ACcnj^_5=@cNSdit2{-%kxJ2)yBPdtOf-pSkQWKh0Eoiu5!Pv zFK`1J$|a%7FY_*_Im%-EVZ^YnYTx$lF#!5qLeQ1+0Dwg*tzTe^bVgwiJt<)B&+r_pH)WiB_f zYi0kbHuQR%Wbq5;^kFZ-^gVzUhWM14huGlRpIBGQ-+r12Yqekv#bN)DZ9Miyd5vcuIQ`)`95t*a*feT@^AZnts!bv7lm_w;h5PMrD9a};><9V<)Q z=g}bENQ7FVu5AA!KP$S&I>)RsCCV-Ea$lScr($hxq4}BeE=p0T~yq$o?JBzaXYZ!p7<1JT=b@2Ju)|w;0 zRbw997DJurUW#+rNGf76jR%fxBGBi(>xStPv~(hIe=gr94V zvv}hAKG1ae%o7(CD%`16V{o;iaN9EoFfverYj?}MsM3Ffa~x`e?+QB{*tb+1b&Upt zg0oMQF_sM=vsGtMRN{q7DgrO-8y!F_ki9DsYA`srn`~Uf1WnKb3fJ5l@2|a6EwZI* zcvNw@1x3V!`bVwObe%W|){(;Ti|A_CDa;JJKwM6NTU1h1UYYc^AqMAw*H!iBD%p?k7V+wm zgYiOJXFo=zN}vBo*aEmvliNU~PWwuY{7Cw+NG)5A>rW?`j%S7On5vwbXe`w~cihOg zJ*&{(Z5<)orbS7(Lt(?zml$Eqybq3#p`j^4#lJaGf|tE>UhJCr4ZMvwRm>RcJ3Ii( 
z0>&durmI?Jy{F=1PJ0mMCT$bzRc4tVm|anBxINViI606X#8!1V1;`JJs{%89g^5SE zUdwuEB?19g|-O=*eMu?=s=AFLF{^O3(?K7D&wn`w|-8+&B1h`?T^*wm*yLkuTWBnaLt zN%@`857IE=M(w4K=0xpB3l#Zgnr&=AAZB^8&c9?wFUZaYHNI4ufrf`i_wqw#&%vn* z?@JC3y3(!QMmXYhn`_T>Wrvp5g;Gg$;Ma7%Gqf9tipnQiZ39iBDmv!ceu=38#5{Rj zQ!^9>tb0$oPB2oOK;pRygLm*q9rtkXO4h4pWIAUhX$h{XNRa3(>~`&``bh^SQ0?afPUv>sSTh0J$-aM@iUmT%X~EUZL(p#oaKMmIHuING$m-<%-b)8@ z;`weIdpmOaDr?C_4-nUC7+<05=Nwnuu8VeX;BOFpTZ}42i%k6{Gv#0A3a}!|1NP`( zGRp2VO)n~6D*B5=*phQ;#oa3_b8T;EobQ=~B1h_hE}m+OjtZH1pe%1u+z1C8vp#X; z6AG4VRY}oIw&0v{_>z%4GN-ao-ZLtc53bMBjpnbwgGBisb_Cy{(7CZsPV(M@ntqD6 z*ejIovzivPl<j!|AG)f@jiUvJt)Z#!`^K&cgMsd#38wQE zlQee8Di14u_);BW_4LIL5GbE>Z$CyrXpzpWxAqk7GxO6x^X0^X#kbY0iVIF@=jpVs zEen8{ik%B5W-<7Vnh01#5VhpuzkE~ZD zS5#)4R+)>nNn5&%M?Gkd2m9jri5kh9dz+^~4VF5}j7MkqGG7H!K$c}y#I-oR!pMvd zjTJwJk#F}G7jsh)9uRjw7A9DH9SC8ef-YITxdbI4Pv&aHYZ(H4dyZ#e8^Yn(odv0q zwND@s-z}kz8p2%3GP%VD>O9kz`2s@s2bHf;?Y0FR%)b)Y=UahJu#3z;qPq#h!RzwF zySD~Gexv4=OK!U_gV0xTUjSbz($1haYZpxNnP?uF$PRGhs7R==0WFFDjaanFj^)Sw z?Of3TpIIFw!|AnU07z^3I;da=B)jq6d6vNM+CFEh*)|I~eNVCX(&<^8DgEVZccqv3 zW4rUh4n@V6?oU$;ZS=ni&8xbssmmg z!J03($UgYb*j|(Im8}8p>s!JvXWVMjm0Trk+t5>M3p(HlWd?IPMvr4pT{qf=z11py%@;)L^Y@1tN;FZM zUX6b53r&QxQ)BYF`7hm7^Mt|lx%rba2Qk}i!fCl^xloPiTQh~m1=TzxpXLh*<9Xz0 ze12Dw0n`c3mDe{zG-Gag^8015HM{fpEbU-$Dp(Gc_ogApCWm7ahpve@8#GESr|-42 z;Igv^PZa`Rv8QZaJGV8TtAf9lveKA_7<49|?iu|e*PTBzGHHM4LMPqd7uTkDh&fADyel(M`J9iOPa{A;Jil zDp(9l)htjZMUdN=DRSNSS7Ad|+V9>r+7EQgpR~!S)j(l}PTla?4!xP|&XcCOHE;3x z>@k0GaL29O@y}hZzeCpg^yxi7PH+r~Hb^;0T9 zjGK%gtr2@VsQdI|i=5$4K9DY7SOtvLt~9*b>%TYrP``K}S^kncnlk?>N8BfgEBet% zgqV-(a6Kk?__Mo8c%RioAWE_F{!nogOo`O6U}4A`(pv{(#7hxgpLd`nUcJE>C6(1w zh5&yQHES^4Yfa+8K5JskYX#QVOHacJBoJDyuyyogEy6ly(4+6NMauwUxZ&CwW)8o_ zG0AG{TxOe%UO@GtIBGm%;SSUXY-DfFBD20KCir!lg0=j6Nz`MD-M`q07T|50zwD;w zsxt%wcSr58T+zPNg3P_5!x?xkj&@~AAQ|u2+b_y|TJ4!Ve0!Pww{L2DqkR1^Bw6C; zFU0JI73VgQ!`8}$dm6h+B8OD8o^H5oz5H{3 zvg@d1h&#MD)XRa6-}X*QLi1SILzosq=brZPVt>x{6uT?q>(O4P%sO6-IU%F-RhY|H 
zj;H52vz3qAvC>5xMa}KV^zo>>3+w+^;WGT_?SAjouPph+E!1r}p8FgVXDLdDZcCc@ zGP5DWQ@Fc59~ssfk>4XGFPm>a*Pm1WPMG1?Sh~4gif~@nZT=bd5^{7bV=^29V*j~RUUKM!C_PS~hoR35 zD_5X_^P}GaE!UZnZmTh7==-QIMFo{Yu?)rBc* zEX@O|qUlwOOu)j(kmg>WZEM_ZZ^pQ`^g(re`1vW+2V38dFYzbE!V|Kd$r%ejyJHT4O ziouUjUm=?Px!R1?H*0Y6WIgrId`)okjRizL3U#6Bj72{&XLK{eJiUh`)%EA;1%?qv zbembu%RIjZm@z_jhj6Ugpoj`XuC_sHEWcBYitS#QIWdSve}p>0lh9-N=WX;?Umnjr zwionye0PG=HSNHDrWF>gWc1sQwmVkyGno0E{l}7O+F(MSB#oUpQ>^#7jr5lmCa)pYtCdv1~RJZqfK#?R9aAgrSgF*8UUTUNGSuA7+14s zg7C5FRHGFYB{H{w(&QRh+ljkAU)m#zVpN_ zUA4H~Ho78@+1~*fIm5q!!Pksvx{vaG_1NDTXP)rVtO4Sa9rrS0R{cXFovuyYa2R!E z7?`h^gRh?2Dbrt40lb98q3_iTPdLU6|7>s?vw3-4U*HNmLFI1iWw}y!GCGfF?_o`K zN1q*zISSug+iev-^)S$JdE zp__rJ zf=*= zf#r)|_&BYDg3XI}G;!rjDAQG_*v$no!&g>ng7zdN1D3}p?DZ3pj!PmbPyir`TX z7)1QjZVid8HYRCk8q*`-HZZ%MzYK|;bmqF7HVlXl@jvOU8d*@2-gO7wOw0~nyq{A; z^JGQOyH%H;A!WhN#$-C8=wd=Prk?JTL1Vxa21C88#Nmd^Kvi^V&L2e!=Ceoe3A}>z zHMz?F#KcAz5HC6Cmyoo3SAcDS10p67XH!66n}URIz1tAXctczIrZ4?NYBiVtxP$le z{ha^@-%Jkg_`Jy9_At4h+9|VydB00Om~kJdAL5J+=c(ix?vBn-7iM)95n_ksJ5?vuM#n&5R$p z2#QKwVA*cZuFDFru4kOIP6|qN+H>bkIfP^uCaO*OP{bvecn^QRFWtPP`xG{=v*&V;q!n&~J z*joVR+eB7o;opVjRAFIB_nont*}sy4V{LV@)}8!83iiJ7WW@^23Reo| z6SE=uIoo>^80%E0MJjsc6U8f?4k3ZaL|yh_lz7oXm%iOGt5Uhb5hiENdoj?!J2~_7R8Uqa{ z@i`JRY+3e zB`3Lx2I@H%Jh^5UU*2a%Sa~e7eBcp_gYkODr@L8}=O@*vJOFK|jK?00!ZY zgO6QL0B_xA*j4ARpt7Cr)e?++v+xbisX#7|g?8-L1a{DPhhej>+f;k#xSmQ?JaB|C zktxSU5d&x0G7U*Ds6u+hy|-lnn6aZHdnrh{;Cpm`_K#O^tZ0|RO-Ait*uAc8m>emz zCW^+xMuBq5B^3M1AA-VyY-9jH(Njjm@JJWU3a7Ain~(F$^neWARPk&>VATc1bngSV z8Dej(;6-mf&YQ1lpIeQsz|#rbWR4%h3ZM@GQ#4{W+=l*&E9^0BQ7crYWtAV)&;80* z^iQXGGXSnM=Y@QYeSd9=YHiI97fEN^_c&mk`L6KD;4?!mU{k^y=8$hXEZ zLrA#ubv;V@ulp~8c4R(~J+zD>I(GzN{;7`Om7|`0-s8Rdoif<44!>^0)(?2)$1dZJ z5iKEsx^B9{GUQ>zb!PsScW{uird(CTQ+?r7vin4m8hurkKPq=SapU3427Yy%M8N_9%xuVPS zU*EYeu&)|d{heKpD0ZL){9cnVtxusjwTPNSB1P9$DTlbPLK~)TVteBo)B5goU)F^cN#i zDS_X^LMI|J#czrVJJrQx!VhQ}&7OdWEV;5?(yS$9H1zX5RXG>Y&!PI&@H0T9$c_Yg 
zj>tK5ibyl~lk#H-eB828zas=O6S>>o*$13xvaNaPJq<+R7Te|Sh#^?B`T<6i*oDF76v(?s~D!G`cY{KNRNtC#xAT2;a!4rBVh|PW5oUz?8P%hl|ib1PY6zRmZeCbwb8# z0>14Kaldf7#f@_(-iR=s+BLO+XJAUYe2xK}ju~06xlSLg-mv(5J4}=ccO$A8;W{Lg z9)gkl<;7hW2PaemW*=vcEN6sY@4DydCEoDh-`2EYJf6F=vAkYPfeonlB#7Z{Lp5U6 z$3BkAv2^H~>Z1vpB{)&z#IPlO0d8Ryj`@e?tJUsIAK6-%zsQVJM@0U)qAHRXw)Sn> z5<_DQaZx%tL(vzXhumK@k|!m9E33MLu$>4EmVChL$})2QrRj)$2y~O$uCZNdmNoWL zb;LsI*0chxUzr6nZ;#tbjh zps+)~Lq88iV)&7_4`U8bE*$AU*YAo+Mpl!DIYAMne9OtF0heb(HRY6hCoRHg$`i?c zRjI#YJLz_MXtgjfJE3Lbg65$0*B1p)wS#?gtB|&)b?_dXpi5tL(D*!z2$rMwY&XBt z!z!20Zy@_d1-+*ahWe|13x*B288xGZ4K~kSim?Y)oK?7w?S6qRRpbtw1&{ne^ClVr zzGJ=${P+I;`Jqj7=Uw^19INxbPDxP}m)r;?op?i2WNu`mY!oim$WD$0I-rXAMS$lpM1J*rwWRH1f`_EoxKlrpDcX)P#~ zOYBi|qCqzClkCj+GJlD5jIR=;IB1^~@+1<1$HbZ!Eow9GfDF5j|F~&P5Yh;wr}Q~D zqVfkRst1>R9)a9mj+=QO{{#I#XW0?-$Nq~#`?^*|6QD4Dfc^0%?NRmGFdf!|Vw3~4 zya-nn%stsaRX#wUpV2?zKv9J*Nn316+o}q9TlAJAPtxLC?PHYpZ}=T^ye#A9!Xad| z9;5mGpayujo8EV`3qNv`SV}AG+&@i_dK*gh%}2E_`?mA_wVo282?ILB$VV!W3gAY_ zGsmRRAuc<>$hgKgALJF4!KHUYBr zO7DC4THXZX8x_QiJ)x8A?qaEF4GY-uT%P$RNkq8pY<_PY&W@n-1+>2;r-XNM6^yli&Y|%=myLxuFDvXbKWfZ82vB${ORbZE zgGY;yqC7|DTfA+@M|g6x!D89Gn6qP!+guVEJfJ<0<^_XQ2e&Mm|J2g$yWLJ@UT+30 z`b<4fx4YhwIOKkV?3V#Fxyi%-ll*KxTx^UN8Pb2qTXGrSvk=Ob7@Q{`k5XHgqJMbtr z_t#}aQf8X>>gSC6T}s~tNKLawH1YKQiQDzms~F!1mgNLL$X~z_FDR*M%9SyZn%^Jp zxGbP8VlQ7|@^gkyOIF1NR}n2$*+#W!PlGk=9dz9r^{sr0qP+qh?a+2OUt z3GTwsDleF=fO>{Rzu|i}rlYT>O7c$^ECzOC#2%wn0sK|i*b_MM%IA~jNN>rJ1CR~S z!#b*;5&8yTo{QM;2@()UTVKMx!8pE@B*UDJ!ezL3@v1@L_AbP@6uNeVcL<~jJ`w}u zE$}(*DMJs-ZRn;uhR?^GyCHVm=-8bYEBX2*jTf#hoJvgE>TIv?1b1;w&^yT=Fm~dI z(E>GJVY!7o9EUYo19#c12b+2FlA4B1#)-JUX3oo}-G`AG!`360GaUb}Lb+REWrQ*0 zC=-&`&J0Zn`XQobftPy-7^s!Cg1Hwb-kRk$0!S zVj6TFltfs5d%M4_o2AhFu5wbAC=+h&{}3yho1TW?#LumwTW>?KmJ`mK)z<~blwf*a zs5OmXMb+3smk2)TYiJa=RN0Sc$VJg9>7UlA0v)r3(U9%!H0^Exn-|knf6@qAFllQn z=oW)A)UT+h)BE*u^6?RSmUw{l!G?0V<2l~jL^^lG1Z#E@c4CKT0vX z6DdT!@*j~9`#9leGQ|8bG;_t^6Q*RNwi6mImf%&LpK*h|@$wnXGqAM=vSW7QY)e8p 
zv7FiPXz`1*1f^=3o{>Z9N{4p~EYY?=FAG`u+=nbc#A(^7qYQ}OJX`A*DgNmgrF0>O6D(?1ly<@{Q@X!m)*9zOT-e4 z+ZGFp825BGs?H(z$#c@-_#MPIKs%!2f$mnJbV7+quwYu@uxbVQtr`EqmB&h9r_J1s zy)ze^6*~PGxotW`VZmYd-^K5~ggk$b$@GH0Gkc`e1m+{WPBEG` ze0ct1eJB~lx(N*8y3UER!V~vqbMyt3gdv9-ww(XjVr;A{>r!we6ulHVNB$7@PPhwl z2936*5$3&CV4@U}^ZcR?%;OLK{8k{RVV`kOANOwoKKr=b6qZ z92pHCZO{RTPfC#p%@GcG|Ge%QoCa7RHIKTUOdXK*?^60>ZVH7J zJZ%tq%_{G^hH|5R8)HFT9PsgZdk0Lr`B}g0k*~X}o_ViqNK*@?dsrmF|&~AjJ=YKj~?c@EGfQNQU z0u0+PO6A8ap)!clHlhCdr+m7 z8Iy+-1m4R#PYe=Zz0uzDiFM-B!|edeF0H{58YbBPZNU6xOdpwPpp7_qNmpO~3Cg4h zB^;erFF-k$+~e?oZFJ(;x7WWASMxV17r=vDI6hWR zHx;)LnHRDX1I_D>j09p8mMs`g@Wm1GTag<57NbHw6Ol?^h|B$# z({_`n+uzwm-=0vomHNWe-U$*)Mf|I#vML$*>RP3>E@M8tW;vJ6Ty0s{9 zyB4w62n;49XO+hvFaaXT*O!w&#)0U1>SaePrF{EjSqb@4$0Qla-%>^Eh&A|60Y^Ot zKO|o3oKsjypgdzz)}!YJT*SwBC3P8|$IIt1X#Dy%u+~q{$(;tvVJ~{tZ{Zj4Y|{c{ z-G03F-O%Mt3%R`ecmW$zI<$;KTmz4^3H#&f~FqtWal=a8++BsYBBROszw_3Ic zJ{D{vcCBf=>&Ga0Bo5nU)}eRO^@p_4VdG=SX^!#zUh{HZ@y88K^}TJj?f_D$<4`{f zNe?;=@Ue#h?>g`Rh=tRfMW&2vw^-g|g|<#co5EZ>`$Y&eCYUB4@{Q&5s~f@~(xzRT z_<;wnjX}+ou30otdp_K)nN|hZXLr~lcl_4xl7}@VAT#h86IQL|FB=LWgeAKhL2B5v zk3n#PAeHq}>8pF-gJ$VD_!Bagz<5Xd)-aJodUc1o1!)&58gqR&)y-~3%${^MwCJ0N zP{m2PRCcJ)A8~h)*X-N2pW$VOB4mIH2YY7sG+R5A8`(zI>%A(}&foeWeTh@g+A~d{ zqyvem+P$M-4;u0WA15ts7pN!Z&?g1^JKChuc#fIeh^$a)ii1aT9oe1lA8E{QmWQs_ zp0RA?Dc~FCt9>f6@y?&Vr6OjdJnz``_I5_2p}Lp8b^C#UC!eXVggvpr&hwWvOi7dm zOg0{xT}`BVMO$Q=HeXyMKY5LIKO=CT(JY*pbPGn+NnH`bdNAo1r8?&vjey}eH&$6^ z0x)#4Dil9ikxZ8rZHO7@c=Wi=J}C|u#OJY&jwr-|7L`S-@iM=7?(Mhn3pd0V4uVWy z^iAmY%D2;uuIqUkDzd1WZ_qG-ulsARn!m{{q!!+d;C5m>9_4#BL9FayAr!Rmw})veX?8?cz?m;zUpxRaD&Rns`_vqP`zIMA0Mj3;2U!N6lT-{>r|f<_6cTA&tDvI1lE?xkPcf zYt%#s>``>E_rQ}JTA8JkUCGZManmWFkiKI(^!0lH=c8pkSQWMaYdU6lV$5dFa83J% z^8RN8%9?kbDcaOwur{w>xWZ)^AM1`v;U%{wWRSa_UGpk@S*ZRvX@{t&O`r@Xa4^^l z2ZQ!ek`_7CM}s5uKaLYCN27I(soytt$D`5hxz&t%rvoy|lU#ap_xs8a)i@u%9*$-2 zFG)X?-JCmr_t@^HeA=IG4A^1 z@Y@!Ic^U))U#jz83JS&=Yp0k30egr>?uqbYGslbVwRqDkf28r~>SL zyXj@@I|n+?Ln}Y`Ij<}|aVo2X!`3QS>AGep1I$$*%nzq-kB(Z7s(Y;+NNBnE?*<^! 
zO<$_2Vr>ScZKXm^&*;(#^3(YFn!uIYhPz6XTMa6l>|u3*JfgE){~xmKBfo3XY&M25`9wi+i zaXf6R-EOjT7Fv+-UrRxilPKA7)yy_UMS{I+?!v`n;0TZi1gsdYuR@w*^$?} zbh&x~Tl(CgMS$`7{6kBZ5etaeWqt=FZQS6R&R@EYNkWo^V)odR?){+VOV<8xRziT= zk0HNW&3=Mn6XK6Y9vm(@6hGezgq}Vhq3+1#FuQNF@qOOi=+(;RI9Oan;ublRwlL76Wc>{fm8Z|~RwcL-@5vx%{qlqRp z1{(+;+&FKhhs1)yORwJ25#r=G{i_~Kp}@v$omIp>3kUZ2XMouT43>#lh$Elw)^ljW zxQ$(wJA&|yT2pQ`R46dhmoaTx=E-Sxr z{~`TY^TVF$u$S*}G2l9b@z5;2QK2Qmr8r zr55kc1WR7PCo1(|I$C@b%iG5=7J@CN0WxL9MOOc!8K%d=(c&(FuYPJwP_{35z1?K$b z6xr-35RLSiY<70f2s^{YQ+EMYTPRacwT~ZJ{qKDH3jTm3we&HsbbWV204cmv3KoJq zvF94o+&6HC@@lPm(jS1Eh~O6^GMFXvOq8j%@>53nTVtTu^A^fr0; ztbuI5FqA|3croBOGu|oH86AK>e*NgbYM@vsVO!;iFFso$!(wgI544Usdv&)J`~qc% zIDftF4&*OgXodh)bR=GQszR!%b(LW<0;dnzz1BL=39Hf&n}|R_ce_@jG~;Ro^Hen~ zxaPq$K(yb)wLIiyrS`aK_GE--nnmFtjFuZ5J!71$Fh>Bf*1mo;1L(_xpo{vL35&8caLc+ViI)$4 z;E#;^XQ^U=-InP6SLQbXxR~xJi;ROuYTPnj=PzGJAaj_~Spl1VEZ1K*h}p3{!$?_t z$bY}9GMzg_=D!*?)mU3`@s4(aQr>WcmZ-djlTsnW5lr?Rn(0LGTRBLvBL7*~i>&LU zs!%Y~-W7(FJ{QSY_-l16BN0!XGQkcbkYsk)rDT3WU0AE?((LdA*{SFy`)zT?NVkJ` zmH37yWISP~x8jIX>AYhkEDTxXbK03I+kV2L@?JgNcN$L1g3mzK(U`?jRCE> zK@V$(M@+0VY?$Y*D|O-KC6BH|p*nwSh-p$TOp~DaUd+wg{c#9m{moAu&T{EUa-0 z`*X5Z+**I=NSlb-p>yRCrTA~>+!(LDq3m8bn~h@k>hu1V?F0Yy_eQii0=3h@QUmp2 zFk!oc^|!YO0m$~NJm2;DNZEF~^n+eo9IM@DUk=RsN4CdpbXZ4B3q0@ZjY#+;kow_} z0tPzb4Vgjjxt`SFoPuGu(5&jMFcfny@7=@H9h;%Iwm$mqOhj*6BXVy!I~wz)TwWQ$ z>1g0AeOd#osKfD~6BF}^q{G2>kG&SvpUW|0_F$^?{>o$j@o_MpAC?}N_CkJ+MDi^r z-22D&Uqm$FJ?)-pUns)L*%Kj9UwjaMr=w9HWB!b{f!4D{s<`N&$gQQ4kc!gmJnYI| z{BAR_<6G4f;JQ3D!x;-K-}pG2QBUoQFLBincz6=902s{4L{|F)=7Ta~GDm-T&AGbG zO+V{@W~O9sGcg8hh}t)_p(QO;w3x0S7LzSUu{CN8DfG%bUe zf0qs(4mwnDaR6*;m0G zpT5$|C^-%KZcTT}eyWdzI+ZvAOj{{;-!s*|$i*1ODN!kHEkk9qc?LW5>j7Ne+Hat| z__-KLrJqMi2ypk2++w-aWB&U*ap!-n#rI9bm}Nk<7nJrVIJo!kEyU-8>?YtiaQh8vCX#E8KIn`t#n}bTlWJfSMy6f zsjxBsg8SCtX-jW$BSgx^xouV+L`}c83qB8Q`tK=UoiG#rDtU3c=VC?#pVVp(7N77i zHA`*A2;LvVy1}EYl(sLABCm|`j4#p{9TST#F6rUYj_|>%!UzoqKR^#OA91V05MbDp zpZM9*B`R-HbXb_wAFKG9#E7al+z5|1a{c(#Z07nuzii?BaUXyH&EjT)?3Jc4=<^Lu 
z;JzUj$Mm*E=lf7PChj(QC8qkgF4p+q(7_10N+$ZS?Hv6ZmG((;A7IwMyI~o=!)P$9 zMzQRj274Q-6Xr`bh_~8s*=NT_$R2OVdgp@=Te34ssuMpT`u#)tT|nrN3ZE9b_VdS_ zhE@Gt1b$!6gup&W|8GF`l^itT!RF%)HWw|{D@ft&Ev+|k%Y+F0DL(SJ;P(?*qY{kO z_sy-EGZmqj4-_!!Lp3bu`b;KXmameiHASNFOC-a;MRpF-r6>q(a_-0owER|jGp@B#~#h$YZ5dC<06q);y3yGn;#>7@)A<2$beSjI)l?KR%? zMTTv9+)WpWp(9!_F*$qf98Gy#`IQ#Vw56EA(*4edZ73`*>a>oQsB)kaads_{0b2UF zN1{9*e(Z@zzQO+oZx1y!j~_8_>z-=oLoHw>9(syXs=qjwVHrF!fsuzxn`lJLOE%`8 z=gb!@sN3CRzDN=asCbo4-=22}fit>wwU29V#K>X-mn$?F?X@7z)Wf0b_;n^pG`5n# zensPZQ4QB$MyFYgMFMc5g64cKC(NrTo)O;LONL|J0y-rlB&cDYR7z#oz>2RL*}^4z z1KBwbU#4dGi_7}3ux&#qzmtAVW+uqr)oAW+=+v@+?F7y44uriz4Zdzo0OQHhU8Z}B zq(-#yY|gALy_wTDK3QLlz)f;uY=(;0bo!XmQ4=ck)Ghf9_gyx^hs&b83!fV*b81A> z6+8{2=ZR-%8x*+wtn5uhWp+4eRe;R9FRDYxtc){ynBaf)>^byMdZ?7lJ*@TxD2lAC zO*JfOqEv^txV=-hYBDTl>H~eGe_rOzH0@|D&pW2-2^HebyNwj7I#vn!T~T&m%Y`1u zx&0I&RxX^WwqxL1%q~(q-h!OYTX8G2$L140f|fi%b3SaIAfstcz0TF8U;5@;276v! z4olRiE+N<1kpd+9ZkerDdi)kWZwj4{NVglKoTzsFdBa)N|3=dpdgph5dZ9F*juT ze<1(&A5NO#$-46MFxIiAdu6R;iJG9&XqMwHJwg!~(X2!)MpZmRP$(8rE-EUj34>gx zZYuZ8_tEPNYys~1oxK33GuAS=ob?`jrm<(X8bs8!P;P8nX0IoR4Y?9P zzoc|Yd38$3(B1+=_!8B7nmnX94?1)zqOo{(Ki5t!q%WwY+u73IYHgsY#L=opPv?pd zp4S8rRp$Qm*q^qt22;6s=stGN$c;;SBehDT~aG$ZCY7#7x*-S`sMHpG8A(@1{~ zP^mPqpZ?Vzg1e`^U(+8-mpEF&?KYlcAhFK7;^KE_znbk3=pD-r356}m8!va--%d`l zE;mx?)|86zJ_CX8D_v*4%;db=nGX0{D{A*S-oEdb!^P06Q;9yVHx$0p{qn9+cWc}y zo+i-80K1=2Q&()d2en^J;k}G|_+Q^CcffS$&+q9pw{MLb)!*IzI<%EDg1;E*5TBA) zG5f>m+c=ZzVE1$BG}%`Lz;91nE;Y!BBHSG;8DLzY4o#v~dfNhpUxf^Q-N`CqQGDI4 z=+UB$N@x=w(W;0!oJPq>M5RG}lfJK790xNPR>1`F+@(T&7QsZh zZSkpIRxC8!-RLMuVWFUus|y1c7|1zG@`u30FrrHNuG47hMn6poj5%hik+Ifpv2K-A zIP~ALYc{zBA`a{boNOKhyQr6J|NUVCmxFT&d(I44-90{#Ahig`e#^Wk1DMb9kUe9i@K*85>o8J@@Hr-3rNQ+>Cz16fY?K~4G4g0*7 zl$pR#dm=tZVZ!W%sEw!784#}N{Y7Rg1Nv?Wm!0t*hvW-tq}0u$(0wY?@S4mJWb(!& zTm0$+ZEJ=1CKBD?)uGh)XsQ)<8qh=Mr|RL&>e;nM5|!XsxMXk3UI2yWx(x!^UqN$U ziZXZSTez!XajB}}5(?WPqDFlai0Wm09=3EF!|L(a?R4LGxTWuNBDXIOK5z85Oc(zH zUq!cENOEp~&GXvrZknCIdcLrxrLPY35;SW6?=&XzcjZ=w-~Pq;M_|M`PTg}p7z 
zX@pZW`l#wfBWw;FmS|0(5iNRJc3T=~gnG-eUC9iM;M06^2bFMB z`qg^_sDz^b87;|I6vD|Z@26rV2hqCr;eO*rGC`iuO@Ed~B0{bwT_`=zPH>NXQutD_ z3TD+c=?d?b;G9!>-~91;V5SN0_&PNWI^Tzt>*HB4dLq1?u`muHE^KMO!b6bcz*lPV zsRt}KMQ-m+Z3FhLDe0oJ^`I=`;+pAS2G`s6zi6z?gI~uA)~(3Iz*pXtZ^6p%(B>S` zY70s>vcBEWlN7I9`EVMm3R$MKe zg*FeyWi2E#(Tuw5ve4rRl$6Gi^69@}lo7GAq#@9Y{y26Z_0qq{Cg`@t9j!8Cq#_o& zF((`qv_~hpD*psKIv?r3g=%1YH+kNr*9^-=0$N-)9iXu38c&a9H|%iMK6kID7oLdT z_3t(xfU6|4P|rU@@NTK8Dn)q|c4>Ywrg)BnciGOBDgy>&bgFF$&|*UTm$6(60T#?i zvQ?;-vB2|}P$mDZNhl_cG{w=T;Q0;@!B)2^_~7%Z)$Z36>=K|@d1g()eW?o8y!sR< z3LMXOP??0_&fPon;+WtoPbB6mO~6JHXN+6WFk}>%2ESnRLGr$J`LC(nu=wpuF8Z zZ*E*Vd$Ie>ttPngJN8VuZx(1bH@k(LE<%TQk!2=1|03Ha@$5U2J;-Vwugrn20i^15 zCKS|0kc`;oer=U8D#==IbFua(6ktQrwT0 zLIt1PEbBqW_un#fc)L(eGRW`^w4vSpZTnD43wo$4W^!%31!=9%=+*6SM+?ttVvue( zN>4r`E}+|w0v{BjBDoQ?$Nkqtnh66v2r}wU**l3i?ZRGobIc&O{rd8&Npr}nrv8i< z_ae%s4ZSuKSw>-DXMP>XSwR`@o)+h4R*_lmX{qN{Y&f!#Lz^vx4P(X@xk{TN=ryv-Q|7u-~#&6d`qIXa2|R1 zOUJeJ&LL%WwVQj)=TNoS@t!`}Ikcg{czTEB94ay%+RL*rhgf3?jTWqVRB-L&`a0c3 zWHW8}&V;puj@yVtWY4akY8$!H3m@4qx8mBeFd7M0hu`v#ctXY|^&9RAt5WdWdWUe8 zc`9zx3c6HLK*yV;CT?i@ap4Zz^9R1m@nD~&gWejkyco11zL4AaFsHoV)JhFMHZ%CO zrcFlxe+Zb?Tyhh@wQS4*_4fk!(en)EoqPdYeE9la%US{KT05;AIV^y;ACmlJOcTWP zjvG!_WCSrwU{mhXBZ4?~vR_~Nydb{y;)V&f6vT&aD_Yw0$SQgvv~xP)`y$#9YW2?j{VcjxV7R9C^CUW2rcEhqo zR_@FW52CSS+|lc+yV3Dt^Gg}8T2Pf$&cgM5b?9{3X-5CsN_0^&{EpVb9~AcTaaa7N zDkPMbV|dHB4%vL;5udsH7dcV=G=2Iy(YAZh70n-dQS*_G>2vf!Bt2^LGWFpw>bq^o zE&Oo=)xNkn_(X675tqbjKHCqXQa;6ZQDHqu%QJ)Lu2?f#az#vX2RQgi~qpA zFY_^TjBe1umvww76L5cM*ZGS^(_qKlaJbrS0T%KO9NvEW#24?ueeG0RNf=tEi?%s zCSEhNIuKO1Z8QBWz`8nET;mBa?*lWa94y1xTaN;bJJ^7Noc{hruw)_q9wl%3W& zzkal5SljFLrG6yDC!JWt*@rZWTBq9HbfaGqZ#KDnX-5gyZ+-mC(TF?}4_Kuel_Nhb z(*ncgEaYB1{m5xoJa{UpKQ7ZP2DKfBzgBK;ztZWWsCz#5YHW3#gt2DVuml?DvK zs=`z+Yu5yvIW(Aa_%;iy_zEQ&^{1g?6I)n2YZh_~r+yTjSO7`oD|0_AmS96-ZPab@ z3d{sETl&pbLHf6@p1dj>(f4r9_jo!R@osp(Sb-cnk$X#?S1*H|(0M#VF*6_$`1Rur zyO&6WciX$;+PP$+cM%n|d2$f4%Kh0Hx)kC`*9)VE%M>EBIVn!GwfCmynyeV^6kBs{?QnQAyE@zEz!thkAj 
z5G_zt3ny_AeVd%*!+E%f{Jc8Jn+#53#lSTuJ&BXJ*1aZwiv}n0c)NT_avYrqCi=F2 z9ib5~zVPb(5up*&6?%v6ZleDb?>+=Zce>sPfEQjD^qSuiD zfTZSb6ELa;yL%>{8@dbN{-dYAW3}FZYV;oq4uedjo4%s6J*yC1%Kns~l=2%n?xu@o zI+voG4|uw>Tz{e;cP{7CJ|-iJZF71&;UB=%?!e!@Uw;Cxo)vEyDuZ1U(iKHFMp^*&#&m|+Gw|-l3hch;YM9( z{b>I$uGTK3D6XEwv)F;2j`dLFA{)_>ny;Haloz3fxrWy5+mnF%#bIOH+8P+X(kn%8 z?u7nlaid{}2jHH_&hotnMnMWioRy860GkgDd3S#?pf%-tZTJ=@$gUwV0=6^Z@tfeS zSz=7ERXK1mLWv1J8D4y+5}2SkXTss)&w{7AIZV~2N#L(48C3I{hO|%RAKqKe0#Y|o zHP)L4s|S^qOs+);A=2Em9xQ=@U#s_j9?KBuz4n&@`wDFM**17~aT%6ta#seOm%-~~ zCaKYD3AF1c=;t&RA;w|7C12h=OjN7}7D>&)BRxYqQqTze7tTLyc`4B+8+*r z#mj7yOmaVH=EBwtvmR)vI)OYxI^onr$vwgF7FZ6dOY)LvfP{GWSnIYb2wOk(=5%l| zoZv`ciJ9kuojHyC;aL=v`H2;ttowkr?E5Sd{~V+3?7fZWkCmaLZUt^>J8IE?ghjJr zW)t#Jce^~kvmI@9btjXux=_*kN>MeHUX=5_Hg7PrAEjM(wFU%qaH7!^)CE>Y_*rr`I!QyhuShq4Jy~x^Ikz=4}=Y) zc-U~PPVP+j+&?}#U7&eui5*|Q`(W#1ZZZ~XTpTSI<-mDG4>gPUD7Zj`KboVTg6A#= zJKhMP;%s3?sgei{9~2tue{qe5C89C{wLj4CF&F)u>P8wCmaaXZ!AHl>?0F&$&(d*k zd53Ub4IO)5V5n|!=EPz9^JLr(aAA(3 z^J0CPW_*Y;H*Thi(VdUe@uwx`nmQ>8-qfjjSGR;6&m^ijGLn~&-aCAh!J0;=N@kcz z4<=Aw*af|+`d-v8!zOP@ZAPAYr}A}Pm!W{``KErR7*(4WA7ymsqy1SiiE*mMsG{+{ z-jjqHbbEG^S23goUHjSYuNBgb_C^VcIxHu-!S`jE{%znl~wLA&NI>9UG|%#&oB_(`=!_UHDhSY?sZP*J`EsaOR4W{ zcmMHHyOC&^Wd-_uZ3%ppzCv(RvwhXG7KjTQGg{d@1h$@O#izEiAV&Nsx3J}7g%Mx~Cw}b3en?oeRs{Mg)lr4$4s^OU|q(&kr zd3*jEB6gzP{^H{>c6K7T{Y6VOg^if}sk^F|uncqD+9@W?1!z%A+k2dU4s1j{-fdTy z21ZlAiRm*IsCUN2+nr(nIy7yT;V=fIue{0n_eY?s@g>{t`eFF0{#~6zZv^tyUJ7sa z9|g;w&tJV&8i(k`jfP*QCqTv3$Uv@u0T279LFSvyXJzC0xhZI;~;H+C_ z`A_#Jv-0>&zOumS%h`Z{nB#fRorC*|Gj+a4iunOSbEQRAaG# z4GzO#YyMmOOe+H(jOAX5^q7JS{dkE(egA7oKatN`0FQ-?=^Y=rjO zL$*HE>_m}nzX+K`CZZnPeY91AgZMV)J9|ixLhMo;Upu;&O1!k{jFA$h5#u|me}&c4 zh&h`nd$G%OVqBs?Tl5E=U~gM@y{3;&9L|5US>!LB(9cm~WhT-Iy;mHJ%u95lGMxFw zPn=G4$a`AVb^ePVQkPhpM8_ z12!+|#NSKV#Oq-?F`rY(|5TNe5NVQL4shipYI-;B(N5qbK11H|sqdUbmj>!Rke%t^GmNTLD)gf*kYLlh?2Vh_vt@lx%-^;Cdk4UkJ>b(_mtJtTGj!Zn*$qD| zehpRa?}n5@=e?oH-C%C?Nc7at9(Z$BGs0e^55g;b!Yw8H;o2*n)rLF$V3$*P>V9?~ 
zWNtqthckL0;Mn1c5!X&okxfxM|GWv*x=A70k`=IdPh$(m&989a{m4~1GX>q)cktOw zyJFNkFIDSjS%X}!rPp0qZbS?r^7l}#HuNFstWuI-7ov#rm{B@=9{8(uDf(OInk z?K3mr7PI4d*1v!MO#7qE!5=+PKDbuszwBx#Xc}%C^tVG#UVWGCc=sEcLepD3Q@J}QL50Bm+&agnA-TU3S85gT`fF1VXp96PxgaB zaPvsB=?aDC%v?{-P%|L5@guOoKGl~4xJV*r645Il5 z%YJ8@Lm8uP^`nmSC~1lubjWA{HJ;1cn;E-+uI{KX+2*o<{Ou#f*R;)}<)on7n^<$m z_vT=Yb+aMYnL$}=EKmNHpF^KV6&ed27f`wS9!2l2CG>J7?ZULtDq412 zspS2`hFJoe&z6+3C6f^I}Rz+>3xPKCI&y zvd>_QAIpgFyB-Y_z{^5CwwgNxafGw({okpA_@hP4x`;hO*lxDHQSXBgUiD>}8;J|! zn76sxGy;XOOki}08A}*f6xJ}hj*8%nyho&{P!X)^TW)ggrwC4q-Fk4QLIj6$DCKX@ z5W#&b))$;?MevM%-RloS!gym(=c2!YFuweQi~lt7@BMG?jV40>`04i@HwhDd{Af+& z4*vB#*sycJ&D@`kbLEpyv9dXE4*zp4({eUEu(>AA{O>%{D##r-^<|+?Gxnd3^bMfb z$E-_NKQyDOcY^%OG=HG6{l*Nx`Sm$~! zIq2L+e(LP!0$6)fq3vv7D^z_R7UnG(00*ZDYy5BTlvb&_S{yVDQDfiphW0GLT-4~c z+N&$D@K)KmlFm+KNUv*ZzC$8X>SZe>Z#u@40NeS_(&E;u?o+2}3Y2AdxgF6BL<({H&6Ygme0P`Aqc;U8jbv-Gc)JKK z-q;ZMgtGw2ubvT#P|HCwO9$tRYXXr^vm%#%d@fikJyeMetAY`aD=}xgTEHN7Lkovi zFKD*B%{@sT0j1z;S8vgoApPT-gK5|l7&o}*?BD`_MWLLWM%ATqA0{l zkMDpggF*xDy$l6wrvU8jlMfD`*5yK;QjOc{D=K+RMS;pGKV3XbW`!8ZoDAURl~qC3Y!Zjis1U z2`&-Nmfl?OKmbuqr>_pbNl~>N1+5Czx~c=5aQnW z8NBxX3l(;ouDhM?fy1|__^*x_=cD;3&{T(IXv1g3FMZ$z&Fc*$&}wu&9Lm7r%$5zb z(BH7&WpUKx*6E0aQ;8BGX9O;h}*3IUYqsUkPi&mxHBocXd z^<+o$BsxOQ=&ofSN4?Jn?b;Lu5wDeatZhjr!gA`x7wM(wlCKGK%l!&SnEJ?1Zw1dDn2Pf?>sOlKCT+|S?V zDE)hnjZ=bl=>+^9)39Fi8UV4v2Vs`o9q@yFlMemn59m88(bzZp3GU=~AK+9;LQQ}3 zQqpU4kU>84N#<+@nvKr+sTzI(mGYT4=ka=Wht_*lTy#tsW}n8+Wcd{T*zdTXpShEQIu}?Ufe} zc!SJq*xcyw1(oc+{awbf8U?)HW%%UQZ`9=*=%?b+iI}pP&mzD5LGdylTrO4*Ajup0 zHs=yYknwf?F@Bv1G#K-CZ(=zeJv28wOd_U`vQogGeL+lQ;h=lrUdc2n(>rD=-aLac z9uB7*|22zDjGQE+vggo&&&<6NH|LSo)^?H9m3fqoYd$|Xynu`!&Hm9muz(Jz_O-ql zoJUPz%U8Oz=FzDz4+Xl>9J(;E6wIkJivqVgdrj<|L6fYtu>q#jXrk0HGN77?c35@! 
zczj`^&*ef9w|7mWtV#32okwQSp0}S^WyWUFoHqTSJI?~jo8RboF=z?-Rc^VfcWM=d zUY@^f70iMU96{Up7g@1Z2)}~l5i;gwTC0uRV#C`sU1`>LD7bi4LzQ-tiZg#7kZWY8 z;k(-X<$DrnxbIPNe2O|dmPxOKs$zERzwuD!palmmv|ioI$Hj@`U({>A`o)Ql@lSm` zlg)+K9x6w3C#}P^-xnt$V!83r7Zo=g-tyo=(I+)gp1gR+{t6+zZG3p{NIWIt2_N44 z?;m`d_;4++PJ}rdHpDatWUl78NeU&!V z9TUR)qPrwGR|Iiii9s)~s~`rUM*@@#0lX>Z6+JnTAJ6%YxoMm6VRz3F&AS2IxcAnA z>fT8XY>e-%mC`Brh2bY%iELJU@71v_oz;uz;ME`4^wt!zc6u1^8`zHqUEjw}=hdN$ zr#j5(9T{NH#nk%M(EzE*1E+qBw1Jm=sY_5w2W*{wlqTNS0=DlBT-%E3Ky_A&r|@nr z+%?<#hc5UP9e*ON7W)=>m{7{{!s<`@4cOjAWnyN zF)9l$Y6P8I6Kj9Z`~{VuLK0{C7<3R~(zQZN$dTe*H7%V3t2<8|cN5DX>FDmNs?I_j z`pKoyP9qT-q_dOU&&b5ou#98uM>Zlid0Wi54Ta#mxZa&5ib7K&~A$UD2;Zz^I1Hl?NUoqH!+;JHi6_?_~dKchVW?mgV zeGba&HM+j^&H!0UsP?hrG&t`F+~IMM31-gGx;}eh@Tt-At#e&H6o(oP3^c`-JNK|17q;fwPQp~J5&g3e~8li=j5VKw%z&a&Ww zQhdt@q}}7T_J|#XqHom_TjlzoTJXo|^#MK5Ru#S5VYn4894G83{SENb%$pew#&xNCLC?(g~0bP7MS=b+tgOWtcJ(qP$QT1~XOBu=UXh5N)Z(njXVrsn+Qq-+N zA@4GR6yKB~HrE+5Ex{bL#{KS~(uPQ&J~Cbkb}NVauyrjtO26TeY;FeSLJvfCMV7fG z4#4V>?IS#z;~@QeQ12dj3XJ`X``I4Oz@7B0B^Sy9$OMyIM*Nl`#d3{ueQ^ycx2Yd( zjbPYSUVHKl#!1eI9ra6gyVP9=nY@GgpPp%M9e zFAPO!(}SH@9})f3EpE@vNUCnvF0#^xomT92>#V{mSG2oJ>&Vd_=WcNQCSbe`5|R ziID2}k@D$ZzdnfM3?evK2!#!fRRRSo;C_B^@POPB1pV6JICXX&l*MnhXkVTIq4hd< ziyfxGHg;p~zSaq7Hn!0p(i#TUZ|5z$)ce3K_W8O;^loTw>7lp(HN>)w*eIdudmNvV9Y3nzfqIs8%#*LR~j{TAKEmbM6lkG7Nrq*Rl`2xo`F( zzUD7#*|!vaMXndUeZG75!`T*OYm{6~5Z};_mzr^cda0n2Hg=WweIqy$Xy)0zzi?31 zM?q$K6cYUBrISfhkQ#MLO7g`F48Lw54|~pos#vpLd(9#oP0;u@b7L9SMg^Z;c3Od) zTEAPNEh`Y2&T@P-dj+=qQ7YapumVGll3jHxOQ0CNHT$;DBHRjIHd`p1hn!PV$I~-r zL47RwK&bmP$i$fJaZsLu!#kd;57bRU|B2SI6z@@R-kvDR^L`L`{@nU2Zrul}?VAtX zY3T)OWOUkopGW3NPawDGPE%!ZIc%Nl=VPtbIK@Cp;XXIT>>voYlwW(xjm4sOp`3xl`$(YtW!j{R+ zhDXk>%eEb3!*dZk`CKw7_|2tDfuR;uyzTCpDzRZIc1nA&Mb?gn{WZrzrfO&yUAy3$ zs=B(dZ1mY0HeWI3>qM3<4+XAa!zd39Zd0VjU;)c>^G3@3JH@3vh1z=cg; ze(iC&unwoFSE_AQ;l{QpPUl`zd2pzv?)fMN5B5wHF_D?!#g#mP#(e&KILEwbPkb^n}-lKb$X-qkX;yuSeUvCX5sAD$W=^62cel z-i0fL2;xm;Pc=wa1+d|>#Mf*Tew-T^o3Gf!gVnyJ=r|g2VctyX#7jXmTyjg?autzr 
zNT=4l*RyL#EyJ~)M*C|df~i4UXj%TJne4Z9J|amSYam^#F#YcnBo z;|qFnnJ>I7^FG?uwt6@-G8j4@Z()l4^+H+^&*OSU%e>KSMfr*SOcmPrx~Z4#lWX0BTrsP zwLtukh~WIkK{&97ORXS-0q&u>E9|Co5M@H)mGWE$?$f_spLJ&;T3Ib&1RJ+aVRmQrZJAoa~JSa5FUc%smnht%kfWyevnBGhk2NdDo3e*@#dW zuKE;Hg`CTmuU}$qL}p=|g6msa(1A$BlDCiA(XmHqRj9ljDcV(XYu{@{0$fP}y6+p& zNJ02meMS{BIeunR&Fd4|n5ygdE2;>jzkHscr2pG{7nBdaywVE^TZdRp8irt3siR+l z0v!T10=94KWpy4{l_T8 zy1=^6P@i>py=&Sx`X{9bkW|K^KG63roGJh)hH%hESU^F zPN9RcU-+sbeF#>c1jTGJ_yfivtRbf+T3|wB<&bUtPxw|$R@8cm!7R8tvsWb*%4LPh z;&Sdl@O4j%mF`ePYl++zdm#ypH0W!Z?9W6TZ+6g9WD3y--2%~1#-(UaAX87K`#TEZ z<&E90T!#{?TXdC}4an*5Y|-|mMpWQuD=2lk36Uq-a^Jf)quT;Ci~-SBwDR%P&zWntxk?>AefrH1icWC86ojjA9+egF=5T5zB>$*9sS{N zTOA#3dU?@oc+VK}E(mh>fj_9;tkQt~rwGaU+glpwbimJ2-tg+J42Wy#@i%o|0+aUW z-l#hyV#>;`UCxS1RPvjLhVXC@)|G^PZ6Z3w;xQu^|z^z^|53rk+v& ztud*@xaD0`bBtS~F+B=3s&P~`x`shf%ygo0Y6&RneTXYM(+GIsZhdr74{Q$5O*&sX z2+g0aUu+7SfYZmn-1>HU3g*&kTXvLAgUg;*&%>_Fg7O#b^V<&0fe*=LZCY$Q@)8R1dYdhuH2^iR} zW8?c`6ecTPaYpSQ1i2H3XVz8pLT*?T#~HCEklJW7{Osu$s4z^j{YWoIO*WEsp>rK* z^Qn4=x;=yFf>b^iuf!yJ;$qL@-ZF(aYa)a#^k-0^PKhz2aTXnZyKBGKR4mPvoLsEUj!O@E z>h3n=#L1+0+{?b}aIiq4>~JLyMwitO#kufd#j&f`8!z(Xp60DW@jC@@Z|M0ti53Bz zw`cI$R}(?}Q!F8{KSL1TI@-3KZ$=Pj9i6h+yiEw-@N<7ta8d{x>5=Z+xCr5(Wi<)q z=R(+(V;@&*m=NCAxc$^fun?|^;op1cpMTCRnh6^j3gNf2TB#=~LO8G@bv8Ot5bxhJ zI4i0yhw9 zmsF2(E~*9H+~0v}&OG>PfB7$}?Ojc3%^O5Jy~G@+LPikvbB&J6&rxLl)wKSo*(l<8 ze2UH7bQlG6B)t$_?MIc1t~?xXdXf2k?`0yr1u5N?)SjLFjt+S9ynAu(4&oN)a&zwf z4)Y}ZZa;2=eHVgtHC+2({K#q7vV>8{I+iZwf0Y4`@0GnCpPdHlZ3n$H&(A~oM7jFw zm?hXOnQ)*XWEFVloVj~KS%~VQr85S}tVG!RDfzUUBtmhC|K6P;5}~kS9%sNxCU9eU zd0QTd@WEXfS?fr|{f|#OENQGnjfa=_?;C4yO)%Jqc6Ay4yLPEsTyp^=zp5K8i_AjD zGAB3RK_);lxW>@wP*Rd(o8>(L=6F{k>&r37*IXLQU>$=SO@9tGWsSiMhpe#exe1uM z{E(e&MTh@9*1eX@WWaq9+cq|KCP;VXNegICLs0wQv!d!VAbho}POxnT_%3EeJIc<2 ziBrhKDb86qb~20Ot^W+XG!4lRj-Q5|BYSdLt(froPvoucA_icM!eRULNvL^y+Oy;4 zDBvU4qeU45@M72|;-<+Tpv<4GzyG)${PTIjuYGEO&1~NO;%~~J>Qs!P#IE(QAI~$M^ciW6&M>Dc043<+3>Of~aIVXA+x>0!I;qc{#9u)NX 
z=rg6@9yB)idFsuME|jsQc)Iv`3lgQo^d}qEp`W)!zj{pMqESiD=q+glu$p$IQ0Z_Z zOpL~Q*=h8G#!L04FZ+ie`p$m+u(b)OZ?1Z_C1nceom=H|-p+vMMl~O^6Z4>?D%9B7 zu?XfSv46(&SD-jixo0+M4ZQxYh1_3cA?`Ol{Ng1-BD_l4TJ7Y>#5qPAS#uK`@$$Vz z&08iLapB0<1^0Lg!7%ytp+td7$mN7T+vY*Uag$Eu5^%A)MV-jeB(jh^g8rqjU7TRrT=%fbaWa}^Hav(w;?G&>F=@K~ht z*fkDqRA=DI7f1ag;(# zi6rs+50i;0vY^hr%Orwrec+a|Uo6BKi_FPW4y&-CqI$qxatX3^L(NZ)&B4A+Pc{46 zrXg3_E$8PF173#U{k%C7P)@NnN?9C+^X_4L8!G$Z>rAvfJI^0*IriwY*ZEHPBtd>z zF5L_}uDoqIDo_t))(5Y5k}IH5+E>jxEDO%{L>$TT2t@f0w@T|@C_trQO#)c90{zZA zy4AX<8a@2az2`3;v>EAE=6$r7mY43+OnuU@VOBNWj6Q( zjAGEuT%DZ@oDk#8V6dDbAr zerY|M0V`2`)wyf4HHp~d_#o!Q7ZSm3G+2Rwf!a~IK+Ieq~ zUx7u*-1D1r7Jze%l77Hu7K~N}f`+a!A$4v~r09AEB#;6{mqy#GZ6iKx6Twa`WBp%E}{dEzKo<}?>OvHyBck=JPd5xuDG@e4!}vN zxy|Z!e?a7V@BvMME|Blt>N2O+0xRJ>J=H?LAh<5z+5`3+xr_N;8!YQ7K0>u^WJ|=R&{(@@{T&x3QVq3>xcO=tHJvFi z%yOGSJ)^wF*Z0k$aRDUe#+XGZ=W^j>p+S3?nR`y zVWYL#jwRF|e54>sW*N;<cN4R?w9O+PnSaRb;JS9QHtZ6*W#r^!#O6 zMKy%VHNQ_Q=(K%p;?lttRAAb;d*bghawRi6DIb@SnhXD)lLgBt_D!ROuI#@&iDGrv znH4l$60#+5`x@dlicA)@W5FDqUq3aUV8!l-JI`&kAmJ1J>_?9i1k%e{4kc?kF}MCZRk=JnO*ES){gtVZ#xI>?p>1>uj9n0*E7y^f91j_ zMP(aVgv zKCIm8qxn;dAA7pSh-W18W5bN5pPV8B_^pKY)BoRZ9#js@Pxc7l6w{8v@qaG4eUv3l z=C&XXyBuCH8Y_sSPlqa$ei6jnI*pY!MS|EyWVfGimLT@<(=BAa6U67+QdZ~g31Sl~ zzR1v1f|#Vb${Qddh$%5_68nuxwBu~OrKRrfY+Y*}D_?RU)f{FeI00960 zEZ28D72X>M@O!Q6+B-!O4NXxLspk}BR#ZkAsf^H)1`Q2ViV!Ioi88(=ip+8j$;#e4 zGn;$uYySN4`|JF3o`24BUZ3av7*z&L=koR8Vom!q^yyl>I58bJcP9|nY`dtKv``G0 zwhmX=MC-shF<{HS@dhZ;bcnvos)Hjf1Dh(t}L*UjaVqeEys2{v?3LhL%%JwV)BZzU|Z5ZitKhv+`ZjFywS9 zYXm!tL%!IP{x_GV;hfU(OPV#a5FiSP-+dNf|Lst%gCR>0Mzt5S_oAQ+Z}hzWo2Q^b z=i{8y`>AN8F@|!*n2Ne0_1_-SqN244!dKgPsA&4H9ZUBk1=Z$LcX&9bY~U@lQJ}#w#qo}xDn0lSJ_4K$c=tSQywNyRV-jk>J5Q`HCEgFG9Lq;-Mn z81$;!w}P?CH$lJ7YKV9gI7v34uAwad$c-n#&Y;3xJ3N+)rnr!XL-1Ru%WQTUL zP6b|jIMu!9LKcn@E!-A8n+%qF{uGB4SHTN$7vrVLb{H*X%r!LkLBe?KXO;5|*uiT9 zH`OMfWq9YlsD^2XwUox$%5#txBBSx^;36oh=C63-NkO-}!{Qb5mirm|>G^{WHl*?O z+x^5AcJ!mKr)KYW4wQ3*M}5B|C;BNo5&3wI6BVs=xe(yUg=)4h4a;S5p?5E@q(qE! 
zA)ULQo=!>8(37+B>|Zo!XujU(fR`l=g(ZZ>9QLH4{QAe?rO7mO%RvAB>haqFDFLqm@a?{f0|$c1*+3V9ul;6#EFG99NKI8a91u-qwrb`<;Z zs;Fxq6}{LXAoG)pf{M${f9CYeLttH>kVifXc33LEa{e_1CYR+C`v=FNb;Fltq{jq3 zMEMq1FbprdIjKr6{V<=~8zvsm0|F^wN(WbT!hmIMsO(rPEd6+9Qg74%g=Xk8L#Z0B zDQ)&B=qmw7pHbb$v0p$7`}D{AMkN zgU!Y297nvX@iy_0g|mf?_)_o7u|mmqEF@yPjj^)}C!eaYx;)*3bvs|)$E*7A!&}eQ zx{vqcoHOyqlu!5LFUMxA?=AG=121*xZEJgQwt~>0T+wnp?|;`A8|pAMfwQrmLGb?B zskUg{EQs#j)9yB12kR2P$?Od7g1IZ~^X8_55TksLa&8wB^6%;9#~+-4Lh9`?UOWZ1 zbteZn!)Jgkeea?{GYbT0&XT>4X5ne$eQxKNSum`cc=f@07XIkc`@<_(AZR_)eR%~7 z=9czO*PNVzJ8#6rI6h2+UZ?+F1IcOl5&Yx(ovbOa8Rc_y@|}Y3zENwGyrv+2pVf`{ zz$s7?UDw|eGXq`X2(D+^ywj`17omF zrc871X(q^yCcQm?7?7$zJL%m!4A?0?XaB?iOvi}HD5v(qc~xD-@x9%!IHYQM+_xQa z@4dJ1G-`z2JioR)?n=l!+kIZpfWXk_hTnZI!MOYNuimel3h-9HX)Q;^I$W>w=JG42 zc5GB)SQzHngN<(})Sp>7fL-)FPh|BBxxY{) z4&*cO-XPZ|_IM^P^YE`u^=4u({kz6@0-4x^7?j2LFtJU#ox76X7(S}@arYnJaqKyx zr9-Qnz;$cl?R;Sh7o>~`{)eY=7WI{}cdFc&dZrqmjPkf$QD^+$6nh)>~(Ro7ftqxodUN$;<`! zZQ5BpWXk0b_r=G&q(R$Y_mCbRxul!8`<*a9sXQw5VXTv%-0x96$DSuZ9^QMPxbwRp zNw<8vOW>3c={Y@~b*oW`C|gHN9Wxas7yj+GNhuR12QQ{pD(({@#x?71kGvNlPZJ&K znhPQ%#mUtr=9nlcf5VtfeIrV;Z%7_cs}m)xxVI;}*u@Cm$CchFB}QnSnhP;2#7K7N zz#r8yQ4;DA+*B7QN_!@)Db6MlXPVA$fj{sceH`GtU(wS=^ zPCP6a&NWR6dOHW=$$43a-4?( z*Vl5TB}i&kr&f$Cf)~}|lYPu0Jgxlx)8p78SnDPIUR}2Uq0jyctdv`T%!+R6XSsR! 
zCVfMT)g*+ z0W0;b1U;{eg8lCV4b#XG=qo$TeKdUp3d#(>XGV@f;r5vr&rk-ef@^=T#W8_dV;a}p zHU`GD?Zp?@O@Pqx-PYKA5;kp%YR|Trg1ns1{T8dI!Oc1)rowg_>^OT@x5!UJ@E3le z{AJe!metOhRZPIfC=pxpk}>dLDA!0>FyPCgV%QerK`1T$6*61i1Mjn2f>J%&p*c|R z`Zecz*b>=tJHomQnp+I@JK_m^%x^Q@gf`5^owyhHuU~rglUCXxyYW@9FUHC z%_T}1RbgTw{bGao|Re<~Dc)Pa)m>b*6^?XdZ6um!j4KR9lwto?*J2=z^YWSuVq zn93Eay{5)M$LQpE*xpGvbMb(zmHjlxlzBXm_GCfJ)X4)6UeAF&zrMwh$OX7+8TcYA zcnLlf#P!*wQP7C2<%vaKD&k-!o4M+-A+J4eX>x;XNVhr7O7j{!^2=w6>-MmtQ~$Qa zr0a8_f;B8r{{#*cd}1*1i!di@iuZ7PahDS%3ctEpG0KSsUF-!LXp#dbeE(!5p~f2G4Dfmv`@cC%cYr$x-(8hrmInqLz%(_ za%2HcT7PMlKQRmTq@Zr{$uxxec66^8m;eEf5a&9VF%W*TF4%ev1H9u~DC*UNaOh;i zYp&Q{m|qGeA$iO5pK+@|VQ)KZGB3{TRBD1xH!ew96xTq_e97)3$`$a=-9)s~y%4A; z?-sV4%7GiVw_W!RPl5c;irm+$U%@$sOgpD}EY>x?7Z@Onap1Xsm+r4F#(($Exbfeu z!rBerO~1UY$BQk8E!;+$ajmD@RV)6#`1iYQz8@^wadl>!f`nT;HhsV|($4ye*EkEB zebaBlR+dgU%P+N3fCH(Iv5Hd&a3m8qSP&b^BeIb+3rt&xJ3B#h1TBB;oQBa44c zE)^ZF;r8BTL`95Pe+9+!6x2}EWBPIPBIqd}aK0!o2M@PxwG~d91{EKng3Ednuzu3O z?5+qCV&3tmL|qwzacdp-)WTkPvFA(ez^!h0blmq`=jTp{Jeb|0b+;2LV>fy->bm~# zqkn+-7*g^I1F#Pj>ACG6fweaUSH?&&;g4&~nJu|v|Gz&Dk+H#=vPu)6z0NP~bnZBG zhGi@11dqW_v6!_2Wemt%DtfP>JPhfhx<@Td`(Ra(k#vw#7x8cOi_HA&7<*0s1W=BCN+ z=oXxmQvYa^WG6oRyKtyyt_Pc27Hifr`*FW`-=y^LF!ovZex<-D1B<+^4B>7V!<5Es z$HwRhtjJY(`1ye;tiIs6?s>#C-uHT+dTaa)?khE3*=EYZ(s44X;&Ck8!bV{~6T`wQ z&W_)YaAIN2R2S^kA-Nj45|ReNK{4bBUh5i;Fy-PN04fpppL+c2VwB(MYec%X>LjZZeRXaie{Pn`qTP z9QbEMCwJ`6-~IcTPSmZ++9spuB*k)b&bD+q*-}34zO#i+Zhf}=Hz?0Tb|jrW)EvP> zzGZ!szH@??+;FO_w%ErC}FeE zxMolwO70#K*tk|-j11QaPFZA$k^Dn@{(8{G$$^P3ALT9L#9%o48NakRiMXI(bg@m0 z+zkCNx9+wWFRP$?YeIZ@vgokChB~yF!E*Y1thPHWDUPg?_hb z_Cmz%rj>fqO99gREIx|T%u6=7$y=U?>yVna=N4zKm7DV|G04)<{Nd@3N`x#XT{y*xW+QTB6Gk}U-lcQ zN$mRgd4mt0>0kBZ?OK9y&ryG&nKHZ-pu9$lQH!l}N8;*Zns97l|Fs8uTd`DGW0i-0 zD}E;)q)OtOaf6g2+8k7mIi8oCP1LW%=9Qj5YwzaZfT*$!KUckn3l1Ly8>7k~KmEoT zA@4Rg7w>Pl@SzucpTDv&QegnsCVT&_{u8jHi9?L<;|y?x9uyPaHxJPrRKJ}&mcTje z*Gzv31%)&I=*M5Dq6=KEZ<^bvXv`wa=q7~?RhULiv-eWbTxMOdRX7zbyu8Wk+(bna 
zWhcFrUQkf&?V+;MFPGpU*`}kHya0z{X*yRQ%|ZHzdiE857I5>e$(6o51qxXgRgM*n z!{Oix?JQj;+*~x)>`)#7cV*d$y3_%f6*Y4|cBBtVZmH*Ta`l2Vt2ZpN@*jMyFy@GQ z)(dCaHXI@gebB-`8p*S~4|y&==dM>D1}`pyZ-?VXVU^##Hd6s6)Q8?U=?-JCzF<{r zlleG2Y+5ksFrEO(oyF`2R!@R~gs}39h)Gblk2B}_JqZ?{*WL}dHwk;UtGH@pOu*>F=P*>4>H+8fd9-CcPER}5q182K8T{)GI zLb9SR)n-BE>X9{C>tnG&`xTWuN&$W*A5aT46}Z+ z6~DsFXfoZ3g-xZ`^^Ps~d#ywL)Zco1FxVsZbYUgFb$iPWiO^rzugrU1slpbIChzoR zfBqZ7c|{srLh7JYG3Nb=%N-!2y61|Ue=h|6rHh~S8iq@h5u2%CCX7T}aCoqP0tOrh zQom+SL5|+`Z$Cs>5azA(dXwQC#H-~OP2669o2z#0FkAMF>f$j`i*gFmJNN7Aa2^$@ zM#pt}ePBaFsUmYqcI@a6o7b!e2M6je_^MxbhXdJjc;25a=Roms8Sm?8oM^*M1!2GK zoCuFPMjIM&B1JV3pO?$7Sq(O+0-2oXrbFiCx?0=G?j9F1iCUYrl1f9?+gP7} z2GI~LKuDuXi5u|;N`E?6#f{3_CGUN9UiQqMyUzc3mmL!n)V?>I2jQk38%tSU^yp5I zi-j*Q`kuP&%#KE06egUe{e6lTX|{yU;09iFu9Q7L_&P6g=`G^R%Hu%+PU4|C#dKu# z=D|8Me{K{#?PRiQ0}bUy++dnUa-t`{W%fska-iWNX+DtvHpB``o%NEZBEJYlvO8=E zl+_;WieI$=HEw+m*Co$_Z-e9KO~x~jk;jxMo?QN4*9wE42aSVOLbJJ%1`}MDlGCCn zBXCGXPF`TBA2JG@cOJ3$2kj0={*0gM0-H6wd+jUQ;mhi0AJ?yGg^8jsqlKRv;Jj^X zjK=S3SeYr)u~D!DBrd8v<2d*e?uGTU{~5W2x3W#nEZib^Z|NJO6*YSOD8OOspOD7$_E}tvtx}6&C8ohrs1?oQ@PiUp7yD!qEehfc^}=-$e)c@#=N7Y|%Eodms| zhSbw$%kQ>+MCswBMG(0Sq1J?g1asRD?)yzeGKXc#YYwp?g|EIv#fRAtw?CaeT1G{& zwR_uC`YFhtwQ;5KktO(^k#Hv&=HUt>RycKV21bgqEh57wp@qI;ljVmoh*s0k)H%!m zrE=cd&P&5kqp9t6T6_qC?!JH1;WPxZ;myactr>v^&!sf z9|!%?TqS9p3HWxM<>c*d zq$!+wSj=Ccd|FYvHi%kR2Q0no%ptw`V8plE$)PZX6_nORh5LH79xW zMSb153@#$O!qn(UIgL;PyWg`4xQWL8?|-C%=)_P_MdI~l9>U>H7k>AQhlnNyW^4c8 zAwQmUYu1$Ukm1SqlcBXd7*_mVyiBbGfv|r7GdZA{?OwkUec!p6ccy}$0qAq z^UZuj&VVxvXYdhyNjLoWI6v9rQ=!Ji;3q~TubAR5KqSovv`?xElFtVcg1+_(lFG@J zi{H(J$h*xhVf%}O$Su|`>Wjm|WLhYQB$f)3maPYGJ~I;`4zH4e2HQkP{}XQa0#i{k zEFapCoheGbt$uM^mnue#qzis=D2Nf#AkzGFyBO*GWCMc|V&r~Ak6UWDDCs}>Gx%|+ zC^6%x*}AY#lz6}e70et&ar23w?4t%GtG-mt58vH2VkMo~#E>1c9yxo5HOZh z5h(SC9;!uV!lsb>SA?{)A+c{NSWxdL94c6SBb+}I&PbXV6NOjZR9eH!iDp)4?U}j4fqpT5E7^v!qmhMt`q3~p zw0ZotYN7k`UiHXEeWw8hDGtR5CP*#<|HuWA>%Mcq9L*28LSupHT;!H? 
zICQHI#0}H9ead?wW810C66U?&B|>p=Sknt7vl;wDTY7=tJcE@L&mKi+Q{hV7fveBXr+1Gjik&4}tSXqtp8`MciQr4_w#ZrN;Jq9h{%;;~)YNiaM3Oc%z}(Ms+M zTXrnF$yJHPuii{UO2N^)N=mrUXqdC(*e6c3M@ON0{x%2tTN)6bSfR@RL-zyWw zfa-Pssm=letR*9Eu4C!r$DLr~(slWu zR2$TFOLEGrt%sWDx`Kh1D?lb~;=Jz5JlG-PoHunO8PX>DwoX><#w&k2Yq#wBhFeR- z_nJB6;&q;O^zrEuyu71@*{v`=!+w5U!?mLfmiy7R0_A z)`v_(+hMDD<}3?r;^c)R?B}3CH}J=k&N)a?ta!wEe-4g#t=y(!F$?knhP?qFX5fDS z00960B$#(J7TzDnd5njLhX;|B$Vf)$8)f9)5=l`~sFabCG7?e|m5~SyJB1RmCCSLW zwyf+K*?TU+?p|pFi&B-1E7g^Ll?akDQ5o;lBjDvrm>N>MQ_$ew*Z50&}3O z>X7jhMJi~zSLjZ7H3fb!ebsj8oB%w!T~A35CqTH-D7})|B&c@${L{~F8r+cKU7gCL zg63|Xl8Y{LV0y~3-Q~&xsPty4`*&~&h&9(VMAa?>`RuKK+q+jmgXFD_ z+*$+2uU&0YyS@g*k8|-_jjn>M{gjRnj#UuNVZNw4z6|;p9%brzErGA|TQRNF1+c|M zGwP&150*Rri;j6bFpKJNy) zjp-Nk%A0_FmlAhjKrz_O&+?m8@eyS_Cz~{BSEIIP2Clb4I?%`CX16`l`q8%1tJeuy zqXAzOooJ!1V-#PXxCgDztZC02etN99~VU!E3DnNk*!^=+MdsyT~@wZzWv z!rmoBY)ai%Y`KIkw=0{fe_28w>|YM{ZtR>t;(p%Yxr{#4%-^j&vw|%2Z%5uUT}8^< zr(9!;*HBTHZ7;9m23m;75URD_LeK1*1Apq$U?I;XBfclJSbmPFRy&`7XPsAWc5l;R zEDqSH;no%Q8!e&&#{f=2Lo&npr>`8;Dn(0(T`+8?R6 zqXNFYn&y!teB@JahxsxI>m;}(8~L!{+5jv5kfT}2x0{TGMRnBVHpqBWA*61( zhYd4M%qA>nvSYftY6*{(D7Y0$EA;wMut4$5@K7}c^X$5js>sZNzn#4?BA~&6*<>4c zi9F%Jdv}}9RsH0^lR`5o+>0Ez`2FKA?+$We|AJC0DKk#IdaQKC$%_+zejnPv6~>A0 z_8BmxhH&Ei^Nbdko^oPgztLM_mpSo%UC&JeQBHh)pl)AO9|sPLt;of}9Qei^{l76P z9N1UEEM%>gg6}$? 
z8=ceuTm|7B$U}pBaPN<1v_LmtcaQuNmkLh%vzmkHfuMV??NV ztBH5@1rZ7bJTi`EBSQ7G=B&KujPUgHr@X)W8K7@3Re9mfHICuqN;+SClaw zd=Nl(x7Q%RT?V}7H3GD7v7YYZ*?-&MSXzx?#`{g+XLw#C+01r?9Wa{KNpi1XO;78-&m-YA@Tgwppl3>UP_wdxaRBtjuVf%VHk@rOST`Y*d;{X#} zQ}<>~Ph^6hyu12__?e-4@mv3r7tAnS+$}3&g&AHLtxf21Cc*yk>IZdnEYMl7xmY@e z1vY>4lzeoY6?Qh7d9t;$!timy>6G(inErN>=6X08>QA+mF8sla+3b^ z+rO+(S5%Eth>sP<|F>U)EY1RzCd-6nDI_=^`sjG>C=(>)-%jJsCBnx*wX4X75&n>z zbNzdP0bZ6n;65Ng4Cmo6|-PfJntx<>w&b{eB25ZqLjp75YSKCl& zQZ<2DtQWacJ}Dc!45G0x^Nh<1qiE2|@vP#%apd!Y$j~i0iAwSfq!|V#P~^n+Az7|* z*uuc>yrHv)OffwcMI4)8%r?B^ZLUT~wS_nYIs0q_xi{={Q51XM_GVEfiEkfB_@ z=Ug-b=m*q}gY*&b-6%8aa{n+8-}CUF*3P?_xE#|wyl)WjBBzDt6aB!mg@rXpzaJ1o znNKXf?+0;jtw&Ez4gd!?eY3)&!{F$HN1T0U#z4B=LLNi&BuI9y>=Ra@g5C-zUfqdV zuv>e+Rq@)+e;8W0aMEJ|jOB)Gnx9_;YkLVF7djUKP1%>@w~W_ylo!D%@}g1w%>|&adh0R8a2^On zd>vQgoCAG}OMDygRB+w@IHC5$Gz1)y&KR;A?YT|hnL_+WG^8|EW>8LU1b0##{$;zWH;RY?<|C;B7+#Ldr;hjq~xKD?BFMKC|cBjWp$#Q~nuNiRJS%-9o zFO1mUhdFsMmx#@FCW}x76ZYp5*NZ4)#*{{SX`yry?$+|9oAhA8zL}z^>j*1uv$>dl zx04k!X5D=A+LnyJkrtO_R>-(W2n8rSWy7kU8Z$PT*zwzPIs@~!>^P%@7U{E6u$+T1>cH$75J#0g3rk}=dSI<&A+^F4fj&;isY-{^<@f<*j-~u-OYi&2M!s99^=4) zl3c8zmK=DccK-(vUk-dC@auu}&m4HSHJ_4S)(+n^mprq|=D@KD8f`ZdIq)nk`(oEC z4jho^;W~4J13x@{yJYns2Nu80o5(Ul!5mM*7EIzO_}`d13#%Rl3*~MPSTwWatA^$V zhLdbqr4kz?^^kGr{g~nK1XjFFJmbx)wZkzRn#D^6%-GO*Q&xtH35WSRKmTOFh-IQW z*O&b1aZ&h2jb{}Be>;7iB}kAKFPXY4?B&}=A_s(|Dvxg#)55ra`=DzUN{NiTSaAN>~(lGn=<&TG~Kj|%lmnKeB?JoOUC zy66aylIeQXd~^nIlXtzD%bN%LMG{q58&@5-!XP7*Y=~UWvL?ckld^4Yr9>#I z8}@T$kqG%XezkdT6QQ`;T5?4{5pL*i*A2$*f3}xO2ZQqI_}h5tV1V|hhRP8FOxK$z7Afr#I(#&SHaJV4XOC|EqSS=DLKCRq6K8hio4P4r#mpSXL>>QfhXN353~bDE$s{+!x|8%>!l_xmjNR0MQwaP^Ainep1RZr*U%InIoZj4ota2X#2ZuwA}qMl>7RoYMAFFGKd{WX|9__uWH1X%Y~_E zZ^m)Pi#Alm?*Dy|q)$aBU4H7x=FXsl_7Yd$sZS#`$JGD#{3Nnn{wcJFVH}-O`Sl1{ zj3Ag|K9G515INwyD5jNO^zzI4OKpKpG#ngt{;7NeGM7`Fi7hHXcDZNX&5_f9h4`xT zP)!{$@s2G>zS{*_)BEXQ;Q)B_foN)&I0gjYNTqL^%zzv6tM>n_=YXzB;FKcMA~+(I zyf_iK3_=R(f}QEsz}kd!anbD!P+xyp{o}7K5G(dx0=d(`GdI>s%-_($aCw>^^}z)A 
zLh+76%FCTA?iam$;7AX(rhZrn8Z*G-!MP!h>Wnbm*y*6K3=wkmT^znF$^_Go>D~$# zVTKp8l$ZVYlHf#xSEDs23uKBLj#HUtf$|rV`nwZZp;~|}YoHn#I&$g|TN24I#BB~s z&XHk;jNzfbd)Z*xfs9a1Wj1ImJ~9z>f(`Z&_&+>TVS`a-49{fw*QoV-OV85UXx z47FO2VO0?iDc}byOtOp(XsTm@%odC3hM6Sz!-Q|D<}NeLETS#);9!D3-z##{c$OfTw4lPDtxKp#OP%_aK@DDq|u%rQr(Jha3Rha|x?^Bp%o?Q=jDYC_hQXJr2EoJzkguug1Fw_Xk9|)33+&Av zaz8U{0bWMje2on?;CC^=z_4DbVJ#~QtZR~s?o}p%Sfy09@waJU`JRZ;2qgzN zhY;?+j=`Y7)AVQ`u?*z<;p$5g6+q*6-#6I@=%YBbl7NC49XE- z3g|J-1V_ugILdc?T$|0|(=+NV;Hs>v&RfYIFps2XP6!Nu6@Km~#U{g`WKG!KGHDbT zn4T+>q#p<7TRp|DA>+VrYOKLKb{qs}G$x(18wclj=#y_KjDtna%^4NCaqxZUUfQ9* zW8ld{z3QZ>F|dnS`qz@c81Qz!Ju`ZAbjKI1pNSbB1{<;Vj_|)BFj1Sjv8XWsn)22A zbWZny@v|yT7JWUyjMVd1FRcr(%x6pZ({_Md#(rz3%vQjxRQy`ksTu6QDDI=b)&R1y zMY3l&>%hVC)B7?+D*%V$XlqYXAu!5QUhFGR1^Fn_Trpe^#m7oFKcP!TWYtenm%M)> zAbLFG-VjDkPVyr17t0WBR92UvM-}RSI`AnhvmP~et@}z?wV?QF5q9ghU5K|fDCSp4 zA3CpP5Es8TguZn+-_oiaM^9P{qHC*W(4PB;?bz}If6ijTlbiuR9KBhw)S{2EzzH&530B&} zK1Ie+N=K6kL2TI7&Wa%TA3J_?>!h*x06SiHV;Veofr9Ia9YKA06wDb<`6VT}!yWts z-`@sv;MX<`RyJ!KSSz0IRL~_(OdEFe{=+yt5r+ zsd0IA#qYTAMD#D--4$FoJ6Sl|b&dRc5>1EO6t7gNx|M7F%kC5@4!Xd${b*wl;m{FzpDGRPw z&&_UPA>p(!^SA3>OxSRyg`h@H#C8>@T*p5!;1~6S5B#su$!*GoQ4RjbJLmw+}!VHr8oC4VOb zE}*09!h3~hX3=a-i_hAZX;l1b&kb4KF*NneV^ZMF04ldAe=B;b6W#uELiHx00vVmN z3C~H-2CsPBYA#)E2O7^=jReewfggYA?1mi`1cXWkZM|6p=lqD&n5%1mz9_}_X!I5s zxl`9JAVv%8N|a;EQVFpBup-y4RXTW8*MUQWmjO!c;bzPhWQ2?-Cam~5cl>f5A795h z5hC)xXM0+h;EQX+!iDdcA^o*CBfn*K{IRy+fzfahyxDn8%)N~SukG#kwVolts}DBk zT&GA-s_LMKZ!Za|R@!L(XePmsih|c8e@O8BP08-Q9W~QY4EK6P*L$3lcj`H|GC=!OmEVirUyL-F9=_O=6E8nY2RZ&xd#4=<(6RaKfjDPc_&V0k&R2&9 z3S*M^z2Qx8OoH|A=czT&d+JX7p`vB55YD(N@@N5sdfopREjbH<9DU5LeV+jTVBW!T zp&=lzowkz0)eSyb(Jn_7`~ikG3_)zhUqC~_=aT5?Qj|8p(KPa{6_NVxyM++?kYVWh z@73WUWbOW_xubmyQKwmWA5=^tH3hw4=F(|&u_QmRXJZBlY3TZqE>n?~rscHNTPjlR z@j4+`LPg$5bXS=dsmL(uqOZQlEMk85qHpWaESj_}{&AK(i{w(Z+sMIGxW>0C5XK4pIEMzUA)Kibjx#Z;& z88Gry?Ao?Zeg!Q0uWtqkmxBC1{`A^En?RrGl|4@MJ)k%3mx_qnAdrmc>SQ}Q4n!8y zf|^NGFw9Q#Vr6*_Oi}a#`9$M08wLP>!;;ru$#RXM+%zP-bni*_A?*J)t( 
zT|JwWDq7gCNIdO%dZ+&!;9O@rJ*>*=T5h?=2y41Ua_i`spqq_8IOD?%&lP?P;u$5u zhfny!#)Mg6&wZZBTp2Rl97vd%Vr7FY8??<)?9K|0+=yUW|H1-k78q4`SCgQRl0#h3Br{|i zd|~0n#0+J8xx|x%n4kb9*?dBQ2;W2t#w%(w!bz1#-kv=yGD(KfU zD&t3hJf}nHlI{SI5Q$c;wC({+jUT9y5v@RK+j8G6jXGd9*k#9a00Tw6c#+xG2#};^ zbfR271GyVd1=iXZp%g#=#E9rhl(^iM_p7KDt!EFHc)YJiwWg5`4HXS2h3>< zl($%OSQ9c~KYH+WP%~mqjOTjrvIV(CeR89{(27E&gKqJCZbeNFt>nY%t>|U0!%;m# z3$j>U&g)%oMDCQWIK$iZ=K+5H6!M{;y4$ol7l|&n+{H3 z%|}Mg-4*FB7a+E=zDLuG@;wVQ`5yP7%g7Gl_mQyC~7MPa%$= zhG;g`8ASi@7Ma0 z(_sIpXiN5Iw77LlAovkEu& zMdTB)!1roZt6V0mqbZ8YelX*y8}Xl6zmxFSN@{CvItzZC9{sN*yK zMhWgmY&eoBaJc3?J0^r4zi+@!!QZMwQGg=_?=xaNI^RaY+YOIqxBv&Xe#I=roy38^ z>OHZU+|7wiKa>9L_U6P&KSiIePjKQfO_+GBYFZKOgIAbf;z+;6I&ui$( zT&dy25B*O2{D|Sibi8zi?;JRB%22?k!z!FuB%Xfa+7bu0Iz7NA|Cs|nGt#z|g%MQc4QIAUAl$-zJp zp4%fATfa2?m-B~+y$fnwmaQ3aAd7|f10n-{2Aw@vXbOfiuN=M-vWFphc7#OR59~dhwneOlPOS;CsS78-481=a{opO*lIY zd14?Croj%~e9Ruk_OL-!?hn@uEZN{)sb`Nk|B#_UppTr202ww#T==D_%?gcjtk`;t zSzwl_qo3LZ5>#eR$r)5;hFo_VC7QUHU~y~vr>$N_NVbu_miv(b-d9Sz{Ph$)JRx%5 z$-0FAv6g)P`UzU7YI4y)P-q*-wmyE7|9BnPT`*0}O|nF>T& zX9U7WM#1R^#QbT{4~lhD{AwPw10#pAj1AjrphwgD((Q2uFj7qwaz5}4eU06JHf*aB zJuY#X+%?mLq{I0G@~NHZe*gdg|Njh^cQ_SZ7{~9jt}RMYMzR{DLPqCUQQ3Z#nUw6L zGAbHmMD{8qMU;BBu>o$|9pu$7`$HW9OVea`Y&p&~(c3B?a%NaxW_Kd5~rjH=Uu&cZZ z>>-p;TR3^~dq0X-iB)OU=s}9-DW^26I*=1FsIaH?2PzAa`&IR?8nu-ZoMfa@P?0<9 z#7Oa5uus~*z(V>vSi$w0^$TslRluob_-r@Wd$IiPtHypX^F+gA=MNf4F-&Id+BXR- zcjkYpnqh!v$`)FMeN5nx$8O)(HVZ!PVvGH(TmYeCYU{SfE`tqVA+1}Q0F!mk6}|8! 
z!ng{@znrBc=*=mf&!&+fj~u1VRdJkTr9A$Tr`3UA6;9sJ103x_H=>)Y<|!sw@>>|tp>sCv)OJ@pwM>`dnF zKB&bH^nmnSw;cKUlxNaju>ypEX6&XZ0atZI?OXtN>`tp4a+?LWB3I%fKQh4=vLR*0WEuqS*kKbsM+bTix@9h3$3RoT z*+;74G%$Bb=c~lz06^`7N=oK^An1ap{+{hUVCrV%fO+Fz(0f>WY^nG+xcEk9y=YQ9 zz${(*3$a!Zndur9^tS;VpbPf}41NS%;(BKy?v?=SSz<(}O)lb)c|7!9wg#E@R1k;{ znh^VRt%!|sJ8E@uA$oLlq8c+-moK(G=mFpN@;$bF$n)w&1J&7nv`dCz9cey@KB<(j zQmzc4hPjNw?6hHIvHV0+y^Mz9f@%jMyGKx8PGnHyrZGfvjL?mC8%Hg5U*1eqO`u4z z`t@&Q>FDi;iNv@=Qz+YpTD^;hfvn6W&Fp_L&|g`Ziw5_m5&2=^;`cK%sBX;6f0GOo zNnX6eH&w+%e~V9gWT>!^V$FB?7&jJ5BR#2?W_ZR zD-#t)iLcWum_giyuECOKGpNA+sKf=88Km6VLW{PWLFmZgFF(J`Ait$mw?F5Z$bj`+ zZ&Z_oR_#?k^Ag#J9MvCJ_Kc0{iW5)u$;~3MfDbx%9cNMdMEd%@*0YFmzv1E$fmtLu z<#{{i0UO;Q59NtVvXRRc>*bzg7BW%p{<(J@3!Q9awY@4}q9$kU8}c@5_lTQqER|rQ z)X#0z)?_9UHa4kQC%{C(r~iq_>oHM)cj?)lhfHLD@1XO98Vg;IVP>71W+BxLuQU=n z*a*#E@K5`-7N4MSva4$jDab5P;(pJg(PYtk`X3fhV)gmOnVXAfE=|%zXU`IvIwsO= z)47CH&9a@GQkT(^k^-Ik@(L3F@2_;`gH<%K{(5v-ECCC(iEMaufrvj0klzc1kg({- z?vER@$yoGYls~ScV3(&B_FW?!*oyqfSaN|ASLU4FVLio#v%WWszx~FI1!Sv@RFZgb z?b9>t*h5sjnic10uu8=@)cmdtRPth4sc|?xf)DrK_(5uS=EsqP&4w$+0ys5Gj_clj zL2Nep-iW+G2#=OrcrBD5gf+Xrx_lxE4OHz9)KpUsqxc8cKq7Mr5KpGEMUjHjfHuSM|L^Sl0gl_P?$`ab-q94Ue$ z>O(t6T}3c7n-P@P6u~*Y_mzGw3FEE(+ie3&g>j8$LCo=E!gw}2hxg=!5Dvgxx}KMX zuy$0~&DRTpxHh*|=p>y3={_3(Q&%j!#wOzfmg_NB;+g;sA>4TKH@hAUY*BnFSM3G z_Zi2`iPlw+9ABmvl1GHxkFtL+o0H*siNddY=@i&yU6B@jlM~ho_tCYvxnZZC%+Z%Y zJn-J2rL6CFD%_H3BSa(d!7$zn`9c!>@JRgl|Gr@e7#c;Hm@c% zgJVKajjv3$?41xyQc$uB+b9gD&qO#2o)d<#X=aaay%C0ROWsA1F<}@e%cN0wL?EBZ zHpTUVA}}iJ*PpX1!qBFheb?oWFcb-UE*brJ&CccMy6Ghh-A?2tcghRHC(ASok!~UQ z(?4oFG(iYXD8=YBXbQm?Tb)(kVnLX0pYnD5y8x`@RJ5Qp@Wa=6Tg@$>^FiM+zDh~} zFC>dS(yLRT!gszd0@97#P^wV5=cq3i^!K%=Z{_2JSH1n$oq9rnuIG|G!eWjF~ek_9v!}wp1g6_RnKuOW0r*-GQ}%WN_eiGJOi>#+th&`I>B@KD z^Jcr(f7zAbTuiw{VYn}{9JE>oMBgLgy|T_@*S{jZW(^;{niiybMM9+A?-z1EXiyD4s@#enoMJ2D=d+6!C$n-G% zm!5bpih4ci*Qncr#JEZl`o+4@_!Uo& zjjlLo*Q29M@4kc$AJ9YR`K`*k^AX|u$#7nsJaGBDm8X(02Hq)Y(vOP11LxF$0w0?; zP_l)#ad`YMpdR}rp?9qh>^Zq=b+l**oKCICw-_4*_HL6qPwu6I2H*7^S3WX;dHe(2 
z1lA0oULtvM>|=u1{3u+3Vy$0ynfvTG5_b>yJ#WrH67V zR2W45ytgHv3cpV%%Jj5T;jkCv*;r47Egyaaq=!*qYb{4r_C_j1JA~wqXYs%zg2y+0 z7U6+mpBrxLMsP!eXWM7%rn%sf?vd6wTQ0bF>Sc)FD^7TemwtxC$q5~MJBye$9Pq;8 z`eTH(x}=5jZub?Sz#guHg?&k6IAL}BeYOS}uIPpDyt71teyp5;oS!84Ti5hsX%Pwb zUT;6P*^2}lgxinbnkT~A!Qx%>hlo%xM6=uFC;__V7+bD4SONBiAOB@tT?FBq{L;Tg z%>jdLdFMTnS>WZB{IuZuY2cjDOO4f^0v7FUeCgujpnTTj(53w}(0?OJBLfeBcm5Fg zUF!v}EcQ!IB>x3pMy_eD6K)6D2geKSIvRlnd-uC5zSZE})Jg55w;ljP->J-()4$N~){in%+y5cnUmE!$3H|7UH`n=+;bGJrU2?=Q zdK?*+&y9xVPN58e2b&!xr;(A#t(C9dfHEqr*NnL63l6 zDbRoXV-m3NA>FIH?h|qCgk*`#ZW5NT{r(CT< zk%R+7g{B2}lCVeCwCk%uB6cRNgq_MKVjaf6U;}?5mN224Ti6mY6v;QbY(vB!hgnA( zPZ9AQp9hEBuMu%;@j{bD77?F$n7g64n26nqj)!Oz5pn&QB|X(RB0i@p)*@g{#F0t! zLR*&!n8g?O^=TRb?~l1)rnZfM@42{jW_hn7Y-hDc^x+B`PM@3WnOjDItFEn$A z%0oTwJT)RQ{(mr-}&#=-5`%P4Os=UbBU3Q8oXL&5$Pbo9~7;_Yu& z(blbDZ@-!n@RgD&_Zx2r*tb5{@C82+=O*3|kuV|RkQn0DOlKk%O8myB@gEVdzj*x( z+)2caYQemba|En(usgo+GXXnPH%S@2Bw)Mq0Xng3HnSugxNRh2mV!#n#kG6(Gb++r z)_CaB*AvsBH_7-Ye85Y5LBVS6GIozgIdDa)!laQ57f#;!=VBJ%#!vws@>b`;i%Bk) zCl-0IgvSG6tr#j+th_y+)KA5r{^?OAK3?pTKAJMayT(OX-m$zBRBTaD5p=wgiXX-u z;V`*L#SKe&5f)ZdEVtm~F1(wH$G_-Yh~GiQ@!oWa=ZC5INO)z%#ds>FhHiU#j7`NC zVq$iYLU^&VviR02Q9dlVN1;~xB_H-kEB}1PkRQvx)M}Hh)uWlpo;~V6^q%1W)eAn}xw1+M)-fV2a^KvH@HwO`? 
z&IVwl14%{A?q2QxI!5{9q-P;sG#$1Iu z6=`2d*e)nrbE7y3U)VLwPp~86WtYQ0--Hox)dfeXy_;6iolIIZ*YFZz`wUfhc`cxu z^i4*Cg|o=0{NDBOQYJc)`QwR3A_J+HSfnNvjU!sgMpYy;gw9;iuQJg1hm3qpwR3D+ zkmtO{&-Fs(C`D~rmstB2%vTTXi`~}(%8#@Ey$I<7vl6WX9?}Cq`D3GsnZXDsJC?z( zs5%Lj!_{cR!3@B?*SdhwF#}@CK7RSE!v^{{&#{)u=79Ol1utpiMWAq3+D>e48Q9Bs z%X6vGAG zTNfvr*5>9^`qX-MB{wYWef(;03lF3keDv)196L;ee4_vf&p5Mkt zh1Y6|9H_c$bL>w>jnmmRp8Dsv&fy*v5*}X`)&D|;?MGqF?-?q5vijA_Tap)=wEb?{ zVZaMZ0|@s`-Fe}EzLHGwNM5M&C6yVP$qO|@lf=xjdEw)SHg_m%>#7ZhYk(;))a^Dp zJ~2XtZgm^~gg8>+`{H@7#czqw%WpHAmje>fq!F6C#~$pLk9 zBUjfAQK0q4PVP@63Y`0v-5tG;3<;VnW9jQ8sNVVZOHmgQj)4u9U1mgBr{?mr_ZtB& zKJb>O{zrfVbL6~^PpjapdgJZuqboo@M@5!La0Nv5pRDlMz6|bkvzKDkmw?u5t@9QN zi(qk9s?Uu*4>D}?B2^yD0Z}>Yh->n*V6emT-jyU42>o?c>jZrUIG@-4{LXqB#A(Mk z`7}*|)cq_O3Vjk>NZFm=Q8Etj7RfBn=}~Y!UZJbEbOfk+hK^bF(!h$C7!RKp4JeLL z78-Zczza<$=eJTcK&+yDy}PELy?Qxp(LjuB z@I6KU5nx9+otq{<3Z#^~+j_aifGR_u`>xP982TcyIZJH<4CQg!J#?4^?M*{!gO}*Q zGSDg@S9c272xLCETRa82?@86W@iTxZYKwk&iUF#d+Nu^`GeCyct4lJ%)4*Qs8bd8) z8cc0*`_FH28gR-5MVrOX0Ku!{k787qAenD?Q$#xxDD8c523%%=A9nF&e+1Z|r-O3e zCW#HWZzT?ytgu12UBaWHW3yoUAGaf}*|We_zw}XQ+bsBzbD~p+I0w!~pDNfcH3zuN zh3&L<%mE3O{ocH-bHLs;O5bE{|5q^iz4gagz)IgwjQ5@eU%Q&ss#n>dX3p8M`W72p z^6X8>5n_YIK0*mShXpj{>{IV+vp}tzi(EuM6S&szwQxvgg4gz4PiY6%^47jGmD)W6 z7?tJ(O^+Gy#`>y#(=mAsz_vEA}UV2wKz?CjQr&d^yR}| zA;sf~RXKcBNW^ca(tXQX)NSxD^oiJ46v8M7<&ph?{@v{x2y|^l#xb8SJ^I{^D3v@; zUWR|r^y}x!L)!@AbdXIWgm~4ict4$vgYCQJNYoH%}{YjcXZ8C_o!;?CEFASjr zpgO?Oei*e^P@W!N8Ag$?t*7NQ4GEW@r1z%NP;GIFNx~Bv3S&}DYl~<|r8tow{E~*Q zgv6&9f1#mt1?zC71sZxE8`?c*JhJwl`v|&~j-dTF+m8jPjUvV^*{=O{qbP7kl;k0g zF~rW+HaIRejyhL=rt^GTn_mUH>JLRtAj=|Iz5~XS$gfw%D}j@aB=h83wj( zE4>9UPzu+h$74qrNa%~$Z9_)}Iwx=QV)+6CN$Oht9$ouq6P`wW56rqN4o;)#iUz&l8`G#xhVC~|HH{t{AFA6znn8Js*LDjXo})n)YMn*I;P@)e|<|2_2Tsa0e?TpS`uAmEqJ+c-*0 z2$;O2P~R6z!0G>8^Ob2L;JhrKuNy^(xOf|1z2F*G{&~{$@=Wd;A8vZ^#*sn9?&Trh zHXD*~xs=QbZw3kLZGD$@V3LI4DRQZ}J{ilEyWjkfLdHFohFgvM$(XfUBc)Y&4E`(D^)-g z2QIv_&+*kE4*asZ_~mUb4r~yiRk#^Z@aN3k4}bbnFnd^Wc!wATXQjOHA1xtcX_2u@ 
zGmwni`*nhSYe`rnThZ#9772G#%niQf5%E9i_A}M11Y9yacgoL#fX{BIGv!oVMfaaD z!e`g`_`s;Q2|r~CZFPNSTJNcd0} zlC(ZwoaNDuoJAho*uA9*CHQpZ4F**s+Godgl@s@oi)|>S&$$dlM2D$O?WzaDj_1?3 z&07G)Vr$dS`3|5`IQO4p#y=1?PqrQw>jygBmseeFhrqfY?@(LH2-r1d^!h;kI57RC zog_C!2PZ^Z`nVz(pmY7U=*?U+U{gV5OaEmiaNL!YX4%043Egf^hc?fGrYiUMbi+B& z8}qJI*lZqjDsTsP?^^)ALhI;4q($IX7^aZ%W)V<&PA#k2F9F*0YVX+05^#tmSxUz& z1O4NK=JPTupmRp4P5J!_ICJ9d{5tzpu%ohI-{jmXusGD^^7t?Tnip&dZ%re>OrH{`*U3CI4+`{Gs(F9TodWe&$dka00{NvQ$VXf$Fs{uwyZa;sK2KX1 z?{}a;a~naep`#Qi#;yF_A!o+PP$~A8#P=#Pw>SOKp@Qge@tv(Cp)dXgYx!K?tFaPrBZzh%C!@ogXiDs18)t#{R#|a8S&5g+y*r+UZ>iEi_q=L%PsG46{>7`zAoii zJ<7PhF^cn16EZ)mDEjVBD;ho>Z{{TX3+a>=-thMSg9MUGUj*LiLK1=@-l3*FC~)`R zIIoYr2*yMcpX}>Ls#g66lJW;o(vqXRqR0@EJuox*IcOLSN4PgXXr>`iuGltebI(S|=cpMqG>-Y-XnLwS5va18dljz!@WBQb3NEn_eYGUCw3>x(YN_jv z@w1Uum~5h{EgRLHfp!tTo%8|MydO@m?lckqMtFU-3r>XDBp$q zvis3l)IGTFfmy&TQdVG|-=8y!OoKuKuKFh$+|i8{Zf{s|M5BG+S9kwAY%?i zDjz93*D{CxG(USkxIBmCl6M`txN#m$hWZz}%FiQa*G7j8O7kfClZf35g?aRHmsD=F z;yg;Kl+G+rokvYAtzUxl=TR;}OxV?F9+|(9IO2Y79+@7{-XQgG9_?p&mL98~NBq}G z9u9x!(JxEO$_2(ex~}vPMo|{fvy;ObD&h+Wr5?zw+`WKqk9RD~9bG^IcH>0F@C76( zCdO|5uz(^R9{x*PSwP3_=y!GX7LoD?Sq|+RizqYd&lD_QM7sIQk~Ra2i0NxPyq|js zmA$m|+a|w+9yYmtc&WdH{s#a6|NjhDcQ_Sp6uwGC@{7^Za!AJ3??HS8B>!R{>%jhyP|EpfRB(j1>|4J3bYp722mBJ^PE4Vf0 z$d$3~74)LyS`nC6@lE5hq~W1eWGfh+d#1ID`;2hk(1%s5*q#@{l)s86D|6py&#&SE zbr)54i8ZV)TBBs#T|?RwGY7}3HH>+Fc2e4H4O>D*sxWyV8 zE|Asob=R1lzJ`2LF1yp$V}YxOtBr53VTEH#wzK9Mil)ft)G4g< zTpo8McdTLUZj%x5_Ej`9e^#CQX%)pNNuO>XUB!DE8<@4~*Yk5xswlNy!4SzGMmP4a zVBrgs3ag%F>{IATIvlx-4Rv$hb005b{erv5i-XJf-GS{?LCX?q8{f0QXG@qmr>pdA z<{vUGoplm=_z(B~yw z-ehH_PN2oG5^Mis273N!wTZJH#~^(}Qme-(Zb^zs-IOwng~{Y?N16t4L;R=s;t?A5 zUGsTFrS)OG>l4GJJSujS?_<~O>qfy1o7YtTYsZxfw1O@CP1xC~5f^i?9uIZ8Y_&@% zMMJ&)0jK$Lu;XG-lTcX%zSv>@Jc};~Y}nbEnSZ5&9DWY3sD^!QbXnl?A1o4KSH}kLcfaIANkC$@2@X)MNo{g&?0_PUD zdsWeZS0RU{XE6w`JFTy?FAafxM1+9%s}aZ*&DZ-qISQXo?oJUf90zu>`b$h?I!H=t zp9&P30Fqj#3wQkloEz(Kh<-B(9LMlku;>&JH^%KXi+%);(vKJU)tSs>R?&ZL~31Ag~u;oJFhu)I=f3a94bTle?PW3lsK 
zc*z)#aVyr4}?4)@z}^Lfh7NMu66JdC<@dHS8nyR2Dj|!W&o1U*&PDFs8n%roCbn+O`#2W;R@XV+lw5H(aye+?EdDy!Z4vhD4skx zS9baiL9r^*D|KFvpy;JMP+DNT&U>jBB|jx7N?GtF-?1SmUQe}ip0Oh+`W5+H{rhr# z-?Aqod!G>$2Ry9jjyxnNex&rVIbI?t#`s?P{8?gsofXb%tlx{x{Mxuy!5UcKhjTlO z*I?{rSegvm8l(t{Z)1&Jg=haoZGEY-3R63;%SyMe!2A3Ao!1_(fd1E937Lsyh)uP9 zn`ycXEHn)vgVrTTFFO7D?dc^TDQpgo@%#r8b-TkIyB1;Btj#b=Mp8DFTmD| z`7bN}%!3Mp$vSLi4ipoMD0gJ$U?O2(@qbpckaa__v^;(W*8UnrX?9M-Ly6Y`b(^N) z=sp4L5TAmt96qMEFHM4yPI98R#RQz{Wo}6pV?e@SpvcuX;}GZZbknJhQE)W#nw(b{ z0YUwvm&3w`K+#!pt#e@j?6{09w3TVlvvfhw>2)96?pLn&h@pbnP@j)T2nD3>#z@Of zbwIIONTlVXR@lP%d7lq+6GYpN)o3*}0K?2<+s4QmQ0LwB@k(nMur%%}7!)Q#g!{Hb zI~Hk{befalfto(Y|RqkN# z5zH?>VfBJz6z9cdb$T9;BJ!Dr_RpEB^{w1-mJDgzz+2R8|-OkmCZ)3ydS6BrZj$Pwf{fxd>hhPQnu zP|vchiraAlw;Q}^=GC9TZ)3$;k#Z9_tvJ-jEjWSuw$FAKFi&7ghEs9?je&b7G~P;= zGEnBq=LGH_47?ZH)m{IDfw#*7I+7$A7}fmH)^CuGPX$sfPDRu4rC_3wq#YfFGu}Fx zo6%8vRUvb$BOSwRC?~`I&~eu#T?=(O9mj>3>)ZD;P{@-l&h{JwIg41%7$`H4xnBG@ zzrcFk_SbB5>ZBu&LH7M`U+JjTe&g5dEjk(;w7oiejE)hr{G>Btbo|*OS~4U_M;qVz z|FTch@l?8pgUa#sbFREOs<)AjV(KRa&c%#l!L&&**WPjbSlE&+^==H?nP#+wdPY%P z_4tbStx;_LSu^x|b_53xD2cr*96@f!KMxNjjbM+7v>Y*J1Uqgo|I+as!5cdZrG>AK zpil6^TzAVb{v{a`FhaTmPPs4Y&SA{obI;N@5b#cn*1Ucy7BU@_D8$!cVkH7 z=2BDDZmiabe=hZ_3;C}t6fGHc;)?51UCg_7tb9mhmsf7Zmi;mJgxQ;M|6$+H?-U!c zVoRzE|1mOF4CYXm$4F>$FJz;>IvEeMXAblXl96ST<=ECMjd9Ebc`<0C^crFy4_+bUyYNx!$l+z%ka#1g| zIvf5`!{@W>(jYwHgZi43ANV`1or_~h#B&AKcl9(2Fx+YK@%z~lWPf5^&Xiq>WHyP1 z9w9{Z60wr>8%@XUJdVafs$MvAx}LuyG#ZNLm-8qm)8KsPWPl+t4ODg~*re#jgPHS@ z>|7>CI4wz=luV02FZ^O)yw13VCA1Z?7H1X`?)`Lb*i98!$ec*NNRttGjce{MFyPFBP2#VIm` z8wPG5CNzT9Ma#i}!Di6iyr)>mvmM^ePCD3d4oX#ZZ zyt94vmS+-3^^X@)K2LzpuU8(&(-}}UI%piGMhEc=+*VGU;}E-SrPX_83=Vz!cIjp7 zC=f3`8`G#Ch1fakd|t*Vc&SfxMbC~xX_Z9u(}Yp@J$AxwZE6IZHOc3nmk)zST$4|< z`VjD>b&Vw_3;;d4Wh8b#4SF|6VOLKdLc;FNZU85H?4$Hg^X`@1Nc4O1~ z2UIxeZqXQ>+XJgP&|Q{ZKCX=|Wl7bt(K8<4%;4!vo*j>ULvb)nEIi4WsF1sl=$$)e-Gl}04wlx8=YM*E$h&!mq0c zbz^4ux%c;OQgA4!LOz~G!4=Lm=kwzfl-SSjmlj3Adg{5O5$n8eR_2|vrzqHYX7i|` 
z?Ye%$_w2VEJs7r)T~7CX55D8hZMe)r#Xrn_?>OzL$UQlNF=Q&ro$Mj-E>V%qbc*nP zmWsy~c{MBQsHj(5qBI;zMH|-vQ2}cz8g{={K5Rrqo+=VAlN}Y$CuU3Bh^L}U{!~PgCS4|PMV3BfpV#0^73jn5QTfw~qJ5}++CxX5 zrw>C5^-l{kdNESUaey32o_dt zJJgAu7OoXuCLQ?W#HY2euy%}AQYM|RYeV^-vuR38t$64AuAxIBt@tTUwsKXu1y@N$ z_oj84v1m-+o&K~5v!|WZ!{0QbxcZRkJ!dj%XHkKSbK?}*_dz6m;p*h^=nv+m6bu5zCG72d|E)Z)UfRAdNIiDKZtoO6_K}Es` zs9IZgbjb*Y27}$Hk(;ByPE+19pfUkCHTe7nAE$tX{k`_*p&8&|FM9IhzihZbS{?2; zM`)O9IA}at2Mz6MlME^0Pw*TLc6>}A0^^#IWYOSiT(fJ~Z7yh?fl>?AOc=WCJRgwfrb z!b2n|`(D>2^_UDVVjniOw2687X7IvcfASDecZhH?7=4B z;b4hSm}&&(Th-b^SFZG^<{`WC@WWZ>lds6cZh1HbV_?TaF0 z_!asi%rt`pZ@Kh;tezo3XG;pFQ&Izj9d@?-&f5SbtS8mpTh_z)F~T&<$2#b{fc$^N zYC-lXF77U<27;_4Rr*F1+zJwEQ|+jLf!CknJg=9-KaK@=ov(u5{M=L7ClU}1#M83&1g_sO zw`;rAnnMcA{<#)F|CJ83T0Z0B-?P9g!jAsKJqPX*=dvRI*B(H~F(Nl6qT#iQjwe927IYpsO&>;KtK7}mgxMgyjC zT%2zO*{wV(n}^qRJKV4Etn<2zqU)2}+8{-3SM~+1Hpo5|c(pFE4TxQCxsJ^3khL7! zY^c}{8855&@-DZ-1Bn-BN`%^h#r?&P#H=>Zxp0+POuP;L_^2Hm{?H0MbG1~z;T9-y z3)WwG*aE%IkFOOkHpAU_LbHcsnxQr$Htnx=GyE-{Q8kyhgu7&#&w#0bRYS_xcrJs^g0hh-t40Hp_ zz$f_T!>;lYXxkpEd469p^n9T0;d(~|M=sr*>zM_x?1Y|^ck)2~Y3A^ij4XJvNph+; zG!;Y-y;UsoN`T1@Ly1SzKj3L;oLzynC#=NQ5|uQJP>Pj&)#rmR#?UQBe;WKiwp_ze zdyiN=@ZC}UnqLwUvL9{SbuSg8a|tA8mQ0M&lM~%qk&V8O*!M8|=3{h%{TV6oLR3=u zSNCX@i0QP-ro75xba-NJXXjsv-op{{o)+a8On6=Y^KKq4lP1CC+oEaAWICGGUJG!2nyFeqOK)d^-`G-a%DZbzTj;9G< z%~abk1vlaSU~9^A;bz?Mezx*_VlzsWvm}};w&2*lTuN7C3+7AAPvt#q#l18$5v9IX zH0#%;&E9Ckbe19M+N?Ib*HH73Tc91k7seC}e{M(W?VyHA-VVH`-kKCt*nx97C#QP9 zbYenYgj>jyF7)zM6mh@Wz5YH9v3xufeC1D{>5rkHZM}#X%Mu0kH{F;^-qeGPdpCck z_ET_NZpGdxjDo`=YMmqN{>VgN=Vy5l3Z@y%UC-sCpsB~dim8(nhbdTmL;g7)NG zS~3Mgm9CmDb*=j;`Y+)R>o`c%D|iq|!8mH@b`xm|RtxV875UkXbHE!YyQv$msCI>? 
zzUsn=ohkgIJGyYf)5-F8eg^fcxfa|IJ-#jcSqsK# zx5kZ%wP5zm+Gy1cEyyu;)0eNW8MXG?sn|s}HMUZczgk&z>sr^sGl$^J7I1MeC6B`d(h9gc@A3G`v+ATa9kTTE5D( zDtyOeX0wT}3dLo~e8uz%4CQ`a@mr=GNjt_>nOjOQ^-$1^P7e`nsCo3FEr_BA?szKj zXW@n0NwdZ;lkuP2SgsFS49eX`-VN?SD9|DM_xgf8k~Y+Ge{p;VlY9zyxsrpxqH4{Z zUo#qPE?8g63Ht+@|MttURAhjOm7-FmaUN8NTvjpPLWGu2M{X&}l)$&IKC3}n%fWW{ zr`vtbm2iA`Us6U=HN@NOP+-le1>3%HjsxrepJm1AYudjWz<)>mShD~bn!lA7oGNdG zk&5T{?LwO&vVE!8>0K*0w~~@NZnnb)lXnM_mO4Not@4tJQ5O)}O}w1Hc7y4S$E6qj zC}0p4f1^&Z2mJgv*#feAz@5wAXhxX|73`Xa6T7K!?>GI${?EP8V_tnu9{PX}ZBEUN z_kr+{TY?up^@He_kjYJ3Xb>V(CIA*RXpsDD?mFKIM#}Ety%rr1oXAK!5o!tT z;cd`9eZnR-sudcK$J88&Y=M4x(qtFNyUyqyA z{vg94$^*ZDY-I2^y=%0|gam{C-belSy8#L|z0_T6tcPQi5j%^#IuH~oW*&^M0pWQ0 zJiCA@P`EF!*DI<5RAUUaVrR+#t=@<1@+$>FMXMocmlEg->%)wcV$f<9Hz`{y0=1~H zowUnEuzc|FTBjoshB)lSHh30-q3_miQ@a9q=FM|JHY^`@UHq11aVZxz?2&TWxGx)~ zMNSclRkEP4+`RV9pA69Y>yzGhCmrqyHwL=yO#?D9jN~$q0@_EDCOW*6LH8z;=zn|u z0DIbq*mt=^nDMk%^SJ*D`kzR$N{__=&!@rUQo(4@R~jCw+!O@{au-U`H4*L=}jd(b~0%?!&${q>6QPNnm^ z^Kx&{izg-Q^3D?UC`)ksbfyFeUX8aV%t~;ZsoRclpAuBzmzIc}EWsvGxpYEGDYn@9 za#`e;;ZUHAC)?+8lnXKOlaQ-GjiDi*$jge zo*b~dE>wg4Teh`-C)8kaOqryL4-sQjd5BBiwfLMZ+g>QM4qKY|6jb}_aen;p?$3+{ zq}>pEBKR7SHSOt;^&cWn_{j)QHWCJR@7QNEfLN-Va{X=+qDLHcYWXQ5C+(BfRwAN^ z!PkE`_mFTy`%Z(ACnQwdd^79h9}?b9O_ycJ(T; z5&zt+e5c{ph;#3*`ZcIEqN)GL;u)Ssbl=o*<~4sKilr?0%2_v}#QpkhcV`+gVdo2m zc{Lg5FXmX8WHjOKss+2VubNR~%0tJ|mcCDIinu_Ogtc9UB!x zDeJm@E>V$}|FM2o8x_+PQ+2HjXt*ZP=>oNB10#s{)(o$fYitI1)<7 zXl9zhMh*tzb0G_7cLv(9>}K+Y7?|W9>(Q${fDNa@w=n7kFnQtrgD+}KR1KOtGTOw% zmWjYFhjWAYhP`W!)isFEr5;AqY7F7IPgLjIsY4iV&haj3b_hp(Zb?2@8pe%YO8TXD zhwlsAUYut~@aK^V*Z<;2u!p;5g#X(J{*E`(*E-6=?_611 zdfZsJ(=gufcnk|AF4elbx?w4r5aE z1vmEMA?%`OeU%wltqZ?HZ9I#K!}*i_zk3Go&ljs&?-~Ybiv%1QE}|nV=-GF2ML()u zW6-)8H0%)5JL$qj!wi3!L#R*1Ybl$YJHsics4bfo__PnT&Fg9w>Uz*;fp^F5@-Eza z$imsjsRIRP4~iH~wW5E;e|`x%&8V=$rJwb>5sQtzUEG`Nv0PDj@%^o8RA*J@8Yq^d zF5iLlb!!We<4x1L-mob)3$%8+Nl3&&e zl))z3hhrn+RdC0r!e07wEfjq35So=GL7^qDZS7eySOi^W3Y~8TDdDP!d4pEi^wwh9 
z?tDA^h-=W1H|qqu9dYL}ZM(t8)#Qc0eJ{Yf^H#0W6wsV0O0(dlf{C!{isS?ptQ{N7 z#?onUbc1}8@x^|)zJV`9{$D@DnfTqGETDtYbeo~dRR+B3JoDqX*Ba5M2Y?ve zusl=Ggs$G~i#8#H;L-fn^t|p6gi;J*YX2L8=?~M(r(O)h{?Ts=C%8r+I{Wps==~8m zy6Y~_WBLdb>xp|S!8~CwIBZqv zTuRA`tpC67gB)4p)IAnl-Xly70TytIls=CZV8JChi)s=(3la~?4cBjA!B#f`wQNxq z@YvR>2C1^3OWeel?;H!tYHW_}RA+%6cgSAFbu75SUh`5qWdxdB9rRXsM2krVE?lL32fwM?nXFo0*1F0s~>0ZfVP znvcN@km$LgA@zmn%fWi!iihoCHKRdfL8j(!hVpdbvbI-(GR1K zzCN^`>4$Rl#lCtzI@G5AeDPP74ulQ1jXCOcpjqvDl&ngJ{j?23dQx;?Q_*(6zHU|f z8|3>3`oX$4%P=Rc9|%N`_enSU;j?;w$;`2SF!|Ih7sT5S&3ht#i_Owt`RCB@fngem ze30}P@1((BkFU|fD;XCma~l8 z;j}~lj^(Fqz*r=>Z7*+y_$PFm7)A@+FsqyYIo%9uYu?OcFExRmORk^vEEz_*vlL$U zH-eP-VZJNLB-nV_&O$J^0RUP+rN6|fvtKX%tc7ery%|k9DsNXbJ_hbDa_=KtZL!;N{RbPVaD3u*qyMX260Y@( z_SBBZLN=AKIOlzZcs4Zk0SJ|19A{+g@K8Da$?YYdFs#HEviBzWgR8M?>k`|87$QEA z^;viqQHNTkU*gh&8_;-HbV!312`}u5?Ug^>hzBb>dMRhfs36mE{{ERJ6uOz4qpQ)3 z;|#?WvP26eRqf|s{%%31c@=ft=2l$z3&b0~Z5S}HHAzLG9nJ3rau!dv<2wxvo1~Zy zEUwfnBj|Ucfh0^fAPYW);3L1`%ZM6>UqT$Q6HJt}3G(6wK9k#KJhG8q0F}ae4OS<+u zAcuzfU(WERq|s1jyYUWMAq~f0OjeF|)9}fhR#zxnKR&T*i}Kg*N99-CyV@iBF|h8! z)c5gz{ONdwOGAu~ueZ`wt1i;fFxJ~X^)4MV!oTty4W?smPb?uSl8!GcLsS2y((x+? 
zo7deMI&M|eC{>uC<9PvV+Keaz{gbVlbX^!|?$KBhLt@~G-s9I7`3A6cLF9a?`2dP4 zDaou48^Fr~ag7|s1L%tE8H=m=#gef>GMqesHxqAcr?n5@bd93p$)N$%cy!_6x?clm zCUks~=O7bLw_GNXoSB#+=cL$H$i$FWNzNW0nb_$_`q#I45RHqMol~cq6PVDw3sN@n3p!TpNUVougt}WF>(9vF2(Oh zRyFBlsN{Jj+Pw)o{nCwzvB^{`_9skqyw;F+KZJ=6b0f~=`%HYqd0-(ypNS<~90FeR zF!9-cO$PBz19(*G+x|MY0Ss{JGQGKD0Jrh2XPrU@E}U}F@3v(iP8E0(no3TE?f{y?4|j1pdy!v z$6!YrPM^5&+C@8c?D~NYAuT7=n$BkKmhiaqo z#SRVsBYz*EU4uiFvgKoVnxC*~*N%A5>>tc9=FWn-+h3R>wS}O*^~{~w^<^MOJX@0S ztOEArd2UErsD?cOxvbyjb+E-OAop%3!ag&HACkF^Q2u4(?>+aLpm#{)uaj;InEW@{ zby%nkj-R^58Xj(ko1QU#e;;Gc;E(D)0tLR5&ru^6DG)F!)ubd(g}jT#$xAj=pgWegy!WEQorM=xT|rck z-^$Z&;7J9sZ~ocqu24ak+bYjRnF@+&pKl3sP+>`Q%i|BN6p&jY{Iu{{y>G(4TSt-t zrn|)BjJ)~)`6oCn{Ci>kvf+bB>mKOF9!($NZV28K=q%9H2`{fmop|Qk0XJiM9~Bw4 zL;R+Sb>H;b;LzxV|8c)o;MnSJz3Wp8*hjo7xpb!mgxxk=@!iq_DaSLMcvG5T`>zdS zgIk*6=dL>k70)+8g~Sro$d(LgLfVg4NzKs_0*Hwb+A+` zzgcNhEf5|%z1Yi7gz+ikL&v>pU~ZGhxDt! zdff_N{LV^n_)oClK?(tE!)`yVSz8YGqHPbfxR*kH!uXfaZ6%;2rBEESxfo0aZYyx4 z6oQk+Ii<~{0=V?vWvEiV06hO1F!b~D!H*&kcHvtdn8gW4Q3rG3z4B<6)WaMQ-?OyR z@jDCtY+m4yEy#ogQGP?{%>XX8LmJJA8Q}4s0bj0ECgi8s8S9v5fw<+A`Mc?Ci1r(K zEgq2zvspy>OVfGqe87Ox*pv^gJZd`^SMs4e{5p|XkPkQMdm@&3^WnrZH(7T7JUDJ^ z_1wWY4}@is{a1SOfOq^@q0F`d*rmJTo-R}bOZv3eYwF&>!co1ly=JALH=!%EnOO#n zW0~?c#|aRqCnucgS^>%@PL-=USHcGm8H=!^Rbbele1*ZP0x#2hUs7*YLxhiLVcKFf zOjPcU{NPao$%7~Mczmt_U;Wt0EFB_vHPs!y@r(!rt>MTIc|`a%D;CaOM1*j*^ijnW zBFG;$(h`g$LiczS7T#Mf>jpnXAifpjehn=T5N6xTxKsdx@U-8%3zkB^s_ ztb@LvK8k2l4~vho6W(UlgZfMQS?g6Dm*r&sx?B&tkJk+z8K{HS;e3M&(seMmo?SQC zy%v5i`Nk)-5+S8FS6W1q2o5^Zs+^x{potkGL8z{QKoWEDeRd5H?A=GB5^CV&_lxh$ zylY@LjqgyXVhv=%WA+^-)$npgOIY$t6})@k(tK^A63i=%&T$u3z_&RL^Rt%-&|$5S z_V{-hILw&Z^p%vtp>fL_!;ecq#oIepWT_Y&KQuW7$`nE1SJ~%kPYYo9sKkvI@AKeC z+Yd5rOD?3}zG!9iJ`0q8MplZQ$N-HvyDGAHDr^$gKj(QO4kTN0i@6#ifb9>#rNKTB z-j134OPzFtD!Gx6&CQNzHR1j1;$=V76T5mKS3MZVs-9f_WDthjYPFY1)e(3nsO!u0 zwb!_kNEZ&Ti@_Y4Z=KwsM11mQR^;wNDym3*PG&e~;_Sw^cTS$oMZfN$0^5^?*y#M{ z(jK-F{IT?BUtK~e21yPKWek+zYi)Z(wgBS7G$2FG}30 
zRT#|QkYZC`h4*t#J<{7oPPcP=iR+Z(zGgk2`nfXvd-Q3V^2ah%s>obdw!R#lDgqCf%9Udx|MY*2YUOxd zZ+HWq9zXS=;9FQWR_|9k~6v1o_Oz2AiUaakREco%yo>SK4ICJ}2g3 znsO_cwC3RAH?75WiP>1ME`OCLeh474vvR!aJ1FR zLCwF1j}=X4Gi>xF_^kXR$Rj+4(*m)rmKQfhX@K`6JyUw)(B6kyTpJLVB&5z^I!#|Ak?`mJR+)w@b);SUnZQH|7ZVt!7 z?nmaEhJ$f!5TzviaR7E*c>1fH?Gc_HySeder#o)rJsBjMV2gr+p9G&6>0;jQy!0va zV^A6~DRY8g3hE8@%j8;H@Gac)!u5bN#B{vvFh6q(zCXy2dF^r!^ml!-Z~y8AAEW-{ z&-e$xcS6*&0;{L+OYi2z4|ecd5L`2N`mF111h~Ql3}FaMTKHr3LJcRzdG<~Dzy9;>)mXV2BjkE7uI^F zgH`?;ee!q)JUnW(^?uN5-tfEf-PAM(h`Ul!P9Dtzqgo54T^t2qIBR%fu%Hm;j}T)7 z_7(&CfK>aZ>u=x^ujwsDWMCHrGD{S3~&n+lc2SRUn#B?-XBG348c7J`{CSz;=A#<@AF9 z^?bu!|nR?x#=)2fFo6*m>l^1ATMXJVG`M-In{c zg)TSW+3zm-zst(xR{lQc<%v+^Dc${&p^|&z9qTys-#>)N!?D=9l1F ze*ItumZ7#_pt{Zk0eL@flh!<1g?{h1@|^Ce!F#c;`D0~u$VvLf+C79Q>vnAA;>$*~ zjJe34Alrm@o_G2YOPaCE)pCi*(TZ1A>P(!)+EBiC{m0wf?HG3qGQ&{SrX_fp9XMepipC}`7&wf8X`2rdSv5y}OilpJUwxXu*_h{(-@7kWt7Bn=|$7?KlYZQp=O#G zF7eXPGX2Nt7xPqnVIEj4zsi{-X{0Z=o>MVULcrCcTN@r0rOqfsRd zb@uHWG^I+M7K&ZG@Tv?i4IasKg)t+>LRaN}!v&is9zAk)e*ZvN~g)Qef#trv}lR~6D%CcDz$_{z4}MgMGQR_Ry& zpjrT558TnuSz7|xJM7!r?v#N>w`Xk2JOQ2sooL_bUj>4}&YGIrS96p1%fwCDtGPDp zYN*TQCo=rcl1Y0dC5bO{6OTK<=7 zTVe1`;Kkm~R=CUMvZm0h4eE9!U8Wpq2Ne^xFI+S2AW;^rxwo(bB!k}r6)!D}BO@!B7)ki3<*XZ^KS5Kz9nAWmrkg`V`RODZj}a`?q7 zqg5{XGq6TuVqG&F$fQIVIXA)gCKvOa9b~X_wNk#fn+&5n(?p1ljZo_3=yf=s1mq@x z+P!oHt|Q$?B^?`J4<)L0{lR+ZzH&OQ@>m_P1}zn3Q)|Ixc(2Pbn_B37pzgZ%S}ln7 zGIW!k*TTmGcXYeQ@WL~vhQWC%%%!(n!Ok7x|aaG>en<)v&-R_=G)tQqsk#5<`0#puN)jTINoUO zBY?rFNCkBZ0t~SgQ7@h(0Qt^t%fDC-7q+h`aCetMsBw^>y>}__ckjLZY55IonN_La zSS|#TcSh<@<9yJ`*!PvHoeOjewc@;Y*&sTjGtthO4gK*qW=cG>K>vKi)q&YeC^|k& zfmOaSQ{!glEvLh?k6Pyb*VCXQQPr1lI0d9!+xG@UCqQ4bgK^!~1fVMC6=e}PzCEy1uRt?#BB@Q7T9iU4sEPh!v6DCiQ1N=)R917)k`2U6pSAi`!OI$>8WWP~e@+>)#Zrti@A z7rY2>O~U7=E|NfLg%Zm*MS`Cpug$biG=dI$OB07xBNSO~*?mT*5qMmRbh>yNq2!Y5 zidqK={x|Oh<{?qm3v&`|&$K9)Fd%^+m$i7%4H9^m^O=1sC4uX#d)>BOjUW=F;X4!6 z2=A9#>t67YVd`o-Z~P-N?CqPH-%BKez=8*>pkj1s3ZN_>FkKmUsg;l)Mg`+$#`ApnB;Qs&s0RR6ynQ1f? 
zP}j#xLK-xPl#-#MNdqd$CQ3zyq)efR5|Vi;Dv`*P%A7)GF4sKmJ8@m(<=#u?kf{MF zRFcIqU4b&VT>@`;6SzGPC<>i@K6k#-@_Ev1#Uq-~-4nd8l@x>Um`=`LI{bguN%_~H^5({# zOf5PRe}w#f&&$A?qD#6pQS}&gDeKy7PXqG(+?FNM*@(A76pezSnvgr{7;DA8X7v9( zVP8OQMuC;HpIJUFI6vi1mp;ivBl&DK`EDluZB~hLj&4Pc>w127%QoCC7FChEhlOi$ z-%6M=S?GPzi1O+p8|C`r{ntmcanFrUwaED$rCnu zh^|n5{FIH3=kA+4O=sgOC!*HAPi)lmln$sp%)uwClO7&?#KE6eZaMy z4zfzE5|n;&u%s$#wNeiU4Y~U74d!srCS+PRz>R~8wY1=!>KsfxU-@c}I0p%*Uq^9m za?QWk}(S~RIG%6!bwBcCUG=D6m6}QBS&dywE#R+<5 z@97^*T$GbqaV3(8yu$_RM`W2OX?k?mj^`~XXmRqBUS2cu4fze4)HUItjCJ8;ZzBp5 z$()#(22>Nw=^x(EfREpOd{%#`9&f!}Poiovuv&9+QNXwk6PLMipVy~jjP6hm9;wBk zj@xIFe$&vJm*}iOpyC1k!ew$+6zq($(h%NFMw5#v!Vg!HaLntjT>iFdY;!fOun4Hc zlzPf9d$kJW-W?VmajG1T|8@Vc{%RRgN)K26xLb-zylny8ZgAIj z6ry}Y?usULBKjNf$CQTU7K_eP}Dc?RQvhf89<U?__R;8@GS1Sx!Me~jp0!LXHWa8yRe^}+wd>j6LzgR@hH&k5jYH27?p?z z!OTVV0Q*Pb;2OPKaK_3~tp8IbzA{@{D)v??3q z+{pm>TQ?>`-e*I*XHWc5dLAUi^PPNQoDb4Ae&~@^05&EGsWA;i&~^#5tRNPGch0FZ zc%ukfG$r~C`-*@@ZOFQJq!`qGhYG#BTMXC375rAE7sD=rYe(07Ee5HOz94gr61ama zM{i*X1hEdLN$Quvw!O-X+J#am;u(`~E-eEA`zsAfzU9D`uqi57rvmifi2s_~R|)Lc z0#~msRq*-ai2{VY~L1MTeav5FN61c$iKC)5|Mt9*L7-!L@im*^dlv z@z==_m1H2@F~3~LBEvw)pp$Jq85&m;-Z*5F;Rb)?fK@OV)>-6VOZ6hd;inOSoM&YC z{ba^7w}uRgBR_9G*hc|7!Vs@!0R;;EA612DP@&s7)Pc#Sf{&42%a}S1BovM$iv`hO zluP}4X*>-$G&8ly02&Apy|kHzG}y8Jfs48f4K#dK9ls?&gA*K+fuq}LV7ggBQqq_P zSubwx$|cj_L!tQOv|0^lKG|Jp&Zq(BfCD2hkJW-@pZK;U_gZ+c^^t~z~8SF*m$BVV@EdyW*)>AyvU`%4|BD! zv$rTPEvsW*w~hk(E)QQ{Uy7ep_oo=P78!il>kfSVLxP>U8T)Tmlc48jbFgtN2?%vl zl5-&>IQ877DL#w@2{j4Z)<=`T*KzoU$kKe-sfp`*pOK){>7k>FISHl~Vyiz2k{~8} z>(mKDgt1j$SI=Fo1~a+#4LMK+GW}Ux_x4u8NtHc+J|C`t>m#2sD{qy9{Bn``ZJaU~ zX>k*?zFP*-sfTrxCClKg;Gx+Rb}7^zySu=!E(JnGH=i}F1mu5T&2t(l28*zEI=iC? 
zHXr(%B3W1nugt6E1e1y2E%8t1c1r<#cBV#zm=pjX!~4QxqXLMZ{h^^iD*%zhhMar8 zL@>EQJ^#z35Z2^z-LDrf0xGvjN=|$cbh}L1dZZOWz1?33kSl`w3T3J_x`p6)aqDch zI1w&&^0m)&<%3!6rxpJS2=M%x-q4=hJlNtlsP<-n!sTR_3p+Ew_maGCJt-ZsI<~YpyiEsJb3?aBvFQ+3 z5S+6sART-kGG;ejNC%>8*0&u!Y4G>3w#LEJX%L(zx7{%0J#g`F(hq4$1?6%-Vb;b} zSjq5hyKR;NDz#o!Pg0ZN&KK951FUx-=9RSSS@T;s7}=t}@=+3;I;{2X6*&PYHDM{_ zju@a)?)*@?@e=m@IpLWb9u6l>{?2`u3xsr!sJ!P?FSzyQb-Uy_b1<*s4;&A-LXDC^ zE5Rmjj8{ziKARkXe;WnZ?FC_&Vr;lvj2MNcuTBoh&c~q8m4)3BrEidUIrRCuiFer5 z@21iz{vLw}(bt@>XJBN^$-_=Bv(cdO_`mY(Jd|)<9C@Idj}7u^J})W@(EmV3;gD1z z*4V0CT@h1+fs;1FXZDp~v+7uez}HgTmG)=dq0w@rE~xJu7puZ;qw=Gl!w}c<)J3^& zA|u}q$00cz3SKlh@Pb}UMQXE6?~OScHmqao{t&Ol1J8{3*2vQ_{HH4Mhg=<^mQJbq zUItF4m{z?MtH()x^+K~94On$EXT8||Mr=Hlr^9ou3C#;icFNyr#y{GJR#Adlu&<^t zZRi3MU6@hA&SWN*t7{2NOSR%zQysSxuB}*B(c)N8)QT0BDQu6gt(e(&==J-ZZMgq{ zR_Im3HoRl{n=|^V4LL7Xm2vg7q4X0^2W}}A=9;w}`f`hf+-%YJQ7J6sjU5-tX=C9} z10EOMX%-sXNs&w9VPny81D&s%m-r_+tL}(28i*ic9K5-1)6IJqIk=|jPmrj{ z63?hrEgEy!NHm#va6WkHJY5}|tkZ1NSQ+Rsu*kxL^87#bsVsa`S3NQs#KLX9YL0w5 zEYx5`ZLMF&!t<*i%Pg;JL-*ZK4)>S1<@CL8T5@f8pqZk6J3swyD_-RJNO`2uiW&iG_W8?O(cIoxj}gnn;1`8g z?Sz>a*idS_muHD{YDj%f-Ay7=Oh3m!&r>_D?R2Tb7ai|CWS-IS z-7Br+jQ6!zD_KDQGgyO-&PJMo+BLYxbfopq7!6TO!C_q*4f|cS;%c2}ShU-?`L`Sm zACaeR$vspwJhb84V=F3}Ze>+zuqe30Aw|sU1O*$bB|Kv?$(TepIme?wMqviQ9zK$= zXe_m2qRJH0+lKzEwdFFVUT6iCSmm7UARzow}@ff5;LFd^v| zus;PKUZ^Pl?2v%py7(4^q+j4=;>#P))(7Ck_0{X|Z}h;&moq;;Mn|M~nZvt4C)~JW zWw!7KH@xa=Q76C34Ih0D)KuPTiGtJT+uunzK~h0;Hs6j=*kUc0NBa>6qID6DUa2Xt zDBmo-{Bb7y?ahpjTGILL>Fm11KmwQ*jz3!6Mu4ho$-j>$5+MH1p}o893E-ySRbN*{ z0C$__0`Jj$AnQmQi~J)(ir{{S>Xu^g*6Y;vnJEK9F`VT3Q3-z(6uxs^Lr`X19aVWi z0&C%~pfy2)vt-W3CE~(<4JAd(XlV0vTEb8#<;fDPU;K6Cm$I1)W5nzjjL; zqWt=ElZ1FJ1YDk!>@uQ*T8`yc7nM3#ZFNHDOidlsnbwpH{i=hj|B_iZc^M$dO+6DT z!vL#%9mAs$41jy?#wAzkp~WDzzwd4XDDxWB+{?Q;NC1 zt!RNdI}II$_GWl?EY7i0vl$fKeX4phn&7IFW=@Jq6SRj3E)&vgf|T&67w_kq;MLTb z5NGig__^AE^H+ljwOlW4KL2EbtG25@`A#b+9HyCH3u}en5#KG@#!DmVN%!_oOfZcL zzpQtK37_9fDK|1&Akv-p(`~mF7?@EQ`Pth7QYyAlGagJ(Sk9E~mv04C>u?dFsaB{m 
zTRHpDu?-{(bWI|{+aPS0Zn2PB8)U0*PN&4RLeuYpE zPD0Pd%9Sk)t4rCQ@lFtS8W44;iA9SA7H=_Atj;2+&Xx}%htbew+@`Fen&sw!hn}o&;8oE z6mQLQ+jjM)G=OXJ>YZv{jqvonne^Vfji5p8(^kCE0OLw!z2~1W;Kh8r*E%gae6q8< zWO%*?corQERF~6$vO7XvrkVcIZ|8yy^jiA+Xn-~g~*`Jf6rQGdlh^bP&^?+FN5LrJX_v%mO_^E zkB~=a%3;-VkBctKRZv&K9p#`zf{~<{p8m6B5dNK#P<4U~a#{zD>#-0%2lKc--BSTm zz0$jjqKjadn|uRO2_Wk>u=#~%7DPIHirZ3@39BkJ%QublE;BIccMrCI^=p31Dg`cfF~{ig7SzYuH>5bU+{vf(3K4d-ik2XfD%efjEQ zL22+GzoA$>Y~5M4rqCe;qT=S?cFX62rg6&-o^c{ve{gb9h+YiVesiNSH;Q3Xvp&1Z zq5y>UBjxDoG#K=mIC1UcJ%g#e$g?QPdh_h@w4|Du@tdds0!}}IC?2?|z< z6Z@m`NO*EH(<(x*40$?>jn~8yP;xSPlQ1g<-JBag3i`go|HLgD#gEK!Cl8jOwo^@R z-+Bs)J(GCbleOgArJRN4mw3h_(yk@ZssWoFeZ-Ai>rnOY9hGb6sJJKIG^RnU0&lb| z_nWaRK=0+w0c~P=cq(oWVPicJ4O%(T{ky7hf`@b=^+OFNt)ICneXtSvuf6c_TEoOE zm3OX|9$=!E&k^bK9~v9?veu6VvtyttP@j|ngiHS#_4{J&^qIuWb6HN+K ze63I7@m@{DghLWJ3j*O7ef&uHsniFcQgc0ItJ5<)Jmxvtda3}Ot(_hn5T)V-r(nE7 zvk{f9xqx8_6A!0KE0!H%V$injlNOzI*vU~^9WGmjTA-R<_sAE<7p>_F)DqzRPH<|d zC4r^8_>VhhDk1F2iBc{1WQeBc+B_b}#Spxc`iD+I)75{!b=uJJQa`UfWj6)W_F9}T zzZ{8?kBl$uh_3+)(*B=+R(t@HR{tuF&j7HeoK+_GegZ)qVuiufAXFYSIof%w8^T-? 
zCqu1ipk13URdkSwPe7#V&AV%iV*@yI>xNFg6bJX| zsk8G$lQEJR{?lr03t-KQ{pmg3@Y(IqI&H^(xB>Pe4<2;F2IIL^vJ5)dQcp7sZxvxy z>h*J7k{mokQ#%}1+J_sA`n?u}`q5=iN_-THgZCOus%~#D!feY@pWob#Q2*|D+gi^) zhzNN_?Ft_P+lq(x^E?N^u6N-_d2t84^=cMBEcX_&^p6F8OmD}e@3Uq54MuRi*qZ)7 zir;XyD|Ei+>I9AoTh-il8OJ6)yC6u!S6LPZi@AGp{JbqWT1 zcNO%(l@DJ;Q{J`1`-QN2l4CKfintop~2sXH7ay{ihmsV9kveF0eefm*!Bo)E|LN6g0`8@%@-G`n!hd(XZaoU7qLotE z#UQChl<|_H^~^V*rFox#qbC))#Iqcd_oU$OEwkl08YS>w9tNSOl4WilOZ&T1PQ7$t z!@Qz2?X(pOq}IDxxr;Huh2wRf-ARXxWj4|hr%B*fXK*@Wp&0Zf%i09Ei-6VtK==K= za+q&cRmc&cz=7+uJz;ZnV3|=&+GU!cTeV~K1}f z*Q2l8&<4TLLr?izT0mT}R9CCD9_rO>%va6TfVhOKz3xg1h}ss0zm`LgaI$Z77O95Z z3mc3EElJ?tKhEjAM1}gii{BDe>EQKC@6k_-2B__^TOr%n4EwL&PpUCy{*SZ1$bpFM z(PkjLNm*Ai!GLBicHc-K4emK_^T^s&4R0r0=kMAVfw_&TsREM#jiCq6Z_~>MTGTN) z>5O7HME+hsppNkCXw%<47ikdZ6Ft)Pkq%|Gh4vps7(gsm95FsihZB}=m8}63AdVaq z>U>iI#r7j2VPo&0GFC-Qez_0QDY?^Pe`3-3cu-p%cPL)ixcA*ku6Vf6zk$qkv;^KC zCifh4rh!h;wINb@1ALTfxTb8=0(%#&B&^k%p+{!dZ{KruFi{|ywA#1|91k_FjLAm77Oq}&WWlI*B;|<`lr2O%L?F7Y^c^r z2MWj;oz@?WtAmZoBmuu44EU__)4O(GEo?D}8Q~eKgy^C8S2MyXaO&x%e}zS{*e%~> z(xXB^-|t5!Zgk~hrjTqOKVK+X)+K%Q(Uw`?8|;60 zoco{~3-qqd~lGX+}^*33TgL`hVFuTphZ?+c$(S>*TJEp zWRU?oBI9pXE>IvVBjIVdX2E}bM-V2RyUw?%qZpxMbAVbg1vO10ScAc}IDNrhdQE;U zQa-))FMUkKeO>$?Z)R8E=pj7zdO8ye=6T2gMlL`TKPk~XkPTj3Atk5uOTi?7+vGV` z!O;BSnJ%enxYy$AFcDb>`gt3EGwTWPx1RfQm17jh{R1P7dkeQo#wbxR@5|Httq(~U<6GU`W?1#V|0OT0GRFldHlTS2E%4tW=%2l?Wsq< zgn!vO+iUQf$_W&$twgT*DQ5=~0n?tH%P8he#>-VLuUTBlc+uVJxm!p+Dl2vi%&U?x zwmxu`VG#rWrfANSpS2>hGUaCHP7dl891rKBwd0n4S;H?p?f>gvRK%XPOkdH2zrXZ2 zy3;5)NR^)+2O=JIUS{>(J|4H`?j(k~2V?ZN_6BACw;1p6Va3h&g%}WNd?!Mig841c z%JS#xaa+*+R6W%tJ%7W!d~&iCReAZGwAZ$wl$=9m%McScJay7tE7pv@_d^7w-qvGn z?H9H{avgeq`F@o+TZfec&AF#a8gRWwdRU1h6FYe>*m+jAVZ*$;Yvd#wcQ)i1q;6_Q zCuK-9%;%s!qp8L&mW3T#FVEch)PiAeX8gy_*5c@in8GZ<68s%TK67u@7q@0_KI$q@ zfDis|&+>z+KwbCp!Hd@^a4FHlC7_Z5PE{xRG}n`0;wOz!FjoYk*U#Nw#eNIR#J((? 
z2#mx{yS>NO?;>EC66X+JDZz#_GJ+#9r8p(8tH^hah`;<_l09dVa6^OFk@1k%kde3| zZevg(P^dx`;%o$R&AIKr6e+Or^Rs3<1rpTRDz%745rL9f{=S@j3d>uyP0V_VvAei` z+xS2Yo?ann61S8OW%?dQuSg3@x${Zx3U5G+GpH+*rQ!x6W7v8z4~@3Z&lX&aft{0S zaSI*A;B-u8Ym{6S95AT~r(~7Dbb^Y#dt4%P)U15`?MF7QT4`ss=NtuhCib^y$=2i8 zY-p%yN+Wudzacue)S+0%thV_t*b^PXzhaBRzr>Twc8 z#Qe-i|5XTGgu%M7Xg4gF(VdyiuEhMO{CWE$>v6tf*jQ_niEC&dhPXSKI6o&gXg$lo zP?`>XthW?{x+kX_RpVgl_CqZwNx45{(HU2i*!=nOS;6mnZ zX`b$ZQM2y_$1^?v#f9E~SGEZ}s;E}K)DuAQ$0aST7gVh9Iu*&FGtsDYU)Z+gtvDfh zb>Zm)8V*1G%wkr=!v6sP0RR6qnfW^u{`>t2k!T|+ElP?~BxI@XLQ@S<*tvnk6B7aocxe7>2=^F~+{{qWHM3_xJk`+`l}}bN(a|1Y6L^?c96c{WA44H3Qp}!d+^#Z5&e48&pJpE(8H~~ zex|Gy^VGH`yJ)oH!>5sUf+y>deoF_!RARAf#I`>ms~ie^R|K3qTHsFUNa+y~3ZythcU2eNNZupUJJrxngjT+|YqGuca^j_NU;t zxq8Jo*Jf<%HjlVAQ-rJX(-g18G{`&qL45PyS~%RiKAa*-g5{ZyWP8aL_$etDvzhE}cd}sp%YR4HJ|mPA5+egfsvvvx@#L}> z0jeHe3QcNn1Ou6fkLjs3V45Yx-D;Buc}sn++Pf3+Lra-Ee{vP-(m1V44>hB2!uDHZ zjU?2YYdnxL+K2>x!J`6Pg;;gg%Pp`s9iq$Y>}*&Y;Z6L!saSX$SPAQ>@LuSEsE>g^ z@>|IedLZ9!#H${p|D&1m{rCngc_|0izb(Yu8_y(N`d5eh{!8b|kZi#6f^T<(?Mkpi zlFH?=@C9fNb9?0(4WQ~O)%by{13nOVb3Omj;P9QmQTe!TAnZS;mPl*`+HM|wE|)^c zGV^~X<57yg=HqiSCfYD-k3y}G10D6Bc0U+eq~q|JUGjP44kU%lSgT6ZVf(-&d%pcw zn2?Zazq5}B-Jdw+WsTcm|K;hTPr>c5HS2>$cqkD>m!tA)?#9B^t8%x@Z`R|>pEQAeqv3Bfo<_t_BD|GBBZwW3|X_Tc|?31~;(bt*OX8@9FYl-1y>gbh_Jdu;XEz`S=u#gmY3SezLUk05n} z>x0JbYkS%u=pw;3+`SsA;<8o3+&-fE*7{?rGWDqNOjeGcmy9+{qg8cc8`l5y8S1%5 zLgM@HB?Z)S^hs+U-B9uj)G{K~UNXzzZFv|sI5vXpUQ5rvH;F(k`Yf`KPz7g-tDU<3 zeFrU{{)06Sa`B>UxqakfJzja`q1B__f=~MS<8C}4qgBnhq7msrTI@K3;@=GRDZ@oGSsWD%PFu91_49yE{EJ-yI$QkNI^*gnk3m0(TG}?lk&3zpD=>n-;JT;xsFg1yf0Tjs z0Y3;$Rz0XF66d$hl#V|w!=F^4Ray#|Y28uo1(5i4B zrG*`j%ZU=e*E_bnM2`ZYKCnscWEb$yH^5EHHs~L)JtZ1h13Nc}PDS$HLz*eSJae!T zCrUT2CO>P!TqB$<$JQqq&dpgUuKlouzqTEB*60>BjP_#3&^7LZA_F+RQ}FY? 
z^?m5i(E1YA)rs+S2Qp&#e&c&D%-NPmfR;mLkL63~a480pbL{${ji6Xl>BWSCLW*{X zLl*>ne;B!+Q4E8Tv2OW_b!ftp@YS)1hJhE4bcNb8vDfLanu$m+azv(O$o*-?;+ z@s`gs5_HP1@R;X>Vf_L1Uta5p_=+-NdF)LqCbB$u7_QocL^f%?Z`ZSsc3#NuhHWMA z)VY0mzP%ItwfwnU<$9s;Ugddz(?0lLF7(3RiTFJ}Ykn;E_Pcp#Su@=G(cGWvTL?}Q zQZEH!d~n~{w3Nrxbo40bi1E9XjCEz)A~qFH@c0?8#)w2FY^8s&FzfvV6|Sa!&J+1C z7QrHp7JhB2zhdv5E;0lY0Kqh)!n3)SVL zw%gT5ploD_Ny*_3XmI@KtVkY!m?|}krk+ud{wW!^$T)MUcfd~Y(5GGn!F2>ZRS8x|wEDJxl$@R0n(v99Niw}-0<1rj_p z^lufBpnBq^-OlbxP)KL%P4JC@G{HD1U!Q~HA1S{#2bSYBzq5JC#!AfUe;O5Y8Zpig zh(BFFVNIL+_JOo)I4bvu{B36y=-r*tE~GZV7l$tGDTxN)_wc=4!CC>sPizeB8dBlj z_o{oLYMIF6AVWNrP>mArn@V0@ZN^y9D!J$MRy_IU38(+QHoW+*f}Nd4Mx%~Ij>Bik z7?33E9dM3}g-JUm05?F$zhWM?;bf_vSXwv*pIi~vOrUn+&E!7?@+?%`{NrM=@b69> zU8;zizR-rAwMO%n@{MSjt1gnznuD_T+1Jk3XF@mE6=8*iT4+#S_af^{GjRJ|uH-6g z2KsyPhv!)9Kz%OkWVlHh^xMX#nvWIZ@NYh{T~-Tvo047AlX9SwgP=k69?hfoe=V*&;(r5 z@%_byIc4FCLF`Etm2q4kDiw&7_KPKAeYu(8sy zUvh`ODLVtTxxX(sO3-j~_P>DiH9a<~+}c<(oR05@J{>E)Qx0?d4wR1dEiiO$ZV$_M zGO*;caoYJdf%FIawwv;K;J8V}gjc%+XNB4~`6ZBXAw6mLIYByR6tuLuoA+Q|JY)B( z1}bW?s)(5onsM>-)xhl)Karbu{`cZ;giV^B&Rx=t;1JGA)L-Lmy&PNh=Kd1sx=RQ! z`k0O>``u!^Qb~9-edBax6dlD&YZ(F-OpK?ta)pq4vAiWMO4^)?nQYZOQtDMGkoa8q zR7n|xBz(+wdf5%H(?d_{b@sz>a9l?J{$Xf1wb6aWY7idPC{DjU%7ECFRF7>*P0+;? zs1_NMg!(GBX>WIt@w+#>;Yo52+9{e@bu2S+)kp6qS~D?E#%@}xgN{?XoIh5zlkrn& zxsy_P1$Obp)tc>7#yb}dsI(*%!`9EqKPSG`LG{fSf%Fa{I20J!?vZTF37_Jx ztUgFE5aykJ`VM|>JbOtz{1w!Ue!XbIk&J>}8x!xnEkX0ub0}h2kHZtaZ|v?i;kOrq zFP}ycv3-(Hi(9r5-&n?7>LP`s)$=R`oy-d8qh!nvGRSZx!OLzZfd>ES3h_jR_kfnFBex>Bfy!!a#Z3!RXB-^`(G1P1vDb{{6Zd8K4I~Y1l>EAfK>1>3Rqz7J! 
z-5K##u?%vz2p^97)(Rhgm4wTGpo6_^@i5ma6YLoB20o_-fWRH*t3THd6Iz<@6kjvp z1^c>RQY1PQhSCCx85FSQkPO?suLVLV!t89Do8e7;O+fWp?;pORtk*1;3saACG=QCz~`Jfe~hR@LV_hv(cJ~MaqRTWCL-gg=} z+JQT1ey$F4bUbnNFw2k<9hYQZ*BLx)#oaT594%f6SoGr7ulw>O*uRl_RUxGpEar z54r`#R#PGFfs}n^G!gtGJcz9;FVRrL;mq6EMzj&u{B`JDJ4$Ft26pl z*q-E7$m*H{S9X-x67AbypWp5+Cu@5keO2{Bx(5T!4K6-o@1sC~_aLX>_zy51W9#v; zZNq53CR&o$Ahw3)XsYW?;=*deF}v(Z6lqTzu3A5gEPtE`=dlfUOiApyI$R00Y6C8> zZ}mfL!U^-bj!AgE!Rd$QooSHiP|p@CTI01I^H)WudmxFCqe`;8fiLQUoFdifNP6K& zBrwz8;J4}46m0s#^YxKJFQ~B9XpES9Kwq?CC3As}?qQX$YB)#H!(|hz#g1`| z78;w~!R*Ix>IPZOwN02f(n*YvAi&GY@Fuv`2LeHLq^y&p5WSic851-FJ3Wu?qCV(^ z+~Ke`c}zNHe4j=8rZN7uqkoWhOO)yJ)J3e}KM`@ld-fVOLyZj_HCn>|CSnys9s(azyGB&ka^T=~*KHd0!2Flo?JVXCf@k#EHtuM_uOZg9N2mJmcl`NZ zmis2r>!5+D(bP0vW9{ZP)SX6pi%_k>^a$d{5_9GA9VmXghfbF)0|(9kzk>2UNRd$( zVm3~~ba`z<$Gd6RYDjbbColn~x%Z8Q)NCt=eBzU#{NT6eYKXVrS;jmi^vgv9kZ za@_)cBfUvx&Ml!Kv^dZIT|yQ{&|uQ71-vXHP|3|Ug~EJGTs{5_bUxOr8F8`zuS$f6 zMu@HT#+}Nz}v|t)KmI7OpvO;{YT{T|IB1z zE5l&gc{L76aq%Kc)-iZWOSz|&H4(|rns)E z0ad2h_ta}LNRx&+%|*Il&-Ke!Y}fo|AXd;oT%rfw=u8_4S5QGYI$rBB7a4e@xX!ed z)tY$O@s7@#eGLYObs-)RMs-+pvw4c>OT+S|eGNy%2XTb^QuoWaF+823{qoDHaeO3r z{PqRfAU^%U$^Bq+7nbo;l~|ku;0~9WYv4f|1drU8qiq`n_XT`uYuvmQ6!PYtoG9J`&u3uwtU7|h2dCiY@EnQY} z7o~te@|UJxa__Oy-rTsyfPynATT_)pnP{ANGGFv%AO5{<{B1`g9oKWZJUkrKh+Lfx zbx##DL1k1*s)e%^l9SbSo)&h&_a6-Qu}liwFYMzSphr=Z}Adw6MZeaEdicBcV|4mXvFpz9;%mFZ{cPu`>unNuDzcvIXFU~v_WC?n3v1mnD`|%w z`=jA!jhVReR_0Fb^gMQQsyHl4{lk@1U5VweWh6Q5?{51sji(LeSHA|4&~c|{r9vzd z%8QOY;*(#1uVX*tuyh6f%bU0zr@RV_SN}Vg-LeSkydycf&xhfdMKo_)Lo704IHq^7 zj3Bw-K&^DhJf2suklZV@gfRxk!fV<_nCRfE{6 z*Fj5HGx5IBX^o@K&1i6O=IK^i2X+(o?UX-4#c+ZA)$WlFJmhnBL*V5$WD!a?EnMr{ zmV%+KmyId-Ue|=1B;SLCCyxaU zmJb0hyY`02Kr;Bp%Ab^8sKf}~F#BC%eVBA2Im>(Z1olw>vYv6C#CM!Z*`;Gc=%0Nu z>)={{dKO*(hel|Cz_wIXkGI1R&9m>|IDHlp;%<1V^(?{>f)$&i-y-}FKU2PG`z)NU zRzCat$`FJ&wq1J9O@vd1KJVn^8u6hr&9H;Sz)GH-+{v5+_-FJ?UEg*l{=Os`l#owG z`9MMQPOd;G(bRgdLK$a)!R+BU+#(*k|9NyJ+-vZ7DBIf#591^+iYavh zLGYSYy<`V)RPP~|1T=$TfRWAMrz}vk{`1>Fuo`7fHdlyT@5VCsGuIzo8bDtXiE7e2 
ziUtY$GCPRls42#-@9=&URkfVdIRg4odw6n+9Mp*)YP!ApFBIdK*LD5xc9g;;k9#Wf zVlALy8EC2;Lk3RcyPy4&8)1GL)J^-s!C`r@a^K!2v^{fr##pZhE9ExD8gC!KBrPR! zF?|3vD|9y%&=?pk+`pu#-Gu-9KPZESV?MOAh72Gs{pD#a8-+hL>ok@^Ct}CBubdS_)=j+$LK4ipM2)N!D7poViRLJOuk?m70eC-a-F|`9YgT#N^U&w z8V5xuG?y}b383?Co&1|eHRu)IDXuNoi*A3k@Rr;t%87?3AEQm6$T>s#MDcN4XLaj` zl-?jFO1!8tzE8(}5%MO9b41j&KA>(EoQ`=B6q|;b4EPwyQ|4q^23bQo9>d$}K+)%6 zQI2{u?jE>PIU3 z*XWGn^WqP}!r^2*`g#xR&5l|q`0#-y7uE}GaU4~l8q2>R9!VSehTP@eW7@qaXhd00960JXv=%6>i+O%BXCmp|VOz zC@P6BDn!d{7!@T&G8!6wqJc!ABnsJ)l@Y>M*?V038u#M5xW>I)d%ezi-}CTO*np;1yx&bH`&?EI~jSEW3J9SL?L=2fFu z%ffQNQ+NWe-P1|uJTrzQk@fE`*9_usOMWIytd`x6dUGlX0c2p}Nybv)z* z36?_6)yhnffNxyLN}H`6_Nz7A5j&O(2WGzqo-wS#ODs0tT8Tt7ny!~f8tuim3~9j= zm2H@Oyt4shGI8gdhz@gV6L9UUaP`+20Lx!O?+%-f0PUHK^U*8A!2V^}z+#RBhf1pK zvqkeDtX`7sWO5r`@z|{r@q>zYt`%P+i;dzZMgL{W_#j#({_wDEZ$L9mx~ZjqI}EaY zU!n6d;Ql`;huV~RXqa(e3;DPVw8AI5F4`=@&L3~|jjv5XTQ1kEUw0Q+yi(s;wWR}7 z2A55}0_ICSiTAIBVg`WTZsz%l~|T;mN*rzT)+IK4-ayhOhPI2T;Kyy2G5%3H5-ddCh|5(20SA_IOy@yVf|3m3pQTsJEHzx<*b1x*&W0N1kr%XH|R`?>LU_SC78( zF=JvxA+LGrY=!sjpC-qLj@(PbSOyKu1 z3w!lHqj=D4Q(kFhFNVf5FuphsUa`OCU5KGVoaR}V(wr#>2-0;AR-T3Tb>3I!&QAmL zf`Q&b*(f;klyis+HUMMSb(VdMZam5TNc3nN1NFYS9P9XR4m}@OP{~sZ`20~1={)N^ z*7nk>vizrT-16P0!*WA-{aCqHMri}ey}IfI!(}kZKQqf&MuKr+cVBYA5Uh9Dwa=nq z3`8XL4a17*;KNWbnL0cPpSObY{E;az5OgzJzC8sht1GEDB_~0DhOtg8X$)HC-kVtX z^Z}=8;&F-h`Cv`dR=)hb6Z1u)GhB4WFi4QXC;FX<#$lt*f_G=J|0vNb>(LC(Uf`H* zm6$|>aIvJnXQ{XBa~@tIaCvl?So z5+Ndv1f>U;4%$UdLXDfRq|udm@M>jMxS_QOmczxOw3Bl{S6;dAWx@bclGS3XBMH=K zyZDauq~c7NLa`cwgqj&Q%QO9lkvu7=>}X3vra^TJL7$412}kdAmh@mymLoOUwi2s- zP2#oP;y}0}tRyeJ5|$Hn&u?#TfK#sCnP%r2;M8K`{c>hZW0Y9W4lkYj*gCpJt6rVQrBJXjHgUP>I@!^UoWm{$n4ySDy)!)?u#LT~o zo6b`Khu(WOC(uFtNJRaqZBsCnUg+v|XA(Bt?cQttq(484Vh1@s|MT6 zjaBz@vXQj0@wEH#di1rCi%D&&!A@VoCx}kQ0}~|eNb7RA#zFbo)SEFH0d|T|Xw_Ui=CL%=g@dVoF-+ETSJB!}M)18|{7LnrvM__>6 z8lLy6cbl{_@$7b1m3xtcxP!m)C9}#6^y9v%F2+*8df?eJ#WkE7DBrq&@CFm?k9=TX z$fASy2){ylZ7<}6YSa9=>XDzl_08+S34FGF&`a^+JdRzG(iu-!#ueg}c=G*MG8e|d+jvw>u5}tT*pMO?J_8VxrK@(00e5+} 
zNre}5Lz`;Pj!HoS^4iu3pSwy&#oo!*$1!uLA#`z*W$D^HYBJ~hMCZ|L!}|A^3>YZS z9V+)ewH<@h{AzPJ+o3=H%HP!vI@lUXl5?NTf$x4*?GW-JjLh>$&aYe3nc;A&^(iwT z#kycoCrO7*;#!rz+sC>{OQf>Nf;dD9&E_YjjqXn(c_4~Vp zWFXiCTKG)P-aD_(tjCvMm9%4T14DuIAxc{HnG8FcBHAS8K-` z2a)JU7b0iT@Nd`txYt?3$V*-B_ITZkS7e=eM~9+8+#^cg$aN5^Z}tqv5T@bAsR2e=V0qq8jFT?AK8Z}$fCh2N>FCwnuqa+RX!L(gW|%20_Sq$q5pRM@52p)cz5Qe ze#dSmYWtlzdLwNX_gb3fo4%UHM{dk5t&yYnn@SM4$y1MZ#8*7qGrA#2J#Ix(l>w=d zR*x)x&BEv-pP-2DdC)xn?s`YZEYvIrx21(m!t1B&59l1GLZoT?uCvaKa9&^a{40ueR!Y)$2pd#{4@LT8LHUQN+@{#0%2bMAQg@LJT$8EsCa`IGf?@FiYnnr#?O@p zF++TNzjkULhIpuO4_daPRIn%}sAS^fzd2%*`XM&0e|8VIb$istJHEvij$m3Xr{=tVyy*f3|W z84g0wsn}2SH)Key5A{sYs|LTf;#T5o_etp|vhLm(K-z}HV`J0f$i2^+OWSu6)y^bI zh3HIS)TaFhrlluw_^y(a3~>SzSOw|pGe>a$qwvjX85E>wrwOI?k}#+(wsnH54SBAa z$X!bPhr9{}a(l)9f-Ol}Zja)cUtc}BL+$Peu&jEnf5&8iWk-n2+@5PXX?lxH2Io?0W(~KmdU+%W+bi7$sHJ#8tkKZMum$_|MQDCX8AfSE~ zBdME`_YW^1R|@N9%`yhQFHLn9OX)!7^{80Q-%U_drBGgDG6G-c1Z)K^Ge9Q!r(uUb z9m>On@>b&q!Nq$b)VZ?)WVg0Umb$d#LX_)BaL6c@%8hIMZkxjBq|&7%+BBNi9Mhlk zVdBy?qVGprI!dhU&Y!yEjSY`kG@iS5pP)Wv(eV1~r6m1)5foPiV{p$$SrUoiZ6AR$YFQ?;kSILN7jk}d} zC$PTTb>ZiI2A*a0`_w#6Lv3+R_1XuWxZQ?o_Wi4at}v<Q38yvW?YeT?@J@1Z&ri)r4D*?(Nzm;8`$v~Q=`)AG zz3yAgub~Oh$mKpdLmP(^32%;V%br_ok~_LD|-7{9W8A7~Ly9F0+#fJ^K}- zi+E=sS2mcidV~p2c5d2beP#lV)~&R}vJb$4vvHpdB${D=72#5`BSL}Eosx;94A9@% zT{J%X54iccf<-qqg3D8dt4G9J!K_x-ZI?+iNS%uI;bDv~>g8b1r{Y#pJJI}6FGjND`wG$v@l=!Zu2SVjC^KTSek|7q z9S1mWr$`RMoS?DdR)>DbRejKUaa}8@b!8`;e#a?>foX9h`_&ODtvQ!uyi4O5*o`q<)tKMKydQgK=6TSNaIS& z=CGasSMCXa=?6sES9W2XTeb?>?ej}zqlWN&gPN4P%p|H1XftgdQ>ZLxH}_SWj`KS& zZU^>$BsLh|_#>Q-_nv(zae7XKxSF2kmgXV&m?hNtaAp*eE-VdMvyH6f1>GOA}iL3!Wb&uiJr;Y>k~h|;G{ zaHCTn@9QSR6>~|BwpV1>6LC!Z+~YR5o6Gf)?@wx^_26a zz!P)o{YRLp5Ah%SBF(iX6E~BBLe_UxAUiF4-7Z!Fwx&p{6P~rAThZsPpMfn%pZ!{S zPOky2FO2hk?kq$HC*c>aLouKgRkn z*C#jx!OtU%9q!N|*QL%oqk9a#=N`D)kTVXCC)(ypl*T~$MUInR$q+0l=w`pVPKM7n zDl_FMbud~;&?XvZW0dL6bzOe#`1C(cH)>5Eem0h1m+Tq9gAb1dig%GQ;$U2Nns+m@ zf3>ZN>wW>DpyON-*$ST|>+^po4}j{Gp0>^3hajGx#y%uYg>#pV?>v977h2v-EglWb 
zh2wdzHK#(_aAx8bhrx#typpxjV<BnWOgb% z3o;~uS9rv3re_tn))*y!75a-I_SR8J@m=^K%Ee+Xk%AXq$h_LoF@!9^g3QG|RE!`# zlG8~hp};+3ugd3@sN7Yw~j|@=@gXv(b zuSBB08-Xt-m0IHZ-N2`t#p7k;l-v2^`;xpAk#KJ-N6(F0zV=xXxk&=U{Sq!kRHV-u_nFP1HBSwdf zCSfQ`@JOF89a_&88!n%uftkBmxX8Dwni zk5DnVf8q4)4Gg3S@E%_ZUGs0Pp?kj?r_rJ=jzN-OB58e7%CGTp45MTp99i?XJK}aZ zw5P8ih45bWQws@VD>(wn-NzttXPU(yWWdHhSDVJ;>5$ZMWa5nS8oqW9E`2;&3+%t& zd>V0YLDBaWW1MLtXm#-I-hvC0csI(mg^hg*E6<;gOtcusljQEoU41=x=<-0%?z$XU ze~Kib95n#93$N3lOJ@tH|3hmDCF-qeZ z#fvIB-d93rko9t`ZKM1mw$wIdc-1YSP0m^=3Y$i=Y$M|a?_reRk*t=X^%o1`)7X7g zNFYe$ttdWD2WjsmMdk9BU>%cP3*{fe-VlS{W6Wf&jll6c27Xuo9NH=Iyptmw{ z^?p(f4sNVe{-scjdBT^}j_VbnTJ+H3pPV=duQiv{e$@o-lrwClB7HCsZn3A5FbpUB zOr6eajKlY?f6*@z$KbxFq3X&TD#ZN0^^EYo19U8AFF9F1#Yc|~Mme?Haaq-?Q!S$( zZP{ItzPubj?rRaDwe>_??KdZ>*~Osj=z8`*-Yz&~6TQpAiv|@lgJ@~RfU)(}eX zpn9s1-KMf12r73i&U{KjF(-!~7DY6?ipj?b`670 zQ7*AKRt=f&a{<{~=3(oR(6~nLL3ko9b^3-*1$H=IWj`7)hFS4{<~hozv3gFgv{GOe z$$FmU zqQY0t9uB9jeOYM(t~(uZK!nzmgn7CfqtuG`|6b( z6qk5-@8UX7q*rh=)wBY@D{I`bPAv_8BojC0J#4_$5Xux0Cev~brUxN;*`v-Miyjq6x#de-_8+MIg%QWrlog5l= zKa$0GLfJHmG=B|j9hiqrjvv?*s`}yW0H2AIdk+3HgYte8=bnPqV0uD7isj{ja!+81S{+`+*w9pvx&`c!5;z7|6@udlP>F@UZ z%~}t(Den!J<6P?j;zCDbS%<;AXMb%R8xaBoRd#Mn`VHdmzG!nN7oqvK9X_v|tFijq zMHv;#GPF=V8Y^y@iry0`_Iwp7uxY4{G8|e2dKe+omYokTkEAQkX*^xiWCi+ALh&lH~`&pCVHcKdXY3dqIN8A=TMkhdzHG2EW5;}0N%s;rFmJ2^3k*)-%iB~@bZX9QJV{EDUL zQ}EuHn@(sz*Z<+O0)r$R<5C8L;48y4SwO!82o8&TM{id_l)|l_r1(;>XLs;;vYZar zm~oAIIhR&=Q4ugY4Mx46K1YG`fN=(X!4)^0PIF==iflbLevt7R^P?(T;@vmqL zyxn+;=x+BDseO%?{dRZa{&e|mSBxmQ;dZWFh3qgY9jN`5ymtr#w+mcKuIfW;`x7&2 zTz|gFfYB))ir>$mOrCZ{rLMJjDJzmQ@IzuIkX}png9A~GhUpy#d!Nhp5NRFdad8V6BE%W-5}sRO_Gr281AL?JO{X9G+%fkIhSJZk_M zJaXG+->p*MG@q?Z$E!iOD>};YwW1#`@FZod+us8dKM7$KV+1(m)0OP zmlaDkks`yh*SGek-tGst%&^Hf%M=juudHI24nfJ+IrqlT!?0u;cluB35Qxl;6f8fb z!2kB|9#HysZc(1U4wSTR*!xF)$6Fuo1xK7|#F|3;LB8rvOmlJ$5;@j|qZ0Gp`Dv0aNf-&zarZh0IJ=q1B$1)_|q!3daqDM`;A9f$r8$<|{($3fOx?LbWQ zFsNMleDNFy8HUS3w%O@cgYd@n(dv_#nC1L>ZqLmo#AO!un)PkyI{PCs-?<6JYn^yJ 
zvvbgu=%TB$vlOz-lx}v!5n=sn$AUz^0T^giDiUNFf*{tf0>sZ$*k{$}xh=gPzF+c) zE+BV;%eiYR?D>_TcJ6p1*PJKlNbO7Np%x%7V`8}cN(nB+=WZ?7jd)P=qCVaAD>Ck_ z%a)JLhWcN2O_iw?@Sxc60oA*MlN`~}X8)3U-%r7#nq{(E+%8qWAdy2AEG@Hi^sEVP3F5>@&lZ`BFl zO4l>^W7-I}b_RzpCso6J@1J4s*P7@5?%%RuS6IQF(EK=95oBEQVg`YV^`rd8H=gkP z&fBmRPXRdHZPM=k`7?5Fuv?r8`h^jL*;6VW#b^q&sBohOTra!tH22{)^boF?im2>D z@tVsWn_u?gW#3r05Vao6H}&6=64j1Mb^7}%UzI9JJU<*Nse|54IhDD$9%SbhXi9lZEk)6fBYU-ss6ZRDfn4}#t zl^Hmfuc!81t>pjrSRWsRNGe}zM4XBA3%JvQ0j4zRAZ{XxmVe-8oFU_Rt%pKZWfY9} zQP}p{WCXwYJ^3iJX&lX?)mc=gCh(Et@iK1R3H965tYf-e7=F%z*;mqskKb|y$PEqP(9dJJwRZ+EM816Ia48u*2Aaf& z#)&w~*S4VL+l+d32kSLC8qn*{7-yJI1NIn+o-Z_N!`FQ_8`G5g@P7aR0RR6ym}fi| z-W$g4{aBB^S0STNLg^e8LdvS2k&%p&gpg56C{cElQP~xlSrz9bJBbJx*)p>|kNx-R z|LXdj&;9B?FRt%(-9PV>XN?VJP(qj~cg)ZdsufH$tEVNSSNrcv*TF4x{n zszJ@S$YYU`y+HI|#<>RaFh~xHncdbO0n7`%@RZyDF#2ZjTVARS6T|V zYGayFM$G$K4!Ld=>Xj>dXRQ~BB#8a+KGlOxhX)@yr`U#!U-h~xMtwjsebO(=$I1Ys z%?W55*bFwp?25O#zXRG%w50|*}Z;hC#UF80sugER`Le zLl5h|b6Y%JLi49vL!_)$kkLI)zx!LuNR#5ygV~#lh{&iw=^`?XDAgC9ajp%Z)odR7 ztcnUmkcspqx&Hu5=?gj5PD3DJHEBEk&N#?2O{U7rm;fH5EB+tJqk#N5)+y7fALLYg zosYL}0wxWF3)CA$=%w{Bl1f}BGXGT@>iKpEwJ|D9d!>z`Xr+b2C;pBj*S|+)Y2J<@ z{WaP%e1XGAM}ekUKB5=dq#D(ITx&rW_B$F0JyxE!h5jiuF} zM~JnkW=^H19%T%Oe+*dvj^zA&Evp&Y(QZI-tj(Fd|8r%wx@y~qnDf(XpBaBZjGqiX z60HgVC!73pRhMRPpj?4%R`?f~sP4P?#-;RCxF6Tj$$OZ~FQ(C&>Er561(`)Y4AONSSvL1{~0eXSNfT-|AU=~;; zr%<{A#D1Kh;;CH)lxK_jN`qEF5Y(!CqrC`fR>M&kaP7im|!f}mz%i&wl;UT#&f5Ei-DbSYSCN3YN%tNV5VQ7Vnrf$dJIs(2=dy_;qkd_~I9-;tg>A6H_6} z?mFNsSon5Nkp$kF9PS_0UjibnN@dA+{(=_fg6+d{?clio1>54gjp)wYwO9OpBdG1L zJXc%vGz!(YBM~4whhEMyb`CmfE}9Y=K&Z|=YHV|jkx(8$`vz+c@COZ}Gwo)z>UYM`mOATVYQYo=c}0Dh z&3q0;HXTvWUzjIgVf{-$E@aO zQL&fVVMVV6Boiwghco>q^Z-$D0l`Rppq*3qf& zGK-eltBA{5OqnBn8TBN{K<95ONa0}7k*zQ?8tH3(;-&fDZnF~?jVlMU86`3fj1}xoQMX6$uypr}L)Kefc_2c&jqRk}64zKN?kYauQTnkFf^|jQ- z;sq7{tzN~WwMm7iouY~>e5tTs?E027q{Ky(X>;sncTt#+3ptm23#GM*2aATTqdzT7 ziF{bKMvL~as zoT6-xf;Gft5i8y6xq@6TkyY}!myknL_4d;4JbIU@Z#A+si%yBXyRI!WgJLTdvs{d( 
zk>pnuLlZQGO!nRVX8U#$RbHCi-FIaISsyg441O_!_8hJ-{ZB7)Im`HqmHZuz>1;~a z1-(PJL-G%$$X5dQ&r(Mkh8w}z8@D7~)W3s20{UrkJ_z75I+FP`O(+W`sb=I^ha-vQA{hB+)(w!urT(CnYKTi~XrOX9&F zn_%#=tVICV7P!~b6L93xHgGvThhD$f1r!56|1#)N!j>iuUTrcJ9AjP92rQ(5YG5nE z*_aLz){DB|HPORMA-vmnco?B`hhe1=V1n`YNbg+?nW1Ir%^N)0ERZ_mis>tH0+e?( z-}3%NfSBF1vfw-`L~KG%$AVd5NNM>qS2tEjL8IzaAj%4_di$#SmJ(nQC()SIf&k+t zZLch-5Fq0845)P=z|H$w<-@B4s1)G&t-gm9HYc01bNyw5wYRJZTt4h@UcYKm`8PY{ z=A?MC3Zm~n^sD`}$kL-|U|KlAa3wG!@xQJ{f*&xfPsG+7Y8#FR9@yX$1 zgU7#6m24ZbK?zc*v(Fzk7?oJbxO0{rdOhniI`xVjrg4`9S=6$_i=P@DAEmQH=_{j> zRR`JOM5roT*90q+&2OW9!bpH$4=OT0U}T2qWpi4n93wpXbebhMlOC4de8)NXln%aA z&%WY2NDJA>=hU0LX<^{JiT5dCTIh4VCsHAU20r`6n6`g{8k)$xmm&>P!Ey}`IkOx} z$eq@w)_;`(9=*b)cD!f@v@!aZ7Qt=6M6kQ(+!)>f_>Ds zw71MAfOq=kjl{H1;d$+6!|LU9<>UiucSNzo5p z^P1(z>VF4G5(UD!e&uN21lyh1mO&)PYx8{O-yABOB81QftfK4RYo)V%H&9Jm8_PfW zZS+_~>-G4#Z4@t{ckZ$9CVIWu8GQR937O716kQ}NqlBuT!?`2>P$|7$^l_#cbX<)w zjj??OQ80wZ)VCNEmcPRG1kJ&HFuG>!oiO1@T}VjRPM`(|K_X z;W^ge=EI@Qt&0c$@M0dBln?j#dH1-Inw+@6jeE@0qt>FiaKnGzBsD1_KEx1f?)i-a zAC;VblbgzprG08>h-Pf~u98Dr#wr09FNCjh93tRCi?ksgHUj>`|Fp0A83Bj5$E*Fvhu_=R_2`+f(r+Wf(PkQ~ZCe$8n0Xh8hZVGSJXu9i8UE3Igjw{T zWC>l_)-bx3aBKECZ7;e*J@Z{Hy$5MOooDO%(vRrR2!1I#GmK{bNrl9SjiLvdoOP~_ zBS_DqWQNYEAD#YF-YMg{Cl!iXu)(t$m_ zr1_2JXc8~1^i__q2<3(H#M6%h9C=~T#BkvW)x9~(t3jq^9{4`oZrAYz4>UGIzA|z= zP{S(s$Gd!P_;cpbmo)`$h{Y%+>pyToip6NeagGZH35pllQF1}*6W1T=wiBU7(ve=m zCnDtkVN_yQPK5C#mQn+I@0x9JXyh{y&dx9NJ}4lnGB}d&T~YVa@Ojo`Fl7h@B%h$E?W))YlXYtL zs=gaw>A596PcR8ob?dpDmRtpwc(hB{Gnc^wj-6}SCzrui61x=OT>+%7vfzUZYhW7+ z`iRt$f&0k7)0&1&pmNC6p!enuaL&)0f5=P$SGBvc1BEEyb!PwmpHFvy%(m&{QlU*C zYo3#vsJR9tGaiIHKV1MzJz-g+$>YGUQnUQqRu}NiiJ;{9gaGx&%K^$4gV4#JURTuA z(vhQ<{N(%8BtYGOE>l$30QQTwUV8`s1g4)W-u1Qjf|%BkpkU1*P?=}a_(N?Ntm$gc zzg8Ury-{pktIfSY`^o7ui|2oW=U!6(%n}fga&=u)`Id)TP7>B(i(Yh12KA+wBpzLyyUIxnNB$=9*5Y%9oK#Mml_ zV-pbc`+AJ(Q#r0h&9 zL~FT?$OIGl;^bYl_@KD_tvMyucTMm2(51$&UC<@&09vd*+ud z9QoZCX3S67^=qM#1^>NUKXWUPfM3K>?eFX$;FRsUYi=~GcvrtJaq%K6b`h=K-^sv+ 
z&v@y)F%V(L#Wy1l>yh?U_;_LCItT9WAzoXgAmR@nn9iRYA>t?9QMDz0TsW}t%xYo@ z7rqJXf?HI%@F8vEf)6c3EPPbWZR9BttDlrfT6<5#*Ft#QoX>ON6H#pW zngQH+B!R5FA;^nKWs8A&)cn|*+e}-2Spdr(8(8vwCxrhbuqtyd3gh(m7w4-oM6gHX zv%I+q5&Xuy?d&iR!ACs~j`>&#V+iYNV%3H4heNl#RG9>^MmBd;=_OS^O-yf`0(gwp)g;-k57D-A>Z2J$H7Z)j7s_h zaAB)RaV-|agWmDAP0>O)H8Z>9v#c;a>e+F7*jX3{ne|sG$qD1eQk%^bYay&&@1hyL zA%Nc!N3ZR&@#CtGI{cZ{Jow0F1%v((F8oW;f;;~$2R41~m|UI8ioadCHB<4N37>0b zkR(rVv!GLL%>^Ve0N~LNw1g+Tk@YBtD~gCwscPqc3hyuH}WXwMg1tS zs(*nxMbr+Ox-U@m*m@K7WofTEn2^vZ$sqv~wJ&;>+fRO+(1V)W#2Caj(}Y*)-DKk#c&DXg zPVhr^n)&htetsy%rje92&j$sQzbxMW!Uu2g{xHyw=7W|Me?FvL;)5;|liCf^eDI8F zjbiLFFI-w7J?s9$3*-OJsf5SzLdWm#Zyns@*BjNU$Z<_xSfJ2HNyEttsXEjzbB*vo z{X|K>x;!2jKx^2f=g9*b*F?<+RCwTJ4eHRuC2mN3tzX^$o*NDwd-RXho*TM9HE&R3 z;D&|&-BsL7;DS_|hgCXgxS)$&(O*6TBAk=Z2sFCJ37sxQX1>QDeL+WH49$wavxGCp914$g74K9|AP43=-=}We}TWAfUXYXB={Y1 zkn%+Q6bSjEz7*~^18lM^G@PmDK(yHpJvrYw;BlsQ#8`V43={_69+R8`@?T~C>rom5 z@vqcEzWwO~&Rf<^tYIx6=)Pka+3OjgE~wMfim5?1N3Y6P__QGnW_{m7U3;rAPq=%-3jCI122=it|_|AxVlo@0y2ej~tyL>U@nIs#G@GCSvThk&!w zgY<@jeIUF0*N+Is4j`6y@!wc;EwC5#@wPqo7JShvIyq$j7-`>_89GF7jNHF``7vWz z4m6a-qH`_V0Jr6_T!WDTptI=aXUH-E4)6)5e^Z$SS$T>dq@)%=xPFCrK;r@!-|(E- z7@P;kOeBSQ<7WVu>ehg4&^Wlb+Ds028URs_fo5Y?ZGg4Y{ODLoDM(>6x|I4g6ItJ> zPql8WM*08p>|d?bqNWUEYQMY&)FaWXO$N8*}>w{8pKj#g;9MAwS; z^H4K&XZ}Dpb@#{X(9el>yYI{YaBh-X{3XDEd6yny*#<88E3Lywx$C312VS zsf<7b{IXkqNBiSm5860J-cI4fw5*mXL&x`c<69{^Wyy_aB3d-sI=OMR^FH&?d~Up@ z(tYI#ksIGj>^9jt$A#H*hLWj16Y=xNv}mP5A~vd9^i96Xg>7P!FE?y*$VxTNfLNE`eRa~ zqXfR)USOiuDUL6MiLoZrisMUWy_7Y=V)$99169=>5gZ#_M9oGcjH7?(v%9Vc;sv#Z z&-Au}Sk&71{#zYEY_}VdWRWR|^`B{;KX^?Di&uPgY7G*`E_$LiCx3}x9?AQAWkn2s zc4I1M{wanR8H(=yz9Nnfw-e)(tHrT;MPQyvvp7y`n18ExTO5D;Pb2VSwHV&vIlb$B zUKIE6H!e0k5yDR=mkTbr58qfPVDo^N z+>fp^;}P+!t2*RQNE+Y zkE%H;MR#ei1v8^J%`Ym<>fiJX9HYS8r?$(G&j!+u9nNvgSVpay48Q&>{fmr6MDEx; zZ9t8}>)h4F{UGt1@x^(EB@mDlU_EkS2iV@&eMBQi3#+VoT5i@bL1}|3b&UWv7#1XB zHR{U&U(`f4T=U_COEd&>m*L{z_Gt#MZAf8kY!tD#iULE zPBi2ygG3?NyVNAKHX{t%%5v=tD@9dQ%t 
zBCtO3s&e&JVOXoSeR>cJ!R%r_=g)#d(9rGv&9VD}@a1n-{#RrHD9`MdV)j%3R!#QC zgy;)E)yQF%Hgy45V#h0zVzL1=cD;9t-tIWz>N)S3w3$v`g5QN*kcHRFU2W0cByu#S%3iPY*|5 zL=S2orGXY7Z-b#QxiF!I;r8h9i;6Q+1|4N&OvgwimR zKyj#Aql^?89CD)aVAvvqvZnE@s6D+nqf4gwMFRJh){BXutAL+&so=ncC9o@+=xpvk z2c&r_Q|v+}fUK)f#g&X9@O-JN<99~So-c<+d{*iJGp_bKyqazQzxxIZUD#iv)C}4s zB0gSySqIK5oP0H(QUx5Y*!#(%Hz2M4ByTd+bEL6Je7+h~h6e9FY-4l$f=FeC)4D6Q zD8Tdm==sz2$hi3Ezs8gG$RRcQ&jM3D3a~4-x4u%33Qt%Tb6#seJ|3nduYgA6D*8{y z(X$bG86?9Jo_eIca4mcw7o!emQ$l`TA-aV#d6Fca0H5r!FTCf(c{rMi+0BuBIr^sdLDMadL6$d`+xjV3cL8)qW> zk(|aBlmdiq=;WqfuK_E^b15ageu6pe?oEH(2Q0ReF0rK#16!KsUKMqt;Ji_k@Rj*d z@S!Z8FmZSU_z0adQaSJk{JHa9@mNe3s5ooE5~|t?K9q1R+P8iI{;}ftUriA(&Po2< zeK!&8ljWy)^XW123MsC5pp=W&2IUec+dm@(Hwjj2+a{#`)wY`cQwQ>|!hr@~`cRyQ zfWf}aVRYq1v0+}Lv9l&{f3kk;)8RwKw7ZgCNvbqF@3+irMWEvoY9BnW#?kR6$}U-1 zX$H%qsvt^e2X+gnyQh;FBr}mKYS!G^n{$3l*stcfMS6;lZXK^GEX9ffJ^T5 z9ErUqgah3&6>csF;D???tyk~xpHfG1knsp(YQ~#oUR}=F06BZD)G{raAXd3A^u8j>|?m@JdQl=MV zFMzXQv2WSG^nn?#D*@9>bHIn~3}KJ?8ff`;7@V=-hN}5f_0!J<;nPHss=`JBl+o%~ z=s8D(VdM_)Dq}IIZJObq=R}4#x3`vSm62gm&z`uV^}SPbnh1-;pyvKj|Elue#82=99kBoxhKu_>jA#&x8MKk(QcrUe|(T@avx$U^5JA$$;+P^#J zk0I;k!<;tkDDso)|7T!0f_%p#uAh7~gbaV{-^$!Kh&IfQ<{7`tqix8t$wam|qYX(u zVaabl(}^}wnyzr~?L~c=8aL0y51@oYA!5&^Mi8fo98v%87|I|yDQvT!Ko0t$^ZN?N z5obhGeC(?cbnknL?3hnK;;P-R;D75oQl55K-x6Ggc-b0++#B*h#p%+ef{=QUSmqdc zdgVLN+#WVnOzi}mXBet(Cp$m~IQ6KdxfO&9CKK=fX#hL8ZPM)nYe4Sli1%rM<)Cg` z^J30-1X{jxMO3zzg1puYbt1-K7EWIJ!}tiI*^i4Z{VM}mmpnf|nJ)ztEAvyDy7?eq zBX(z3>0|JKq~aUi9)>uGDn}h7(ow()lk`{dE$ZJc?e7gJtd#mH78`Bhs*Wu6uk{bquEsj3%AWSv~gX&yzlcI0(GSe-?IhV=29Pktj^S5eKk z9e>avRaRJ%-YN=76ETwb{0}kXzU?sh#D)uxEYIA&!;YuD^hzDvIdIsXJ@wUdocJG{ zQOwcc!F>#$=jbv&b~V_2W#o(y{`P%{J++^J=UtnK1%gC;^Ebz4jUf__BHCZ*-%iFt ze?qQ~e=_v zzrNgNC3X^qr#CLF=7{009aRB7G;wT{N*dWJAce;@6{^VMF10It~mMz5UsqhTTI1 zJ^5UzSn0UYAsYoM7A!hxdoPcQ>AXgdqy*`BSJT4;O(Fva+U#0n7&5U_ip?FPn=EX~ z))%GVD2s0rS5%_9WY-*Xk>F=iFgF+|vm6S^1 ziW@hi@BAa<_j2q>H`7G%%riFgq-`5} zN6j**!aaiUVhIe6E+xPpc5HJIf<)-P+pG2zCPB*YO_e%rV({oOQ720~GQ8Se{8TNF 
z4Apjh&Wdv;!}%3XPpTCehPink+3ZM$sAc%ep9V5~b>@`6QYr;P;kVx|d=`gCeH7e} z0ZAz61&mn(Qt)!M^r2m`GB5~jrj>1{!R^R1?W83g%3r%%X0%L)&%_d|TtXS}uIcq8 zX-*~-FzATxb6`Twp+aZ#d?tLJ8}PnthzUzY9>vD~WWp0C&RV85FySMd)#7-Y35Ppn z7RCjbkU-$ry336LL(}a0?HcKDZMWl{P7OND4ld0}iKjsv@K0BfLW6m7rGx{*G?;UQ zFE-{X4gR(+9j+9nL&1#ajRCH7*z)2CXHOX&x*QTIQ5m7b=-7FFiF!JeSX;Hpa;L*R zd~KX#Uf!{lYrC^@&?#(wdB;a`Rxf3Wt95%Vw1e6z0 zV6ItiS|x=7--~l?$crOGy#*VMVnH%Ay(P6$eqId5-r6~2dzS>cgQNY>6CxaZaj%e; zAPPNvSqX_jB2c9^Ur#TZ0H3vG!e;@((8FtLn0{a*yiy??;%FrVspwXEUfc#)VqC5m zTq6jV<_{z!R0u%A(6Gmiqx{gUJC#Uq;)TMeElr*HxS{WSRQ?|w4!C#Z=i-%Wu2&0Ix#f)k)#d)Pi@Yy>>FSg3U& z_5=0>E13_6nt;I$mRXEf3i9+MyjQbtM?vE?28_3(sE3a`_hai%Bo)k6lB2nRqIrPF zaq&N>Uy#Q?JE z9?x}|Zbe?5u3XJh73kpWj+n>VkI@5zZqMYNtKi*ca^cPTr>J~aB+bO496j{jb@niS zGrFVkdQ-SaFM6025_NU=FmiTTF&4_7K>ryLr+$vjpl3PM_ne!4BDOQ3L3aCoq7}>O zoi;MF$Ukv-kn&&xog*k$qk%!>%$awdU!onQr-bEP4Sa_p<5s$N=T(6N48h7=&n_T8 z+8^d&PHN)2f&%nlc|ob9l)L2>0-~0 zDsc1LJ6kSBCej%G>DQ1}k0gpe1QmbiK)ZAMOyp4e7=pjiZuUyaZb zj~2y_{Y#o=EF$*IisbQ$Bw`bSa6ai95%cg_bCZ4&v8X-#X3tH+xzATZ?r$RDy?v8) zt!^az@Ny5f6A{B(Pt-RPB*?gnoq*Wy1#lg8$nPWvBEQgL(r z^p>G%I<~prPi?ei;>l-yiha*n__@vM-G}S?!g{v)z@OQKbDsWl68@<|AG>oc);;Gx$MfPurWn`YUbq2p4;B#pu( z8ou+$-nLMYh6{|<+6i$qyeH)uDp~icPteV_%x1lR3^waiBba!_QYFu$hJ~3&Grs&9 zmc^4D))yPz$l=~`4ODKSEy`Kh| zTz=YG0yrgp@Ux_t0QNe)O{HyyAAk7us)%LFkGbP7B-DxV;q5_H*QeQdFzwj>{9r{+ zEW2fUxyOZn=+WEnnTLZGk+qbc{Q1f$6e%pVw@;%LY1J3Dc}q0{M%-}Ht)*!Y+;l6S zV!Hy;|20-!=j4LjAHUJ5;R4W@tvB?h7Xf-W&^kXl5n=1;FL=X*7#!X&eqp7O0>cNj zi?2_J!&eRe_58J#fWO)o4%?|oK;U~qL?>4qh8RPl6j2;j)SjKCr%+&xlpxtHgaXTX zLKk({C~%AwT;C-i0W~VR76hM4!sB0d?K-o5w-27V>rEV_!ss+7ZmUE(tZ6wd@HUMB zZ6tvWS1c2Dt1)yYbXl-6xYZ&10}BRo3m(jzW5GnL)SX|4S@0mfGcoXh1(&<+NGh@{ zD7Ki8Ilu0b#;2#Y3v~>5y(aiqoj3zB{2TfC*8Om-WqJF~(V)QX=igjT(O`JZf>)~M zdRG-U9=v#m1}|@VP8QrihgHXReRzJJ4!z=2=Ux@jVP^_&v*I`%W<7KNPhyM?PZZzX zIh{p^ibZwDRrk_ohHqzWgkh@A57`m+955-1CJQL6XR&(fGclQ zLzC;*z)Fz8%$6-nU|iS4H|D||*x#DUxpm_txO7T-Xjk|!c*;5bcF)d1AYU|X1MCOE z=g+)3H&lj!*-Y3AaRMv)f4M53=cMe@k03I8NFk|c+dJ?VYt?b#0W|TbG9W+N##cW7edc`#QV|zt` 
z*7y^(d|P(wHJnF{Q_uSv%I1-knfk6kt$8G5nSaf1=qHlwF1oR?cLp8MqlaaiP9Tx? zM1R|&VPumK=f#OjOhwL^lq#aLzjSA=i6T?4qCprIB27NRD3->*Yxk!;(nU^GyC; z``udbCA&e!{9PwlUQ^3>aD4!D<>BMaUxtAv(lEZOKLU0hsyN_YJ_vgHJF~tj^?=>` zPuEy78bFcXW~Sl!d%#y+Gt)P*8r@Yk`XaWm4N>-Rw%U|-qo>cx+^>LM)T$8{G{W7D z)@(V=4$Zb9_0D4|k>{Hb|5LLkmfOCe)h&r?TjJ^v@$l|}gAq+=S@+5Z@%vrKrul{9 z$jl%L*bQZ_%TJ;6yl)%+ZktEj=qiyC8&y@S{CGosvSWYp23)z1N+^6QjJMXls*FA?f`gk6NppmX;!b6+XIj^Y zxZT}?uY8G!)sqa$cqt@&@5RSz&2AzdwX(TCk;JzK5}Y zcE6zG4oe}OP<|#>-mvw}JDv5;xOYt?Nmmx%iu>CBfn5#{X&vzC1uZ+)m29C6hBl`)3jTB|Cl(jGT)ZV zNfuk4@AcRgA%`_OK1MsqZNi^+E_!Vj-GoUF)y8TE@#(&^K|-4)@whM7a?u+xEJg0JDH#*N4=Tr%Z<9CT--8|*;s*uryKTk&23z=W zs9o`8nPa?oX?AAy(i1KyuE!}DzEu!tr46Z0izzW}g^#^I|uI!0&{rpKBx_HXI`F%nhUan1S7Nm&7u%soYRylDP z9mA>R{74+eyL`~P6D|QS@>K}=woAfeMz5Rtu1LeK8$At%Td7cNhIjw-SsL`X^QZDj z9UY!K_);83G2o2jy8+v6OvtTJWX1N9327%`hU9N1v@sVF*hXMM<&hYVo$I@$$FD7{ zr%X6+>2t)I%7jXV8(&FZWx#7LHw4Vz(4n-QdM&+|1{25M#@WSFp;SF%`B=IPRMIZp zN>`VG4Ih1n-WtfjBv=yMSs?>C!{6D-dsE?B!{1+gJ8AIg5zT7qOB$Tt&$Uj| zqv5_wyVsqmG*ojpwGxBc*Mm*i7D=$!bf;iw5D9YDQFGWkh|s@q%1@y~6dJQ<-%1M; zfwI=`?QZZ9;PTW^mJF8=R8Y52*0U0TMqeXG9FOtAx#@GMRVuttP+*gvR67q82r1-E z{Ko^MUCzF4)8d6XmT#Kei zhjt!V1WQLmZXQ+p325QBR}5B1!7!R1Y9|bU5>d7GiQHZwo!BupB;N-LOU)WyQU`$g zbbgM!Id>|n-lA`zk(&rjr@;a5Y+gwnd(?cYUBB)`J__oH@fCEdMf0mY>}s!D zk&!`Xg6_c|h@!>eq#)3PoQ^*Bl_=^!9r1S0e>|&4dZ7+f)f>;zLt5P>{*=#PDdjtd z25bigIf-7fKYBqDhh?_BWj`ppbnE5ICmvfVnt{wwdKYpeq`#ChIv4);dEIH)Kr%;@?1%QfTq>IIiU+pQ9VYL)9DpwAbi_xe zH$b?+R%d8W1$bStW?C5d1*B)odF@E7269CS-=`MS!QlCVpwB5^(dom1k|i|*NT&Mm z+_j-uR6JaBPw4p~s$3?H?$umH@oD`RH14rsh0XxHzlj}>?+Y90dB}mMo@w2W2;#&! 
zdVxosMY!>bQCoJ>BVL?r`ldDDpdgMHOn!JFR~YXonp-?1D~gq(*hhS&NO(>zJ^Z7s z7~X#U%CJ(V7*2hwy1KYb!d;7RGDiZ5SmWm2gDs^Zc*3aC)_N}i{}hTETp$W#GVj)~ zX%+z+2`H&9YKY>x5el*YofxjaZsKgBA&I+emG?Mrq2bb}FYC}W17C0GSh>`}!qjWh zkja$8Az+``@z-+L$ejJ~u1GnYGEq=t*Ds4}gky`6hgkRk)zq=Jo{3xP-Pp~V7+9f< zQYtUVzy+*1ZE%~8J%y!K&#KWekBL+HV@W!;pv8Ibqto&I)Yv3rZ90yrb04VmrQ;3l zh^E#<$Fsk5^D5T6<(SiGQ&E;D;9yC;kJFZr@3J8YR$Xb z^Fr`%2}j4BzoHPWd1t!|li^!$GVA3q1%BEyd1b^^91?`X$NIV{&|ae~C-OfsY_GmQ zzD+<3I{(|gYSKi6TY7Qk?_)&x&2#j30w~a@+3)>pX-Vi( z=u~+=M;hkxweA)lph6d|DVKBm=D&LE4n=RzuQb=w;nV2OF86oq zvu=v5IGYYtq>dP*7t-NffANPFt#l}S|6#+QWjgfiTxgCZFd!{X%ygcU0o$Qa%ZC~| zZ0E*V1`c#+9rId-vw;SW5I{sn|E_Q!>!>bY7az2^pxHYxS2*lY#Y1`N7Ic zGH}_j_H3TD3^Wp1JG$LR2JX6glC*3m1N%-!${zY94Y!WHN>5OfhR6LZ2Xthlpo_2* zMR{HV8VAnE?fWhc8?VT1Ga08q2Pe`J&t3{VGMy?ERX~O{b$IiAZ8B8nx~OCLS_~3x zM0IT2Nzfv;)>$Q!2)nELjvd${3SW?BXJXfTv~I6nMgLtPNDiEI*pVXuHJW&oRg(B% zIoraTsw*#SZAn>kljDUB6JH+Jo#urrO|93eGI?P^kFN8sFkW~>E8|sGDh~|Z>*|nG z#sx{Tf#D@Y4w&3q;Iz=X3RHjYJrWbR2o6`Cd61?s2jau2+7+Xd;7x&+v$o+FxZ%Bn z7PK-7Dm>4sGG2^>18Mvb19v6?NxQG(=+Gpfx=r*bKOG16RQ!3vOR)9_g<(DsrHzIL&C-07(?WoT-snWK$16fv?-#s4RhR)dcjK8jG zMm{d1`js+`C_(y4t@GVRWJ6i{{_k%qGS~h)Et1-e?!Dq0eqJ_!d`7P{n5naW1EDvm{-04JxE;mJE!*-PT%$+7 z<8S)`B)&B4p)?MFkgK5q?`+4x{{R30|NjivXE+sJ90zbKdtRG+-FvN+8A+1PK}IE% zWL2VtNC;`52&sh7Kb4hT5@m*S$S5JZWbeKA?C;g@`Mx>NdCrU9=X@8Nb*9@m=YR+P z;GV>?1kA%f=Jva-0?{7vM*M0WD99}RDPLa)wh9kxa_rZ@3VV7Qlv@VNTK|cg&Ch|K zS#uBKO(uZeu3!<-2ZLZ*>w_TgXfK$pXBul@?*oUFD?ku0rowz;dvT`&!-%_+I?6P*{E(gm%*AB~A8%GnvspODZaX=a{OClJyTnzwI)e zpWKV;N>~gOyhc%6$7My-Ifayt@AGe^nL~__#_0a_1+>(2puIq21xX1tzDrWsLh?!r zKX+p~Jf?j$BkBherg!P}{3OVMiMjnUHsjnljkczWKAM0NhL3&L(c;BCrw11pnhE%j zt5l7#8xQWk?5Z2j#D(eXtm>@g+3?dx1unOOm@(@FIe=f78OJU3d(hu!#Yv_^F5(qj zc;95zrtcSC{IO+t@sXwwZm$UDZWtwEMNz>6lou5IlqG9BLt7jh_Q+&~UzNZvpSeYY zwk2?(E8o68sw7tB82=Nqw3GiX>@|v)z~@-FmFb}bPSq}Ozc3(IVPAu3iwx-we($au6jd+|t!2u^=t&t|PF zfaU+NXP*B;z{fVpW>$3E*g^aoufSz?tSR|sPX9D3{#eg*<+CRX&S$NZY`@NeRqV>s 
zuK!}eo?W&E;RdYuySfs$b1*BO`D@glqQic?K2TjaPav`VOIJ-W5yfSzsLY;>!Vl8u;l%SgWNGGi+Vj{7+$QFPm1`k%iIU&eAs&=Y)RT%a(z3dc>fBuJb13R0s;@OFqdd5%R4>dW*MbG;FRdqe}C8eS&D3O+B>Yx^ir zR4BCjZz%=daI~u^)}g|``hUvrCR5?&ckQ^|Ix4*2&lxD4O@+s+eb1$wqQdm&UbgQM z1vS{N8M0b*)M{#55f+`7en~T|7EfL{)=cfu@j3g-Ec5rQ1WOyGFO(i2!IqT^k$Vz|aNMLnyR}*rZd=B_Z>kW1p}Qsf zCG&*g0^OQ(+ZQ3o@nS(Yv0f0ixfYt9pcjNu3U_?T`U3D?xBd6*V1CG<5wQ}T&j%Bm z6zb_}cwt!o?dV;Y023}BU93ywfqy<#8el(eNWS(k?_&cejLAb`jaBUMo8GMai&0is zFC@#zF2@3!l1^;i@MVIBqo3sOZrTOUQfyS?Nes}%i0;ZsM>-h3z$0o>PXk}(eG@A9 zxeYqXo6{J=Hh~7cH2d)KI@oig)p@gP4P@?JsLz;N1*g0Py>ihCXjgsE7Sg-~Mx@7T z&iwTJW3rve#I7f$3}+HCf!$8u|uOMWHgmV z`o9t6##Xx!<~D?^OC0ai2lk_1?@d`FlzPx}Hz~#$yT3?gL{ds{qYc&P>J{(=wjkbO z^X%~LMr0Vx%rN3vi$+Hu^`_Tgl=OV;XW1in#6@{XitQ~2gO*&aB~i`5@0c}RUTh~& zU24`XjPC>cZNGX1I}L!qH|oFn8wbD--@Fvw#k0(PdJ)_zJ^a|<#v-V@;c!&da1lHdpV%C-<{Mr=2vsBOi;i025Z zlvkciI4!)YcwLhX&u4{K@7_6&Xt1RjBf*DDRMRv28H8}*UP1~_l_(x35!PD;$av6g zlh$C6g5&Ji-|CD}@ml+>tk8GjIEim~B0*dN(`QL;+uV}C#}=|Z>Ea~td0vThx1HGy zHp-%}OW-TZn~l#`#c`qn=VIWFS9^R-8Z=F*xb1hI%Cas6{}k@e>3b}O*P$shNmD!rhXE`(jur*SGTWD+*4e2e;XeiDLnyJvK2cJHGUuAKo7- zg+tV`Ex$gO#{5B6?~^;E@o=W5ZjJR$PB=~}6qdrn3((zBLjntCC6jeFD7a+yb(g$4 z2|EX}&Da+U;qJfJG-CVt@a5doG}lLXu(L9JVDOa-7i4!crr2>{L%T=2SDn~#B25@f zwr9sKy5zCN9(LR|Zz|Y&l>-+Jp2~Jk;J|J9F(%1JIq)v;7cZ*`?AWN&O2GOr3#RW; zJ#olu$C+2jlRvs>v1*7euMt>9?>o+LN&Of?Z+enetXb;7>g}=fwpZo>gKx^s=lf{j zz={?5+F3^UjW(mw<^&t0zkat-eUcNdaio-OCUZmg&Inz0OCESBEd-f<Os~Q1bEC-iISzm3#BcX;_j*N!4CGbCg!L4A3ZDsz+Qu5nc<*kBDy~LctMd!DI9! 
z7^3FJCTu{0g&oat<~K?3PGPlI_!$!XbSsdjvxf*769?aL@)4o)y~BBlOrp?xEWnhZ zM;Mw~{XEB5CIsURGw%k~3PRcY;nj=Wf>0@%t2y|I0BmAOS7F&B067B;qbqCqVUbwQ z9d%27NIP@W`rD2(;(QMS-gNQ8+le}{9(4p*&+`1$a3l}(@u4%V*5ihx?L`UW2@V)g zQQ*{afejkoVC?T)W`=7ERn)#bM)-v+6*&Kv0p`lC*o%FngV{a0?#u{}$H4>%`N2Tm~DUW7u1=({Ka471d7G zby)}PIcHWb99RV<%O*?#mlr{|aOnM2fob4!$#mnpTpvJQDJNrDzXA~Ogq{C-Gcpfb zpDz4w5S5e8%^1r~qVt_uaW6J!P;1CR4Q26JRJSM6v9e(b&G{VVov9c@S{d5Zw?;$A z(8kdt-=+_3uyq%@Y;>X)WtN&2&vs;7p=xHI`WuyOCfGOTw;?kQpT~a7tteCVaAco+ zE1JzzIkQ#Lg1S76del8y(TQin=YJjegN&%UykUV|=s>Wd$^D3aGYR(5*wM1Y%mASk6&@MVESOWXE~3Uw1JdHuqR#LH z5LdCV;n_b1-X8b=)Z#J%9JQqGi13eqL&0vw7x#{WZqL(lH_wa%*|@tDC9WxOy7+>> z$-oS7x!B_8t3LOHY@-i9f3egwIz_lVfInw+cL<~>bUvKWEBkC zy16cet%1;M2S4Nqu7iW}_YURVT>~18Hy)kxUI8iO%0fDxMX=H_{h3E*26R5Rx7I&6 z1n4?@RI{UggLjvdeN&rWR$LEoar0-3(~F= zZv8@e9y6gDOI;{QUnet&G=csdKKQkeWf^gq-uX|Gj|Tt5!>(h-yYLnAy&rq$Sum;T zdAyc32hQS;%4R6%#B<`k#1H12SU@_o+Rl_6pK(2w)OU#m!^1E?&W#a&oDPT+y2XG~ zZj{Na%Q0ZH>VL9vY>e0<)bCO+FDtIINOTbxioI3=a)wA1;a&%LD7G8HiZW`W2bu6A7oaxjw(SpMs^o)m6*w^pNqv zn`JOb0!N)5fB&DP6xN7n9dn>a@5fx(Hzo7IW?URebn&jHXS)91mXlw%k*>#f1a%s-&z6 zIB?ZOG%&~PSRt4();F6S6C7{<)0g7FzyCfce^bnXdrwOriKlYnGpbVkm5iL2PTT2H zuH=q8A~gnH8f@5$gPz@Nr$>62>t3`-qQ}amqKbiAEN6UoI2x0LqZdi;z7o|M`x_Z4wL{SiV8-#u=K z6cV-8TDajeu^Qd)Qan&)-MuR0Ee~AJBD4*g5g-klrsa$=FU-+B9BSjm2l-4(UNU{) zhj9g&T^98MFg`6WZC8yTwCM9_4}T*Bb7tBU#>|9a&C#qN4}u6>=ND0tn-zfq7OJA^ z^hC(@v|WA0kOT!{WAuay#9;Apg6n4v3amD}zp#}+frhQ)q0=-}I6_7;cXpil06{>$ zzbC0-^>>^CqfZ?8;BcP;=ZJCB#3nLqR(Kj26)gs7pK`heJS4$Ofw8ZR%8Bs0R$wdH zfe61^UzrTKL4@a;Cej0Ph|qx9>%sI+kNGAc_3 zM?6GG-KWv#LKTIk`eU4#LLyL|a^vrkkT6v9c(iJNNCBD3PRek3Z?8t0k~>> zcuLe=0Mh3IZsSpYh%60tIF9f`i6*1frK@~UjdT2vmn|>k5?9q)K0tug#>cYQSGnOu zPM#L*%mp)?4Hz=$IpFzALdc_x6&6o04+f?)!}-CF(eyjMh?tK^yZ16c3GN!V>wD;7 zO(Y|Vc})wibh73}9ixGrp^4Ga&$hwTkNkvRYFi-o6X)T$`%@#WMM-T_ozo9eW7b^ukAazYpRhtL6`YylO; zQ8b$s1Ni4AP|x(AgYr5vh&)))ukAUDu1Bm&#ZD0Y(cx~z!G3VArn=P5c>q+g-)g?$)ej7~ zFx%YCUhsSGv$6-`T|jP?ZYuv>2VgU^^Zjkv1^9=LGd8gFf!f4LisacrK%EM1A1xmN 
z`xH-U3%(c!C;M(m?zf!;8;Y&xLyt^>q*pxj{;5--hjW%G(s&xozC2oN-aG^HJ}?BT z9+(4L;bHZ;1M}dfp!Zt8|00+QFxfYJe+g{1&KoOgE`ccTKcA>q7rb4yKLosK~jFb)>Txrhm+G$KAxOnwrCz)8A{Eao_%r|Z-Ek)D zj!MKcDsE7#w|x~w#r=NT=6R7+95gRf@!}p8cifZ88P%ZT6Zsb?mftCu(AVa?Dn-F< zk#aS6Tg5O#7yD@HZ4wS&XQSZ?Ct_p8MGB`I5g)5+`*O;Jgw>pDj?gHPaaX$Bqp4{M zPJ8^$op47SgWHzPVapQu`5s9fN3s+y>-eDXwMYufrnUe4ktBs*@U(W#wo2mUt4jx@ zuSj58g(Gp|JNGqF{A<`$B!&ss2>CRgBG_m8&uCMW01lZ+UZh^+#R3iJfXWItR@^7h z6<^4Sllq4RcQ3Kyew78Y1q*h3DX4Y2Jd_>N4iv}4Lk`@<)h={Hm=n{!rjQ29IB{wC zKXMa>1lPCG$&uTyU7IZpwLp8!|qAZ@qgz54=3^FXLo70s81C zg@x_x%e@8bOo9pmko$mh3S-sIo*gd=eZwpa{k&V~7AA#Zf&JBg>Omrq*UO6UHH|1d zA$}~TCs-7g%{_5JxKOWY)BLs~+ zyG8khg`iYpufX{>aPO|^edXm>Oq9z(zRCHWHMsPA-W+$Rf_(nvRjY<)!jLfjD|5^V z?I3n0oDeM806q@7$)9B?0AJ&xmBYFc(Ny4-)A!Sh5Ygt>3x|e$WIj7EJ5&1zO^#&G zNA4>Htz{bflOh_xRl}-Q*^7Tbv9^#y+Se|S*xwZWTDKSE)jt8NUA^GSLpT0R`##Wq zrHNN7t{+_Z#v+#2IS6Ls?<6@6kAQPc@>=?nm1b3E0WxEfL zg8dOr*WH=>z>B^;-dUn`AjYhEJJX;T@#WLT9uV(93bC$C@iIfmJ~mwU&ebtwd}zby z;jM9Wm*5~|7C(m8WLiqL`NvSk+i{0fvk7$jQ_ZN;fjPu#F?sk;`U-lMc2+>~A`NE4 z)rJrAcj1x4#ov~h*)XU(`C#QL7pALoQZS_B!QYt#pT1J%!Nx_46G_J0SZlvf36$Z$ zV{e05GygH;Ga{D){LFS?(L49*$4uyPqr;50*N!)unXzuIQ53mz=M}$eBMG(UrLpVqvzwY8(78hL952cH_nJ-`Yzw@ zGKHNi^Yejs85I{SM}I5aPsNhSb4`y^DOkqMX75#53NEUg!43ThgX^0|cof!6}7jvbjQgDr- zkk@&BDmJgIpSO?}#|q|KCz|(2;0w237cR|6U@4y*W_cq?OuyeI%W#*# zCdZYOjsS7&b&q^(C7ptGbY*`-DC z5QbRrfPeG*qyiSqrj*YYc9;c^%xy&G@-gGLUMa7GJ9l9wk8mN%5jxzdPQTe8u#OB= zXzm)Cjv)*2nAYPzzJmnEC^OOJ1(5z>sqd*G9ekMJ*A#V;1?G%*$y7~nLM5gu+6RYu zpv%y=yfS72e6YLL4R8}6>$9USn;ATCq_9Sx7xKWP*SXhEDeypp5b=1$d>;6AJcJ}N zLxA0T8VpXC@IgIU;ZM&81z=-Eu38wMFl=tr=o{P?fzF4A&louo;od%-Po@SWXn6I? 
zX4X3rw9qu9Y5G8dgXV|)$O$C)&caF`7m=VK{(=U{VvvYLUUavM!F9SXc_&-Qa5H0E zA?C@@N0H;reJ41MU&;Xb&io?4jH09&R#6_Pa7)0>4RS$DaBUA6V29GHSE`PY*x*!-ccxA)3p5Fi zDr+oZhV|bVUk70(xV&(?NAC+GRGO}R8u4Nm3>uEU=Hbc!6Iz8wm~YU-zyA)sdz4HE znUs!mekRjF@wL3&ycM)C!@lGihczucKGK?JXV-xH);V>% z(gnZ}r%+#LGYV83qa)k68bGlVXVQz~^~m0C8T$Ou5ZH@I8g1`?hYX`bhxVq1k4_+F#=W(5Y_q8Rd({$C z+#DjCv*Py7dBnr?-Li6O0rmb;y}aDMi1ey&AF;c?gz8rHdx)V+C|r2)4_m<^>MCRn zK2kD|j_q=tciTII`eiOk=xq+8*d%UF#^DY$WZY~`n^1|~aG9KY`z;Clb8FA{Ppts{ z5^sHDl&e7)y`51kCHk5sANvq~V zD-u}M4JsdRK;g0BN9$a_Az{h&>34#O=zjnJ0RR6Cmv=bUUl_*iJziehJHD?~W|Ac9 zdo(B{MWR$jTNzmqMH|T|A`w}ItjxqYLilBtk!-R@uf6s2*L|Ho&UwyxuIt>N=f21j zOXmKbfwK$K@#!5U_*Qaw|B2LMyyRYwU~E+mUX}l$-dr{jPov*<{F3qp|H{*LHDo3S zzp>+TO-sdB+;`(=npScXZpUN!J72X8x1d$VG9{dR4J{(Ey2-3 zPVO}rmbbWI^mHAzF2vm@-`|2mo_DJxN*K{{qahPJjTN0drr75}QFjV#53p0L6DMTsKSm&;nLbQFQ=Mrb6&{w|Rf<9*2_3Tk8sK@Egido?Jk0kbAz2{_ z^^Xi>q^!1Otj|P2XOl#9^IWKCr_sKt(ijXe7Cj2n4Wps!k={w>ccqa11i#1~eQ9*r z#_Oql(YCU6hTW`^MtoONa-^-L(aC>ZOv<;VkiaU3c%V8B85jMG@jORGi6+CY^&}E9 zyx1-7xG09`09Q=spRbv0=9-J2C-2hp#4?B@PTA8 zU{vUk^#Do$nL^%kb!jAUSUvP&tO^C3^!i(1bASp)PWmefPE&!5*RQ|{ISh21HmZ7K zfB`KVVfk}cF;HV9v(6EXfimhpapqqbc>iDA)fO5J=+W&t<*v~Hcl;$@xda+maUf)| zeW3w{ClO4mMKln=_xjS0NE+}?#PjoPXuy^9+-Ii<4QQ5noe=nrfvWw2!E}ENV6I;i zCHG@sEzOjyHADr!I74OgO{u_Xfpz0-E(LV%jKn_kQNVtttjaN0GB|urI;nSp1Qc2K z6aGCVfn5ZXaU%;7u+I3*@yw6}sHR)J$dm*O$nT;pkCH%1Z=`t~fdrgq;|03>CBaLg z;8(tf5@5J;cz30!IFNN~#l4S<0_%a0YiCXfgPM1yR=%c0&`k(4dF>?t4Cu{X(tr3t zm<)5zoToZM;RN%})8o`y z4$!|i^JNyY1FS+SKQWpO;QQo5-JDqgf3kh$+-Vk|)pYi~vKcdI*xRt4s>1}fGK->g zXBfbil$e~b{T5z7z@&=4tm3x}9fzlX&*Pn|>L6Zk?$pz|$p=dmLbFk@gTy5!wBjJuov^}wxR$maK^1wYmcc`upi=q@$G z$peL|NsYyDW;Q`g<$eHEbk=>K9r*$ttyTSPMTmlL%1nAK!`$F~7Z#mYpj|;I)?Bs%4==;t zuP>~?9OEl4F-FU<>{eyNxW^)dzfK7~8lQv8VF|Te@qeMc%={ni$uanHXj4%rbPzfh zJ=N(R{|!Ig8YT)ji2v@@zbJDvfj4A&c_<- z5(-W5sFjs>=jeZs>c#i>?yE-l_Kupp31=(xP^ii1tLcOl%m(W_owoNqua;}gh@rmEzm+zX zNk}^MF3*LdRP@6k4yUhSNKfjTfaC@ZadAhG^dqHE`&@B=D3U_T{D;jK?n|MeCp<}7 
zGc;7HH_&-?n=c2Y*JeUwG31;I`hVF`kOdad7g|a}zUH^$qhCs*cuhsAbdn@W<{n*; zbCE=Ov^8_B2oi##_V<%6Q_!8D+&9n8U?|9&)3!E>hL(%e+_X!iP{jEZ-LDbS=zxFG zp!%jX!m{497z{|GC+`@#2-?!fjSJ9cB&5)7W-3wl9frDJ1t;&?AftdT#SDSZ63A(W z|KiUEQ4|-ke4XotFp@qJjYXyrQRnFpTOJodG<@bMT!7JIROgI%`yGns^XLn$WV&F)lRo^|wy{Gj??1-)r7v$btkWr~VV-WI}stkEv_) zY(d4Rx=P7OtB|#ilcC}7Jk-9eIY4lnfT#4cV`o-=!o$uVBYD$*;s;lwi>TIgoWA*l zLap4un%Kw}2Isg`%stwL0Y_LnQ|^EW5D8}4{ZCB{ z_;6Yo?#&hl<(m6Sax^5tXWN*@x0xgm`d^l4zy=w_Pk44wNL1kcnDcBZD+Yw@wk`}_ z!N80gnG{@%fuS_%TZwBJ*lhFY_Sr=Pjkt8C;OT8{kWETEU1(sZQB=ZjUm9>ACgnZc zR-HZxqwHJT+)-|h9JQx`mt_|+8O>-QFokliR*MG0z5b;i*yfU)=jZQ}TNnrmDPkcu zV!&);=#6mDcKxk|R{^^*uq*zp&6Q9pu&mekY?w{~4R%_muGW)5^`LmO4}lC68XQ^p zpOJvmxpUp)M@WFFlXdPLAc6bo!?_>MlE6TQ_=i94Bv1jK_7>W0ueDPMV(XFsetY*N zQcelO#ljyJSWNem>WjqxUGihw)EEDi>$2mzceee)SE2;RIg5vhwJfUz1G z3U2}*m}$7_95T)WW~YDnKVRVnO6M!?qIhn=_r>CFS{yf^nmk{)_=y|*+)TJQm&Oe) zALf=+GUo;zlI5n9crH*C`86`^9w*5DoLn%w%nqC`9Jpw}%?4Z(S^qm)&J4sVJ7ZQw z7{RiLvM~d`f%6qN-O6uY#M6AN-uGOc!E2Kp+sSqVxKj6nI@X+OTx)B_PuJ=z++{EI z#;m#nN)P9D0Cr_#|TCys-NQba*y%#{T6NY?EVLFI(J#6QY)H zSRZdf=0@SXGS@X&-~3gzMsf)r9zW|KEiwy-8-=Tv$z!myp(;qazZWJ=3zS+!|9~YM zY3how4z5i-S{U0?4;kWBIIllygM$q&bvHis!vos>+>bLyAy(-3;D+N2%+N^p@R^#0 zTCp1Qh==`&DHt_rTWcIN z25p_r$N%#lhLX}+2ZK%w!57RTarvY{sM23neWYywT236`jC3D_3;X!4Y^@E!wOF^W znBxdE3wczV1IOXVOm=EY_cY|Rc@*^R;4F-8NpPzxn1?s^Mb2FlS%Ql@A4S?HsIT(o{E~8O;~GH?VByffSRY3mYBD5$VC?3UvKM}5xKuR$KQ$# zZ5$pd@jS?h%CC^D^$mE?cc0&fSULES;hz`O@7wdTSf;4g*ggStD*S5m$qWAjv3FPG0*whO!7@Pi#XmG^f+b?81qmAqj^Vp-Bod&^)YW-A_hAOvbTrHyJ&%TXSd? 
zCL{F%F(0W+GHOYs?)00Zpo~P;cWJg5>S6Dml)XYj{wGvSM$bthSYRtsC?t&{r*7t+ zbdW}e$1>%gt4gC8)$T-9Zz=Sj?}4TLrZhCI9B{kr9Tm0yQi+haA)(h+-;WkbiKF*_ zT{Sxoh@d;J(lWHaL=?-#uk58Qi0*uPm10H_KpQNcbN*fg)M8k*kS9h!YiXZLmlFBW z+F2c0{)ug_KhaIBmR5@XieH z_Q4QlkbieaX$2QMAlbYdN|@sW-fxUz#xL=J7o3c9k0SX%=%u?Bf6fxXP|5gVXFma8 z`TmY%#uq`rsXoHKS3(F#MO@r_K1dj>z7bogS`h)Sy1fE2O~pW->HNM0Z*gFBeq7n} zmIO%Qp&W_Tkp$7)9a)FQBtf`Z-bC)UAM<6q)=21+!A(iDxGGEmoH^%hyZEVK<&giW zEI|y2)g4qmnt%b-Zl$)1QZ$e)Dl3PAXrQ|8zHe3|4cJ}&@y}kpm?z<`Ef zN#Mcr7--)&6O2zQiF2EaF<$oRkDW zUPMWNX9fYxjl7`P-X?&cWtM}_ya{0A979S$C;`-TJYVa0O#oKK%=Itt52Zqjg3_5hc9uvkFk)lPY8lK9^deg99!RnPKm0{`=Pd^CvU`M8 zBzzT5(&1Au$)Ce78vG~26FrWf5GjfM6xxqxUk?3!ro9cf`;uzVSlWWWIwI2+XwZsZ z4)PhJE_C6&4IcB)_7C9=b_|j^-eWj6txS+HpTbLx_Fb%uo5m}4d$oooP2xi_gCD-> zjo{BuJvZkr>%ukk90jgMmExGKd+35z6J#LWctB4agbK{>SS+U|;UuMI{Q0d}I47x5 zz7{hN*+Y8PF5Fsx>p8LWUGfVs=88gdDBC=A9Z>t$b7~e|I@Z9!UpWIyho)Ul)=xke zp<|QgJ4WHtxaesD_6K&T-)pE@8h{!eKYk4M_Cx#r*i`S0ez<@u_};J{geR>`24p+` z!02oEten~yT)V@lusAab^_zw!F5mwPy{pvo0(Q^BI?E>NS>ZXj!C27mZZ!vYRERUh z^3Fjn4X69o*Jojx#~poCOoty%4hlFNq(ga*l#C^u4z*(+U3geVhie66?DKK6kWBq> zGBthAo7nWJ0x8zfQS)`9pgCy$fM&)Z4pHfG5s~Ls8b`N z^&Hh@t0^IL=#8hf#!gWLtbG&8--{!&0TtF@DH8hXsdL>`21Ubz=G+ z9f+Z8-rT2RIx*x{+(I{A#E@?C!ht{n4XKfKH)Y7skZQ=92>%Wm>fm+H7~JOCfND;^ zVjYH374rm>x6k!*R(Lt+OGQo24MzL6$jDm5-{rR%3Hh13oRN)?K&8YDcb*tvpB(+&;R4#)Ya&+k{0e*wK@iL8`eE3lhyqtB$zFh`zmxuAXz+fLD#}BMHHa zFeE0Pi-ms*PLBO2&2Xs$W=}Z#XCJGC4sK=<(5rcqyIy5 zP#+UWW8Gx;JjDh^@(M0JJ;Vua9Sa)N_{t5Q9WQ0iT;T<&%dcBq`3S(4Inn)vssQ-J zd#F>}T@ct`k!=*{B?7ToCXv&Zgh5BV@)d(65wK*TqUq!%2IAKCN+>srgC9zXJ}ZAE zfQVd(to#p2VBB=_{=!`nc=tv>W`% zcbWp84th^AUZen`$Liy?U2DI;xN22>j=ba~K35+s{_xQ-9_x->b_(yowf(#a`{wKM zFPtScZTG|AW3NWdnu$iZ>Yn7QY|#aa>TmcB)eb=8)@G~ro+0>^EcDHU?+;wbp0fPe zHVBDEZYtNt`yq2tlkE;_FZ|88U`6Tgf>-&h-q(zFK{?u6?b>U-u>2dRqw)O#*ymvq z`|Z{r$W|oxjx}f$COE!%x@9#1BMte2zDG?#OYus*A%z*(A8Gn{Md2^>Q~OqLT>BSV z&fmCVfAl&+2h;`&~DIHpILel&V=eITNy3E^!6fm z2D4>Y3X^QT8CIa_xNgI!#tP*9rDeuHy$t;Vyj|E1FT;=5vG00;OK_>D+C-YV1mCmY 
zFpG9xf_GrB+Ku{Ucy0IeaXqUwXl{S2)oo=H-h15@$0E;&!WJ{L`ka{&L9Fn(?h{tD zb3jh@p(;DF^6n~kG2}qXb2;OxO&sW~`IL7ModY>NTK}Ry%Yjxua1ZiJbD|vPh0}3O zoaj`s8MRo78!1(N5Ur%}BGq*_y|~x>C_A-1T|rg=g`Bv2%IvBj>a~0ldDM}JG~8}U z9u^QnIvGc%E|?1;(XA0D@nc$?}{(b$;k2U(no3L5!GI&wUK*^JEjxkCMB^40Z zrU;a9RT+S@R zE7xlVD&Q=1%o5%&q4z~= z>L0#q_Z`C_K^9OPYByt>%nnxT7hd_qaRHI5cO`iFc>!^^zj3JyKR9R+k>?pq0F1s- zKH1L&fTt|SmG3!%VBk?i{`49V#2M`#F~2GdvboJu*~diybM_8H|I1?FQL^doPj%v8 z?|Ye;BS>e1$>49>~-&|ZZ`(}vX*7;(P$v;#Lx+3MFUwT_lLdw zY2b2L@3=|~4aA%i7XFw_1MjOlu1;<1$6LzBcgE7d2C1q1{1Y1JDm$8$c$Ef{X?yi2 zb!p%<^~wu|tGvarD41{j{WPhWLfpFYDmSKYm^jj1Kbz-R?KwiGA z)0zs71Rr^IN}URp(;PdbC{)nCqdwb%nF{orKOSkWrvSz^?S}>1+%bI8YJ27j8IT(t z>E=x&Fn+_)lQl&WY$!3cnH`q^F;Pc2-j0faQQbd}MxKfSEw)aTG$RoZlhnD8t|JU? zbx`vLE((F*u-vS;aw3T3|F_3dmk4gQ%+TeV1;H1_-9j0c1i|U%7lB-V1%PsN6$P$F)7f z{`)BWec-`~Ge(mz@2ut&%$SCUCA02%X#Is{sw#(A6zDMVoq6-)R60CrC?$Dbbryb_ zl@$nWorUdx+!_bN=3rE+NfpU@9;)&Ul|ae@tR6O_Rdg)CL59F|=lhGWp2dK-P<9DU z@6mWiY~1#7owp>lr^~SZ8dc=`*%g@lZldShkyXgk#Gmo$&>B3{9QU7`{yOyKC9&u! 
zZ9sYoA243tfbm5)gA-ylAuZTT=W4t(*Uo#iPSCpI~FQ`Mcpg*UwJPuSMXh3BfB zq|gy=EN}TnpfG|PJHet}G3SBy32PSy%Inz^4YD>J>c9_F8%A6k; z#P6#_ECI~Ld%teSQ3`%@@bK{lJ}S2RZ4Ua;8g2)jKh9lPi% zgx~J==wuIBw|&)fV%`a1%c3@?)5}75@MW@ysjo0LrkMvEJ4nMWI_bO8_R(?8SACU` zQzCdM^4^_RPf`3zj|pp|#PC+)^Pk+I;yAG9&uPLb3Cy&ZWeWe3zz2Ck+Y(Mm;_PbW zIsfO9_^-BV_QH2b%&Yy>)@M)>zg4=rI=U>0TcmS@mU*PG%;Bo#Oo|jX;j&^s!Y73d zQa3H*6-ivirq#eTB#EE0fA2SLki^k`+TO29wYDB~eB$5Y zY5wChOdMmC&7}L(%V;V+-&KVMN#-HfWWc*n5)JG8* zXwu30@K6-qZ+cBmGZKSO9bTlLC5gkHKbvko`63Qg&N{Pedr82-#pyqM$0ebgmeL(v zWhod+yK-Pncmw?Ls7Qy|zX7KB1-Y5UO2gHw1?`6=8Sq```m5lKj8C04h5 zkbr!>CrmswCE$3 zt)g59h&glU(`%Dv5YF|*dO`9RIOaU$FQ;Axo^~Aad$#`$`p%w^60xTm@iuz@pucTJ zBYUHQLmu`a@q~jX+>}O8YAh>Z4|N=Ey%}WENoJzW`|?1^4kl7_^U*EMnm~~R|9k2E z<0yk8if%YJj?_6{i)wbS6G}`+U ziFmD`l`4E;jDd0$g`C$kB-eV(w(GKhfVFSOr%c8P$)$+K^yZWC}+r&Nw?BLOEA=H|Bf z6S3PTzqm$65`LBWG+Wb-4O5e~3LGFi4o?z(Rno$a|J*du;gR9MbvJX-A4v|pB`c^c zXp|ifs0+?`A$I)a#Y4Gk-`Vj_z0>hwN*p*o$f1(V;=q@+2Hlfv%Nfct6$|T*v^ODN3RLCk_E6@!o%NhHc)WA%)6a=T2w6gVZo$E zUkJZ(^|1R(6~_9PWXzWXg>itl`v&z?VceeX&3f%7j4!KSqb<;c@nM|)y&zr)TkZO4 z<)kcx#ka<}C5=;YQs*5xgMKRB?~uvfyg>-7uJU_5jTXWM=5C|uTEdtuTIl-^s8};sVJtqUMYu5}gp(#WwDgz=VZAn2 zd%0XHPTS+n^-zF{nRlLt-#JCWea5E>6$AybR?F$!*n|9dVpsmp@NVix`P-CVT)nB%uXHC5YzN_I81)vV-N!eSeEVKZj`6Jmq z*Ls0o^%6&q)(D9I@@TDCjRm;L8@C$E&H_MbcuebB0xOw52PBvF~0mTtz1Fw=}SR4-Ey4%=p_aIn(C__)1gATUgLslB^BPj z+nIO%h!8B!$||_hAp}3uSPqSkg&{1`XY{S(>e=rZu*jSa*?pj`*GUnWpZPL!P+JsU z^_#nvOcR66Pmdn^fW)9!K4r4)pg1fr*s^z5hB(~JI0Tl65-_0hP_jaX3 z0#2T+C~44?gslCsi;d-yQ1bbKk|jea75+uYM@bP6+$Eu+u|dD^7D@PFkF$0Bq69pk z64YZbECI<&P3fUS5|H&hSvP1v0zO|`T`R1YfC;0w)Qz4 z-M!xq1SyZD%Cvp}!~Rpx6~wcE#`f;Z`tP#Qs+sp-ct#0&EuV2mDXj`!RbCSkVXH^S zyk)0c=6<68&_5Abo<GXJhSV zCUO+qGt>TeL4L0{EW-jA8hq0vBv)fndmq#{qs4mh@iE-X&SDo!jTZ!IvU z2UpP&Ei0P5XAR-wd%T>A){xbPhh8ZoYiR2_^hB=PsjbJ`_SvtYJ^q>uH@-EL&~;KZ zp<@+kTFFod)X|{ZkOu}cyk7|ed5OJlMf=b^k0e5CT+xj<OrY}nCur;W`{Has4cjqho);Un*o5Vs*4Ruz{LVmPqj%}m7$A6~HG z?0GA{&nz~q|2w0(*n%BjydQP1u$3L3F&*ZNvf{wUGLpwaTRAY>L)ND&+GKohPHj2# 
z0U59Pz7BeuLB?JWVk-w6ooam?|Wv8T%%_+D>K zgeE^30~MZHOLH=w9tz#0KSIWZyn4U)<#J+SCvEDEcU*YMjX|x-;l|4QZ%veB@Zcn0 zw~n-Hy!hn1-^c0Gyck**)b;J;!byc3**v!xL%Xj=cF(nuH??C{-(x< zO{}RN9IyFs)ZnpQmG=Dj`1VTo>sJME<;!%-lxRU*DL7HqaE5|=?eF}PET`aw`{!ME zu~G3E#-&ItPAYcEI?>8#q~O)=b~_6{3O-?6`yY^Bzt2Inwt6jy%heO6b{hy{;ZJKZ zNk;_nbtBDmt2jaYpnA7VF_nVF_nh14VMf8XKfK%~^l+UooDaoZOrT)ruFQXD{V7;a zWdDt%Llk^Be|KyAm>^ysakl0&f>_>IWzK&_0E@^-JYH=Vz+a%^_%aR$YB9oE>h;qjN`|bm5nmqr1Rni=H#5ner_BNRD<8d zlJSLWto3)XV?fBfh^L4c5`3-7uM+TpnoD%3*cytG_8coZyNU|#aw`OET}4TRe7zTA zD@eOFjE9f1f?itu@RHxTgv!;ucd&NNqb!ZX;x^jTXl>4>`|!gFbSima?(y9LWa(+U z=6SOXMKAht>=da*0}lrkl{hlcbUi<)`kn`FpZx)y4eNo0q?cBnb{pVC+POD|`oT++ zerMUz2@v)-ZO-7s43MO8a!JoDfw)gC;nnO!=uzN)NB6`jeiiF`_-u#zD0u6G+YYX+N(V*9!4)Xyw8oVhb3p&eau>Vd8=-NPsCp!-? zqHfY*Rlv_Ki?4JjX1IrZuAdH{$D%BoQ92y_XH@aCmJU0pYtbfAbVyO2+xp}v9Uk}i ztszUI!$oCx!$+kwNcs^Px$F6Q{;vfBYe6(9D2E6Np)?p!9Za`Q3LHVPf;f050tA=05z@`vlLe`zrH=9T|M1J~n< zTN0l0)8N92)6BpnVOW#)bjf2vP=WQ9*B1uL5gev)s>c3AQ%Va*uPC=75eWt_`B@Ko{l`4nooxv1s|_#- zKRbr5O83_Z@Uu{Si-R9|a1sDSK)b(%9CbY8ATy0_#8&W0R7|7J9<98?oHNK%Q1?>3 z(hQ0TrdM6@n?d5OiJ2a!XOWNFK5Bf;911?hJZ>?xfS6Jd7EkUkBZt)K?riHdBuJjf zDW(ze6$d-6org#`S*kA9Qh^PZ1al^yOJu{k2IB*9pV;v2ONW*Po!PK6?^a!nK@!f6 z*YLftkA$yuz1>+CK*SY0xIOy^37ArTC3D{m0v?t(QmarU;GVqyydBjEI7U8LGvyWm zj~j26{?<#tCnMj_ZMsIpO0h1Pek2n9_FomO@Fw96e=|Kie~|F5u02;$T1nXD1gB`{ z3ld(AF8IE30A}(*0K76l+fT`Xcx99g0FsG1xxmC*=3ZG4fy2EQ|cYj^U z$rA+p8R}V_pCDk<)Lrjwl8Bg&RkZ&qukU5m?|oifBjI`@g`iDpB%Gwqxkt~7h^^o0 zpC_Co;70{}#MxU{(5<1(a-W5lkVt$~&egy3Xs#=I;$!d}dSq7dmS#PN7@?0t)b-|3 zqf~7N?|OeN`PQENEn7mKzv5hXn6IFFTv8E%dsk7!tNkf=cdQ}_ofS{P<0~jZGO{*w zW(oO)p0>F3Vi8SrpX0V}UO;cEog7b77t!VW&1E##CA4$j9JschJ+^n~oKI1;v-EF77%B4YFI z!H9l9z=aKc;`68tg<|wYBnt{#PQt z{_ItgfD8$@{c(ISxUNG)jS^FQ*Zas%4coe@jerG}9TT}&Yv_1iWabm&b-bUFFm;n& zLApAu=QDXgK_y~^!@ey0r{~dw07V?#ukSqwAyELeb8VLy{%l-2)r?mp5-7$(zi$fdR=njKesv7F z#LHz%-|t3_=kI`|whAP)N%8$mOg=EoGtnF@Zva1S#hiDMe}lZei$~missYa{U!40m z0*qu>Z~W#`i#)cZWl4m!BVtauyl-bOqIWT0X&UvTuskP$?Or|q{~wJ=s_Y7y+3_CH 
zH}5y|C~E){XD?~rQtbtHM>3zU92*7aPCRb;$Ib*JN;fCE{FvbD@tBXLdE;Qar}qh# z?g&s2y{+$Yq#uwz*#^7_=?1@U8$C=6>jHqtSF~NX7tF}dU)$C<3?j~I?H%uDfu`pU z)QaD8;MNaoVcX^vP#0IEIdYE(^?CiP_Z?<~ea|YC^V!(pso1<-i5YB=KWtNu@HG-7 zZ}V`J{X~Fz-CMpN>RAEOI z;n2Bvi3InSIy1Kiu)*K?=^b_t*x}xv95osHIiR-@Ti2s|9MIZH#7)VX1J+w-I@}v) zho+;GdX0zKA$KFizbuRm=J$_Xm9HQ{-rLce-6x4~C`9*sE`bQuRhbz^C2PR$4SP^H zW4(U$MY+?v7Xfu))s6S)94PcJc$55R8bm3K-z_{c1;}kdS`@A+aMoicyerHhTIR*@F_6u!$-3^Xu)TeB`To2MmBZ>xtKY z0r~3~OTPLE0`>PYbhYhvO|~qH&To*jJ`41 zhMdmL@6Mw4poB6g(4^3ZQoBFxr-b&Q2c`TDprRKwD%yIRRQI5ox#)A3KKCHLppb8y z-TP3Ug2s)joi~c#zk^UpiAa08J`KPogq+J|Kk{M+p znQ(GRRnZtyOj>fYeLjN7J9xe=jtrswPmBaKXNSouDsq zOvF`iw@~ijB%<$lek8|c3QbmT+0gc33W>@uD~n1@AwLCC-kZ5Bq$~MO=H1r`B)rEz zret*#=}VGr!xV>6CGBsm6KfE?zHw4E>Dv%0vpRgv&~6mXW-ckz9-BaEv#nK+Dp?4h zR`vZAIfV|Kd{%rgdKy)BzNmTla9#J0T)t4~J%zGv%(Hc+u~1fV&&^}go2rW8w{iE0pBCG7|@Q?>Xh+%QuC;iYU65$OmwyqvWONQNVTDe$=zI`9_ z5OosG$FkyDHcg_itUK54ZD*k)cJp(D&~eoJ@r77i_%Pa{Q6n{I+K0?}29hPebRxCB zyEY-2&B)#o31w6+SsM2^-ISSnp;+xg5{xzY~Q z?0Z~o>Twf_2=V^gV^xi^OOESWQ6B*j2PK}-wFa=g{Xlxz)o$RP{Ob9q=;mT)7Lorasp(O%XKOwj{}`%iG6IW5fIGYx%m4^KR6b-CFaZX4zRh|PCE(z0>S@+ z+(eBlL4w26cH&YQD9ZRYvaRt4*j0B4c3UICc&4}>h9cm6F}NKkg8xm3>a!tOqcpz zkfkiDC8yQ}h%Xw~VX6bf$#SJE4|D@Mum1ig+XsQk^$D-jx5h!O`^>>L-6>FV8yz$g zn*;p21L%W_i{Q0vsKqhbGO&N4<8T=*gT>Od6f?LCa=Bi!uQo0Mp>4msv|8suWT-mr za{MevvJKpBeSRAF1no>=7)}DImZ5WwZcO0%&un8({TR6L_=y(r)F`-hRCBlI%VF@y zIpx*-!Vvg(EqM9oqhXMf|BW~4!6=BR+tqQoa2#kJ{hqwnoCW^=9#S(nIt6|+Z}x0{ zGY!Bs7e~F>Y2dE=M21j41TdV*vmir-5q(SJm#E8K7$Gq4OnV8vHPf=X2^~f@egv=|7T#z|VE_bJE&hkS?8`JYtp( ztU5J+v)&b>CCdk?$VM z_l$vVj%y!yq{aYGz1N7)y#XLYO!oM!_7@C=`3(r{%tgW~^AS@?T_~(K!$oS#C{og+ z!9HOoT46lRto9m5^J9sZD=rV9b`m!!Mc^0mv)tCreY_l8WV{WTY3l{tgI4sGJL4dr z=YoUlCl+v^Y|Wc&W&*R)>jSx;hr#!Z4>4m^&EWBm`!|nPe?(dn{N~|Z-6)60=|$k` zINFn{bkK``8oeQYUAmw+i(Fd1?^dXrK^f!x^95I?P=;uP`falb#E{>gA^dg#h4?FE z6RX-#T*;M{H@oYQr~v=->pKcjxbO0xb3bwrTM@3>?NW>q6Ur|=m~TLD>sO0Q7#*nk zlug%CV?R1o>Pl(S_=m(UrKL~DFp->oOYWI&7Wy9m00960B$tOf7492{5gPhNOGyf) 
zQbK6Rcx;8rh*VZcWy?rO!$>k(ibTms)5wU7=P0A>y~nYRgM)L9b2!G~=emCXz*L#=uDztikPes_XzG|lJq%Fbm3iN#nqW*zcB0}S7L6l_tN1&4Is^u`BifLI zzT>`HQZAAMIY^J+mB0zDU9)=F09Qn4i8s}o;d@x(tLOFgu+XelbNlLVP$@2&`&}T8 z@0HU=CHoPT@0eQu@c)au&&q0+{VK=gL#3fDk~t{j{w6Eb<_Gv4xt42qyAGtbC~6;O zwZr2|GuHt59xyDQU8}9v34uQDr92{a@NUlPK~U>EJixQ7InlEX&7HId8P7@R{jbzl z)n*DSt~pDEAE2Xt1a-+kl#Ys;gOODM>_;M9^y|Tm&4{e z8IpSkKEs%HXa9b!csRQJe&gK+gxFwD9?76)c)#|H%Gk{T_*&54j8$Xs^dHm42&s?} ztjR}=q(diKKOc;C^Hh5i#YC?wC?mBY;L{iiIY6y`XuiPkr7Hf_~@O%N$N= zzci;V~8IfAI5~%(t>%ouIPo<3)5Ggfez~%E^cku$~dMynk>MWcrfmNup$U zDCgTYnK%T^^KWlu81zCaAFmqWNj>mM1vQEIzk-`p8sST$Rk&(DEnHEc8#y-C8!dMa zA`-73Oz#`Popu@PEy!dGZuDs7WK-oJ2dpwDI9hC+^(amejaaR*K=C6t3 z_xXg$#>59d>^iZfDCzOk?Ga38p7-y`oJ6G&uJdvXDh4oqivs6Sp*VOO5cdF?7>~Y~3dugj&z8 zqx+D9@ZztSr}Dlfl-tO&zv1~TZq~DjOpTtv;=+#2^Horg8HDA{v0W^?8VFe>A-pyHA39u8nXq3Nz{{Q=mGYmA zqgbJz@AA<}++!-ME^Ir71J=&Mi_JuAI#3rC7F38cTifJMo43GqLb*aA?+EY|{`Vze z&lIdaD86Z5Jr&NZJp)=6lTc7wtJ>vFf)2PcbgHcl^hEsE{Mw%g)F3i(CZQJh_8+>G zoZEpC8s1y@3%XFXq|c#Qx)qP#I({dFzXE5Q4qv~n7!E&#fA5|0s(_LhpBcG+S7Xt-4@#M_#T6gV>erI{`fg@*Jppx@bb-8 z5u-0KJr)oYx6llK-_#{b#SK9Dd+*%TJ7l=s&}A2XVjKjwk@K-}434pn2OSh4!i+BU;4cGsJ900rkEMd+DWbv-@gW%foEN9J zsuc9=r}~E^8u9i`zLzD(2T`y!TPon<2rg4PX(n+)xR}`BM&@b5!)((X4vQJkbMJt~ z+30?-?33Qn(?12;+G{0}#F_AT|3Bl~H)bJ8_~a|!t26M=Nw28hf)32bB3VLeWEgP& z()!S|2`HL1B5#)o$Ugq^&y&x+sJg~=rh8}@7u71yw5g2X=|0!mlPQB}U~Y0Q|4KWK z{}zy}VrQYQ?Ea2h_iN$Uy>l8ENrWt;tkm*tBj6%?$R_jY7(_XgC9qfKW@T4qTyTO6 zH%T{5DlG<~Ao*?#joATe+%omI6)Rwa$Y;NQ>px*am}B^X@_LN%wUOp4?#7x?U+4U_ zgLulJUe17cj!My_R&JHum#S%SK>XxI|vl1yP;<^#$dChKb+pO^6s}^RI36V9IuRam&Z(lfMwJ3 z3{eK~r%l<!9q1_jJfT_ckHmIu*<#AJ`E$k3q8>VJW(e2t^vT3rF6TLb}*p z->dr?@acxj5fwfoIGMw_zd>jkxiz+Bg}j-=d8TRoedi@q89C6T?ZrWejjp5#nQ;)p zT;n9#^Vw(^`7+o3F$+(JZgO2ip<&sKO7`;9Fh*+Xt?pKC!dr^NT-)m7Fk{h?Gg0~{ zun$gTIY9;9hU8KlpZmVGMoD6 zniN~)~9D(xX?Z4)cRmIOA(!PkBtZGzl z2rlClyJ_i;_Z)=#1tt|8k2neZ&aL0VHggg5F6wY@+{#5bUac(l&xVum{#sTx*RN&# 
zc=@|e-1Y@*<|dxGb%2SJ38MaevlO%~_i(@9)Q3L4H~bV?1S}OELB@sI>s!X?r8p-_^jG#qFNF!c$W*Y}31nL#+5udsUeKXpw83|8N4a#JCK&bS_#V(u z51F=a?kiN*L166lbyod#aML))z$&I5Hmp8y=F4CM1V1bx>8Lfs$e&y1UYfN7x5=l@ z3PLaJ-{Uzc)iVhDLzjh*IFEw$-nb;o2UD;!|Id5FL^^bvl19P?7!aL5S5dlg2CkXA zGwUaqV4#z&Smwb1(HVj8yFE1UzBOBV_re6QdrLyY86=PxE-4wV?gs(!@CE-5Jz(Ql z5fwDh4KI3&-tB)(1k>=HR^IkQz<-dJU^z_&t(TJ@6aSlliCR`Y6Q@9rGx7FXO)3~B z@tD58HU*@U@3l>5D8S^u=S>qCgGj6E^}G8S}d`$)SSGk@y5@;~)sMV|dnFjqV?#dt6u`X*Lcg9t%}&DCz;KY}Wh1 z!ED$zl_j4^s>ZD1`KOgx9q39IQVS04L>H~aHa}85ZgAn(@*H>p+XWs4ZIo|^cIAN^ z%e0kTv^)$^=A{99*7sVT9RsXorq*uy$OQ4R9K)|o3_y#aDf5eTxO|M2@7g{Aqd#x; zJSrrC{b`Qo?Y%^(G=3a65zq+{{p2Ee)DFF2wNAT5x}cDua?nPJ2%NzU#{X9GdN-zo zM<8|xX7+hpsu>vsHm^~KwILA%p9ZY)jau=BJ1S9oGUMRJ%;r5WVjGcPY0-%2IfxF4 zUL1X*6kO{U@eg=usC#c;?pkp=O6TkFq#mN7s^5j{-j^sycO@K`93DW4Ap5fW%qnz~ z(^VD{`wM4ms_G;%2OwBwQFZC*1l$tLdgzu*2Z74J>GC-Yc+GpP+&F>(f4anEWDMyb zs9pTeHiiODt#dB-)~)!tsB0&4`#(rCQTQM)QHJkz^@N2#_F|vOwr@d=B;>X1cWFB^ zj>mW}9n`R-U~H~Sxgm}rzm|wuvD^rlK>|2c}VA;{{N5 zVqmV94qs(31H=6+<-C2TF)nEvcZdfKr|XQ1>K{*F*H88B&$CDHh3Y4bXq5q!BCU2V z``v{fPT4qgn6_e=gemdZzh-=x`AcnhcN^;P)pd@Ib)%bAu1#{*0CuM6M89hqK}q`R z-Uf>ad`-GzJGOFu&%Y6}={g3=*sB#Dx;TS^WkU9)r&f4>v^U^g#T-VDCEBEv&*NSL zuG~$>7Vz13-e)PR7jPhY+f0i5Jbtg6YcfF=y0E-749jQGp!&x$<1PaY3&rBOW$7rM zy773s3KdHgqS?}0Cb2=f-mR)}0u@_+8pv&+;&tnT8=`O1(cv^>azCAk4kvf|Kj4{1 zd5i0(IyWyN!8PZGQ4R+|xK3i-;Y?1#*E__okBT`77fI>ODl2|}D3nI8_GjZwqxd2z z(k!}X7ylTrrQv$B+s<`QhEXo=jIdKg6WV(0P1v*;hiJe%u{w+Zh4o`QG*_2G+7aC@ zMq)ZB|DC&eKZAh#M0UuwB(|d^E54RTei*eh2UCkuCs66QjY!NDI`Z~ieALNUnN#4d zd%Ld9V6anYpndKPe*Ms-lEh%5PX9BXMHL462-+=N$)urmi^1QoT@+ko`z9324r3CF zx%ISP2VRP}I?36K_-+G5IAF2_UPXH|(k9yA!DP9|hq8XScSM-yn%OYu7soKCO-OLR z{b>fPa0nzsjrW+26JdkTk+CP@ZIGR;BF3~WhXs0ou7A8c+ce_?N{w8+#t;ZxxPoKUrRW$*;<+^>NTPe`jZvIX8 z?I`T?EwTApLjtja2IKSEL!i=~!@D(W04}RI={r>n0AEz-SPB0ykfMf#i|R%}{^5f= zt{zm-OvqB&*T8_9JSV>TyK^9ZV%$9M4;!-FB3|(OaH#c0b$nTpTua9y`B_0dcVkkEKV(A@Zr$)4DMtv~@9dyzFg&;xdDjUMPjx 
zyCUlUeJlrZbaB!1q-GG9<|~&O?}zIuKrtEL;g9?vaUG#gTaEETF;hKxLI&r;a?l3&p`1VS-$s|nDAq2?a}?33<#{5JU`+w z4VE@W!ji>wxDcR}=HEvHA7x*>{CK6`PF+4n;hBJS?;KMi$ zZ-K-){?Q4UKd{i(z2tcC18|y!oSJ!i7g8?g<(mZNKplB&ZCg$~IPg$+iiWTJn+yBm zpY9!ld!&mB6e11g)t}5!SIxki43X6;6D$xp8$3N-gd7S%=0NQKzFuSFe3rkgZqz#az9MGvsv$Rp$q!9Kl;=q*9Hgw zNy=@1&;m`dPLmmaE#OW)pR!=v3O9d?l1QK1AoRe2m%+ z@o!U!d)E>n?b6g`dASuH?>S9SnHqvMC95h0*O7sPsVZBxeH@;P7vxwyo&d4Ur1IgP zlTdQ&)H|tdli)qFofdg&9NrR(4pbTyqDnA{fb4D_=- zWWaVH!2kWS9xOYP{M8hjVYWe>e0H`Tf_+>cyx36;Rhj&Wena{A_TkZo%NJWw^5c|= z!~Fq#86<33QZ$0WE(V=|xKfk#Nj!LX?U?2N9lj`ik2e;dO|syr9DLSC-^nA(CgYwf0d$3eW3 z*7WDK%sAdo)jui{L`82w`}ntVbUb!qPrB7tIv(n{V!=?R<9fE9#2Y&rz8N!1FG!-| zckMl|ee9{Y#zl--)I!Cuq!*WphG|GV+k9eNccmv7j>V`%FfcAlx4~SOiM%1iR9;CY zy7TkxV2Ci#MWgp;f-xP(lk80mrKsq0i}_%!DFy9zwuD`JGK%fBy_SEY$oRDF3x~kd zQB=1tD802Xj?%B=g^!XakyX0oaby$~_53%#JK99W(_1EEO;e{(J*`6O+17F7|4wXP z6FP*`-Fz-~Hl6q(Mj&oZzY+rp&s>Gp{lLR<=R=pars5-`s+*=mzw!4RAy=le4R>`L z*tYH?;n^$`p8h#1KCr7@wY6>rRn~fiJStzntD0)p_)aZjs~+){V-N?SK(}&)Gn9j% zaPHfW*t})zTgA0rJdlm*$F*J7@-ARFziZ0dG8V=r05@gVELx3voOM*5LB}qYKqJ8! 
zJYAg|()Dl#m4a!{k~C&9*NjGmoh%e0_ID=qvhdrh+sS;+a~QjG)pShm947dFO4iSx z!wV@p{y4hNq4ddjPZgS3sPufQSh;f+gMyQmJ#Nn+b^p=GnjZ|j<*g)n{0$uqJcIIz zO{uu}(X``(J_VI7_O=OrAY*_K=A1i1!h0mKED@#kcRS)iB6w4(DAzJkD(G_I$AB8X}?gRVd7VQNAdMjsFk$e zGf#gEYZhBZO^Sw)c8{D&ZR)}8-+dd*LL1RY=!78Ki-4-l%0sRLA@F;$E1%Pc05)IE zA_xZ6@OE>AYJ_tm%m-{YoTfBGHCLg=-B}5{^64w9jib9 zx`M1EOM&<7+C>>{1UX!-mo3Tzt%(r-^L%Udfw z>rdQy{;W>;5sB>qMGasr&|7uhuNo9~IsSFCZh)Us?>_Am?t<6wY4ymqVQ9RpEy1BX z2?u*LnuZzEAad%cLZ2rKg7vqJpE$V)#~q^1ytNdqmlw)^zuT>CQP2 z{O<}?s4Ui5I*q`D{&`$?%YPHc$F93DmzPi)f3j^sL?YTdRA?kTh-Xx0h=iY+Q8sYbz+w&CY3N*oxdwvK#&1xY^v;#N*$tpYV9_koJGch9b$+`*b z(+u!(Pntkc^&OAq-6zoRbIO}_dKA=}`4K6)eGIj<7TXVWk?^30g|^|B0W6BtpuN4( zh0MHv`B8rLXc;2Jy=gKVKdg8AvM`(j-=z2dFq5kR3w_Bkm*r+ywY~nFX;2&F{dsQn zN}>fkqNdj0_o{?4jsHBQrG0ToMkuzCw+S`f(mNh}8^ZAYyXhh(R4l$M>1r)BgRTOb zrv$hb(AqEO-=g0Vrlm(Sl{az_hWqBs={3t3==|LLVJsWDFT5~h6X!5&xJ5|(AQNvS zYuCIMprV7<{N%&EW`BZLQTDdkFr0jP}utM`ULq5l&K6NNL41m7pm7_OTsS@ z2zR(fIPeF`27{^`^#1}oD+}GSYCu8bigRRQ11M~fp;5-0V0XY3Q~9E1C=e8)3r;jb zgt9{5HbNyxx$ZTtzw!ls`IhO~X;=l7vX;Z00960ESG0I75*EBh3r&>5K2EOqh&Pps5GpoB#NYr&@!Twtjw~?&WI8z zd(S7^LFTa!&N&W_y?5&0i~sxYo9FYn@B8{*TUz!?ZJ+*wRlG0OX~d1 z$Z-0~%JH-!GMKtq+uDwhAzx{l$%K;vzJ;;|t`!u>VEGc95jzO6?(->&%fnz07Cq4U zYaC{a);~B6%z#$x+jqA(7vTKSO?rxJOF+8q!+T3<70#S}(_r&@4Fu=U=$(4M2G-du zq#>7882`|W&t5M>cSc<6lfDHQoXwjLU!Mg(hLPK+jHckvru8ajyHR-L#FM(+We8rB z^S}QZO$G0%yP5ZD$?(EvW$y3L0BFv;kL=+i!-sFuN7dyi;3a-+o%8k}P}j#EZTB9A z_=qQ&3z1`B+cIdYe{LES9@OYH{hb8|+1T%nLJLqnGt(0uw+J^odoG{XT>=$GF3;Cu zOR#jU(|HHaA{hQW@lwKi9zv#9Y#Bb!LRrCTwSdSBu&(^x!x=OMVXNiqS}ju`z0;*O z(su@?HgDON>pTaV&jm6~hZcab$efFGY#D?ZR1fpbtN=wqo-Z$U6{h@M&Bs|+VdYRz zv9-i9_?Mr${8nxOGU+#8SjEi1bO1$cch@L9ZH_qm;nE=7FrDU5>m>ofLw{*sPA7aA zjawDA>V!ByJ4-gNUT{`vVZH7^fwO(Nk7BroLE1EtI-fBH8@q&#xP?u@fHL1os{afy z@9e%3F);)2{JOTjS~JkUB~Z)Ac@nakLiD^V=^(zjZ>L;86|QwnfALN10^18Qlls4^ z!I+hi^OtS_23)g@_q-sSN>eAP5 zX(TN1qlC)t>_J<;`wHa=jRCda@1LRwBqhkN(3qQV6``a@YD3zQL0a4mZQ(FW6H2 zxgs{U5;04Q{M?`o>v(Q|(jO<`?Q6qyGWQ_<*?TzW;*Jqy>W>@qJ3fYd-QCX0+7qbT 
zbJfgEXA&Fq_EWiUO=7c^i-M2W1l9&d-nq^(j{0L>8sprfXk7_O>!riEZMty~&e2e0 ztg6JVl7dMb@}Z&&{pcwx5^1a4i}QNYHQb|JXvABP5p}f-5Ba?~lAO?ukMqZlzbPhR z;pes;E}mpmj82}M>!zY2@elWpW*V+c#@6`JhcKUQlZ;RG5Z>_(Bkj3HL(SyQV`u6q z7^x=xtJu8{<7M{O&HU-aY_|LNDWycb|N6L~<@t78F~4{`;!zJSYcVf(h*R;%{#k9( zA|3zy$X#H1G=sJpx70q|Uce#uBu4AZWz?p>u3(*8#ighoI=x~IpDwc)=drG#iaBjU zpSXbCmNBlusxww(dF4^2jyiMP5NP?md^x_8SbR35x_ z>g!k`#5B7Q>sG5_&x5i0Nv~Squ+~mh4J?NPQ*N#OA&KC{U8ccemyc;+=aDDUf+8!w z2l+j3+}!%_6F(xVD*M#-G`T zH~M_^8Kc{AYTJ2rY0G*XI&e>hy}J;mAtl6D}jzdfHzC<-ZQ&x?hM ze`5I!i4>blZOH1ATrinQ#+8C6H}%cwc)qCS{)EpoM&v0p**%=YEgwUEv3;J$-y&bpIY?4q>Zy#TaSRCzi3#2*kBJ~~FZ}{%YBdB(ZYLC&e1QZd zlc`2*nGsMGySbp$wruAj;wOhW3;zidACvk+)@xHCd-9y~)&QQcS;fJAm16Qs`r z@hA^r=fE5k@?K8SV3-99$;{_Pve)jY5&lFq2PfrK zK`?iQ%)`6su;f}Smar!j#CQC;Az+yZQ*R6Qn7aQ2t&e)@boFKk*GM{Z{#`ei4Rq&D z$Wh>)WMiFs84dh8*{>Bbjlk*0H;)Jok3iAEeC6%QBXCNUp}neg7($tMesx$Kf;_P( zS8jfr^G#{!m0~rqOb7Qw>A@iEpg^9K~ z@U+}F9BUgBun2Jw{EOBvFh99o8=X zd2I)dkyOs`j7sP&do|6wza6S7ZT-IO9srvD*7lN+L15)i*?h}#7{Zd9-bRVg;pwno zmyX=M=bQvk zm^&+-YS9R6_Os18+-<;q#_ePYr30?nS|(e(>;|E%63wicZumV@dS$M&6I!>*ehju~ z1Cy^=`_47g0kz37QNykX0(P&fERMVdyJBoAx1cd7(KD=J5FMy&C4V}dh>iUIVRkc$=ZK?IX9YGL+k_P=YIV!+sfhi-DshW z%o^OIIkuIhk$hWQ%W z)KQKEhOeRVcSh%ibE{Zw8ZVR(vxIZ!iymro&0{)8W5>YKB;M0e5ueK)LU-*vD~@9% z{JNFMKQq&aYrfqdqk8l3zISoa0o~j9?QdxLd`c{ke_u(JF?<1Q$?Ed8H#6}v6PJ49 zYAt%*ny{Dq+=02Zerl$?6pVf$uwQg_7&)K4xN^p90Yq)vZnT+2{abVAPsUE8olS0tZqpbxC=Y+q*gAyoj<3=p1^aQ5$Zn=aYDI&f z8%~YXa@2eNSH5iQGk&rsU73}Mh9z0SXPlo?Am>bU#v(%s%+#_kD4mOgn`%uP?-k84 zj_qhIr&1DH`P%KOp2$bn8{B(hgDY@vWr*rpP(6xj|KQ;{*NO`B`h^3+ohT7#e06}6 zgvC}rM~BtONY;O9N#den;PLO%N()pxS}^p)JDZAe-MWMoehTWC1YG0$_x@#xLl(Dq z8%BR6vgp1=q?;4QyKA$+>uO!I@cmYBea54$qBQ_J5}jFSDnn3pZLICW%Tf4kX(CeD zI}T}|Jte7<6R?9E`Z~*K4E7Q0{0`I)1I!-0E3iU=id*bWcMZGY1CL8W`=usGzhL92 zM*j&5{cm)cOtN9Ex_&jQAs^-)jrDvvYQX;9(Zay#Hn=~{yv6uyAEdk)KCX9e5H74u z_}*629M}?+lVdb5m&Tp#hmQ)L3zT$DD?}@NI zi!fyNHx{pVcG!y91)#F&3mH)?0F%S2OB0XVAZk;PurUV}BD1E{S$oFd?3n1{sL?Fc 
z>`{)8zOVp%b!N|dix)v-fV=lb+9H^r3*MN}Spf0>XgM}wbHHY9?41tNz*XhOo=zBp z?@k9zHW!RQXf^A854&L)|3vC7?;nQYP^Q!2_vxVYimzE%bPV{k)@gd&r|W{ip~xY4yK-ZPbd~&AXOm z-*sV7T+ZUvkxnF^tK_O1Xu{o>n$onRg7L1LRFc0&D@5$#R@B}@168fpzbmVzz@;$6 zNUmZYn%V*bXm6IFK~6g5qQWw4_M)gNCojT@IM0)noO3|bcc!0?9R<%lVh?+gdLTc7 zHZ`me4L@8T+`Zi2hU%sBrTG;UOfb*6kQFh6jcVol{^K6PU85%Vci56KFsk{8ly(cQ zCcUk=llKWsgm@HG%sQYh%SZNx1r3TBFDiC_7zfuPZHf%jKi?TCTUV~0fqjEoANUW= zKuVXeqt)LD*c$M*@U<8nN_Q``XIoN1v-HH5%L83-aNC+_okktpySm%@(b+i2iJ;s6 zI*GWLPbdg}*^JJHE$Kxbop{XR=e;X-B>cou=#o&_kC``4F3V8*a0#CtjmYUni+(}Q zwnweV-=6xh%b^VKhNr8HuEv2=NlAE&QXMol#y33o=m5cJuY)QUeZUlNR(2wZ40(T? zpV%Lxz#8Y=x@997EWTX$6}pWKPu~g<`*a52-iF+I=w31uTf`GM^C<8x_QvyHr)ls} zTE@@ozzFmX2+GB!jY76r%Q5E3G4MTUue(Ql3@Uo7O)bg8U^6d!sacT%wY|99akC9R z#4Z{$|+goHw^jc=a{590YP-}Jwk4&hu3X%SdX-W}Q-ZD7W!qae|GW`-U(0{!lWam|UvHzw*{%3TC8=N7 zrW5%{dTTxkBy70HHq&4@faQc8>De&@=<$r}&DbRpI(uwBR%hLgd$$@nI8W6e$HOdM z34vhT7nk{C-|sTGELl`FD%t|(#luF-&Fw()QvVXG*8#}Vqo0`A3fDqnmaQYJLDb*K zy+J1g%v;4wclTDH#0;Ypk9a%&)t;DDkr=?BJ=+7b!Ui!xO=LF2W*B>ExxcFR({YLA zkJy7TI;LY$pU65LKU;@|(PHTs+{zjm#7)Q8#=B2n-5hvrZLsek?Ol}k5d`Z_$Lv67{IZZT~B(Sl_j!h1|U6H$BA@A~4YZfqa@%T@3%UnY@{niBY_Xr$p;yr@pY zZAv93?jb`+B;AVL^N(AnOpi6$CDHKfVI#8VCn~0T#2dV2A|t_1=v?#X9=wzPXrfY} z164Cucd{I5LpU8J9&^7HwaR!die$B6RK|e}r>qWSG~fM8Ik^YJexJ3E$n8h(clv=> z87LSUG1YfvmVzUmE{t58sVMd%Ykijz1%p|shpb%&(Ar5?>tIALGK)5<(69XKhusdV z)52}YC3)~?`KLx4R6D*|TdEdw&d<8^^^_yGOU^qrg%a$u%uKnQo{NTQ8uVekOpNXe zlWVF>N0sF|k!y9CC_QuT{AI-gWV}Ig*)CO%!q26Xnwtn1RUfUxZ`y*18~*#frrYt) z_nAZ2%e(Q?Q=N8B0TMce`0xZ?CE=Ovfl&+p^`I1GuUhAic9h?Cs^9zdo5Cb zB5(ZQmQ{@~)QY=F%??Y4%~QLh z^^-akzFWQI;$Iqoy(T{{RT}m{_6{RWRvjYL*VjK5$ZmiIQhD6IvNAZFC-GNwM+OKD zW;h*D^96$(&5C`AAF#+`LeZ@=62DcJgj||&z_I;n_gViYK<3fUoaY~xf)ro0$K_x3 zuqP#lnJm~2YulW{Ssh4Fw$|fA^ryg;-BAIe0yJpu+ZB-ddI&ymYJP7RJp_p}htCyw z(I65&_f-c`p~)z0{67^k9BGjXTXrMCMZ5R<3TJ!Z#qulx1A9QLyx6Mtb01t@PpBPr zq`?3C0V_l9nPR>pAn#JZ`{u$ZFl{<6f99Xx#P#(O$NPrik-7mP*PROCcRr)SqiztS zYa0-%2%xJupDOUAx2 
zgJ)9n7kj{pYf_$&MuoEl|J1sE2u|_8^RDC>h9pA%K%c=NkQlE&5j;eK4Tmygz48W7 z%=xl8lf@Pfj9rqyH(iI3WPErMIx*Fo`xtK+2|ed0-`m*sp|H}&i^dr}SpDhfs~sin z*pN`e@=&H3Wf%J*R)17us*uP56ONzA{n6L=>hS{1vu723l$eX^NyQsmi?Y#Op_GyC zY5u7nmbba?o_g62Lmi!&J$}6q^(RL`P`(3HR@P^@HIkkRvK$}!za zv`PAL?#fCrzMYIU+3H$`+MWa}ha(NRrSaCetE26h9qwHkZrP8?U&Yd0MyRNfCiK1Y z#t=?PlDWGahB#aAzM40K)(piEG~>c!SBJV`LvZp_?zn@?+eT0KK;uT^UYl~VQq+i zE83}2b=dwmedp=wpLo`+n|3Xr5T|2y+rq!0dJWLN7c?|OY}>7!-B+kksL4B_vN#F_DkryF>P*2Y@z#O6wNpTiV%>P7 zFb*O*em+cVgP`y+kAV&Z?HV67V>;*zC1wt8(7^wf^&Zm> zd`9!2nFct>ecW>LTMcN%y!!pHtP;4i9ore93euPwg84NGkj~^L`AMc3Sm_;$xnGFz z#no1~+^QR1d_A!FF})9Nvj|9KHj`mJUG>0iaVk8HxXj{3q=Nh3O_v%iHGAd z6}%H$#c%OYpz+J1L0w2cNWCq&EXmOWAyy^KBSl0|?%vH}xbh1Ih91>)-7kkF3(hw2 z;8Zwm6U@sV;DH;)K7XDaPs5(a^8#Xbaxq0QdP>VV7lX`5wFw8(@vBB?kAi(5_Wnp1 zj-5_|$s^5r3cG&7%I9MX_I-8m%)EJNYP1!&uYdg1bGQpETuw{R{3gMK1o81!FER{9 zl>~6}Q9)k$yhaxHAb1pCy_|bz5O#lGm_5Bhg^Ur^w|VUp$Ra;iQ=K1xC(0~NH+p)2 zII`5t^O6WVHM=4W7YJZ~qwRI+hJ>D~@u-qNN;iSKJDy!&NeeB5w!HkY#TNZ3N z)QuXE=i~2-5O7M(cEL2?0=9E;NJ}hN!|QW7(&IyIKwRQ~SmMzImkdHT7MBNAeem3r95nd>?uXxY$6Wz36+kaCVKC&cwfYYrJ69bap!#TrZOHA zIj>k$BqbsFr-3%#!z4VQ{yvc9Su~33CbK4RxdEZ2LG>nMD(Kkzwy{nWL9pu*J5x72r?_ifo6R$ydwaJg@qo5)?q2OWGd3AMr5UdCY3-ohUrv_b6g1Gz)$BMD>FG z@^LIiNOJg8Habn)I<20F#x)X?VW{p`Fs|R4l4Dv5MLNHjIbEwkrMjf}ff4}}#3uu$ z_f~`W_fs!eLJHv}^OnP%`N;@KOE z+Ij=6NUABbdK%DxN7^RTi4uq`M|~7kn!Rye$RS(kMh@s4y#CX#x*Q1Ty&>sV1=8g; zzb(k+U}|C1;Kr5@N5Q+UpfMDrxlhlvrGCS<9!idDW)ZfgosYPqP>!v9(t(ol706*Z zd*`xU8FJ__*2Mc1Bj4@oC!K^VL63*mj%0P_YxmOfsx#ga7`!Ual+xt7Obl40S19q9PxY*Yp(7(0PP2FD{>k zP8Sp|Kh~h3fvw@wSL0N)bv|%c|1BB)LP|dH-0VTuxD)CP6)m_(3`%n6{&)BJ|Nd^G zB*Ho4lPpEN^&q>cS3rLmEL&b#ldvRW?$1}Vhb zaJ7LNA@Kbmdl@j6c*x|{C*c1800960ESPsZ75?|fMTpQ)3hj`Y29;EYj1r-;OX;J? 
zibA1KRAwcGBAe`)?Q(3cd5wFy=5=k^B(r`VkMHlV^Y1yY_j$ixujl)nPDV%08WGRT zE%N`ZZo@NIe~gZ^QPG+p*v?=~#h0m}>+`25XckvF#`~OvBQD0O4+P86OD9E>^d}B~ z6!9|6$V9-3Qk6`c*iUHMcH*`CWEOmBpZqr`lm_Qt^6D`j{SHs=t|>`1so>i-RU>;J zceD>+eCikR3X~jA1b&uH0H>RtQGYhM7|$|Ed!2&_(?^H zr_8+{M>exwZ7OL;BLC#P`E&_R6FAxTrj!D&S54NY0V<@1%CV`zAc&Zle&BQ%gHNZm zyXku-fbHGxeCefMVAp!ba9L>#jwL+R2{apo$zgS^pFLd=bUvj>Gl>Yd&UI||Fi5~% zdM6t!4!7bbF*VaOTl;Zo$UXC*_XwW2nxf<4KY_}t0bJzdX>|GF8Le}E1~pQngiBMV z@UWR$O@P5TZvWYH?7x-)R5GCWetXi6<&v2)tj@W3hseJCTeJ%NXBgl6c~K#I*ds)g zlMb%7Y`em?4S{Hvptd4?2pAp`s>v&Z;MtUT(VTMt*p@h5^l3CmGoo_UiS&ZLSi#w8 zmTqX=RG9KYs|z9~)^?E0x?uTT*!n@~F3>I7)g=;21vSM$-titXBzo>*vI=X0i&TLR z3hM|T$m1V>Og%^TK6%2@KE%jtM~kK%iD*q4NM(CekK{q#INhjfEZJ^oAR&{7LWjd; zYs|u+Y6Q&&W-B1l@Js_YXAAVWrI^a!Z3ptJ-z>4!RA>|8xMXicg)sUH?V?T!=>K-P z5~Xc$bdYCFiT4T}hTpZEEETJx@HXjo>G! zm^ry?<%aYyIDL$awz$&|f_8KXiM;wQ48pEVVP*~bQd3C9u%g5VA9#ygMRVI^r=2Ldv6)KoxQW%omY#M3=dU2{F*SaofK=~PC^yJ zX?2aGBn%Lw274_w;$xFi-ioqD?0jkV{(u7skIFV(RAg+$00l3XjwT8UJYnu^U-dp$I$E*IM>sRGAPy#QHZKgH_)XALxXmt;XT0 zzPr9?Tg{xvCI10k;|CI)Uw_A%)MaFz*s@9VkPGW+@fA|P5B5J9nH_i>HO(NJN8FE?L+Apvz#IG^1?8h z9o}|*(rXlnk4w%`sG}&qU92FyaTv2*+}^x%qT_dq_5J%v9r!&~%zco*3il3ni)%Ww=hF~yF?zbi$d#lM{= z$6Jw4Xmf*mM>o>MS9U($(~k?kF2y$y`f;yme8R$p?)OE`=#lNa@xPhjx|;?NHHeAV8SqZU5fT8lY_V6iN8m z40kIm`0l1rV2@@V~*z33~TRC)tU43NK^0{uvv1}MUnoR}cu_Cok7s#Nh)5UtL zuMvha10$70%b;VorD_l{4$^AoOwB(=;;q!B>W4wOXydb|tw#&->FJ5HF+PZl0XMJG zjq=b>iQPe|CkBmo%$_ibi-Kc2r9N2|m4I%^Ip5Tsjqug=xfUCN3FrZV)#-=wFRvrRh<$K+XKunfvrcGMEJ;Yb)C$Cl<{arMRggNZ zEBfY98i+`I>vvlFj@reNnp;|b;)@T1J%4$#kUjQwNv?kcX8%^R+8&<>PHv;~9d9b& zMi8&y+lXd}w903Ru;_sDwluJ$_W+~uA91xd8rao5H>&IJ2li?M#Z`Ga(0*AEERNHm z#Z|+M*3}Q@=MOLRZ1kBs;jVU#OoNevj9pHwG*GUPpXAT!fdVJbNXLzSTd(xnxt*Xu zBf}0)l5sOsmQ20Q(I)}}GKO7jD~0(=+Z$>u1YjB|_ltEX2IT_-zE}TMLPU`1Ue9h4 z5H+j4uHEl|e6hJpe~)eGldZWmHgo_u_(vZJ{Tc%5we{WSzYl|nCL<5sV+i`q)IP#ILo$u?!6^T93d7Ed8;*<&X1UMw0?HT4UB zsVAV-y9rUDs8ZB>KFp+(TZ=}PUMlzBHsQW!=}tw&M$DU5Q8(1BL92ApkGnJqaF3_C 
zoZN0F_>nMi@Yq}tR5<>3=J1bN*tRO{++Irp=hyB#-W+d+R-QKQZ_)K2e^ZStQN(aVQ9+DDK?8E`WK2Har zqVt;9`3iRmG9J3NOPbPxk`B?e)>94mL`QbTn^uL1+-3Jw=1XuaqW9pL{#^9U+6&52 z$(a4=n3#Z?FW&k_4VaRPhPutc{E3qJK(00HENrQOhSeg2BU|f1Gv!u&r#K1r95&y} zG0+68PS0Il>kvWd_Sq5E@;nIjTJV%sj>nIS%*hI!b!h)l?FYj`2UZFBZe=^yk2*O! z{s=uCMB?yYe!s*a^i{Y%=~*&}{{Mw3zTC)Vhlpn^nYVW%x0Gz%yL+wp=U?b?iR;zK z#6y`LQ%TeU4isXsxN*p#!89>U1A zn;eVeV-{LAY2WZELz_+{AsCDEri!K$y$|^fFd{Jch$|tV$SJ* zvY%u^(I94>jrZIKyzA*mP%eIp&CV28xsD*T5jI~ z!Qgs^`9FC-!8}JIwK}{WlDA$BHH##}HS?Q#{#QCdy?22%W2Fns7jLTtpu5=S+klHiWX-w`{;Qh4k;=Ir{*1{WQ+tZUP3b74+?yu zsI=j8&%SAJ1+-)7cm)63rdB*5t6`HaT8F)(p9!R{Tnsk(ui8RC1YR-Ud!Ki(1m>W5 zk#C|J1d60`-r5quug3H6-0KQpy>;6oW?vSZZ>e_69tg)dcUf|ncsV}2>8$YkJqfQf zvAEjJk+CZ?#@SMqih|CY+Z6RWakJ?GuZB1(HtM}LHL7k$&GXvNIE~48Q+IsOnWYt# zds70-3B)%8pZ_N*tf285-S%vuL@iZJ>pDAZo?8EsKhoD<}T{x=rWi4pKf87Og z(wR**=KRBYqsyQYi(3+XlH;<`{lfC+O{cu@jfPf(@IV4EX}+_{xn2kihpw;m>sNqK z{_l@RYHGoQ?^)V^mL!PWo;04p*$Rrvyt=iMZD4!nY*J!88P-PE8CAyGK*49YRnn~m zL}^mAgE$CR@-(@`l#Cj-f~MU4Wys*er_1rA9{mK>pGM0y zBeSnK-ynYr>Ia{qee@*Z9(J#jd1EygU;fg^2>n`FDh~KU{wI)6at!s%l8Ei(tm+4<@o0>P7`1k%~AGpbpj4a9x!g?ngq8kwUsQV zCLm8ahnCYd27wNIDlejjKub&K-M{)?;Hrsk^C)b9>MdPwU-iV}ZghBQHq(Z8QYV{# zvC)x9ht_a~Z3KDeC*Bu$k7A$fzBwO(VU#aDdu@V`hAhdFaT?kUI1+XstzIq%LMw-d z)y`5OvEA;xmH!}AF&n1XIF5l_XSax7{x~o`H<0x++u$Gm&S_`)0U$6jttnADVBgZ) zh^4<3z*$UHJNG^Ueb*1>P2H}-oSng$&JrYK%XN9X|5-DNP6yxLewBm|n%M~<$@O@l z=m$rgcP;9R6;g#g>v8fw!GXio%@{&H{D=I!9VbaVv!q+S=pC`kE#);GqYXRuNAV6} zUBdOOuXe+zzG;|G=*}>HDXqECGBt=C?b7$deK&aH;(}zaQy+Q)Py5_~PW&r&-EwN4 zjBY}XtoyuM(Y5l_>AP&L$ffD<>EneqJSZTW$elt#VM`Oi&T=a5_{`<@cc2@;6D_|u zy7u9burJlC?`h~HnXh1-Ktto!hMk$DK9n<+?O(ayjqQuv@3u3x|$TH=$qx8UUdghT*V-gK$T9dMn{? 
zKd1+pIK(SQ>bH%pOd=Ki_ZN+m36j5z@-djYv2v#g)~OgPle%8HUS}AY@&uE0JCS`j*#P z9?aDN`;{14+u=G`7(3*-AY2ZW+>Wb*G4UWL8u9PRrc4xUY&cJPU5#!gaj|1j&A8{2 z0zZEa8PB-{5*o}=mpLXSJZAVGwyOSLrb!g70-?7jbjDu-K|7>)sVee+IrHp+Y z&}w>%Gc}n8Q#ZW%9^}wLAg$?pTYNt(rc$=<7U}}(z}h6Uc|DvyckHZE`WFnc|NWbg zM8b3Bi4D1TdQguoSe#x!N9%ig3U@LO;@5kGnTQBFHl@!h&xrOS3Q8GAK54_UwUF=6 z)5|eKd#*vG{2|DetsN9HErs4m$0Mf}8o{ZeJw^rFV0U^1dl?fIp7Nfs*h1(ASLNwr z$G6hJQi$2g*rp%M)DGk;sL`QBLW4q9rvtyic$2(LKRBO!K#Km|1DDF=N-w`^huA+| zmt(4$ps=s}xPW~Lys&OrXMG$DBTH-q>%=&8|7z&17@mbYHmmf#*ph`Wvj6%$x)G0` zq8f#Y_;vux3#yZdND|zht~wj`r2u%Ny4J-M%HfO9!nQ}^L`aUa5GojKf`2`RDo+l! zf#F!?ox93Zu-w_<6)fBb5&3f8>wM^-zl*9F!Z`$&i6NebLBnt?FyCsuVi=zCU0soR zJOl&J;zYU#bhxo&3w~DUg}fcF`Z^paU?1Li@m6yqgtMo~_$yUFUX6pv2_yh6S6t%h z%ZEg>d|jDaML_j@WH{?r3W|-=7sf?kNe?6{OPPfw{x_q-;M>OCB=r`_m1( zET?1ageahw_;*n6T@x7QS)Vw^R}FVZHa*~qtOSiyeKNav>cK0(>5JsXzAz;NE6oj^<#+h*;yN@6H*0n(+8T^NdQ9l9PQ5{R^2k-Wz^D((*_LTr zgWAA*_1=}Yg%yyg{^@*nOf<|kvRn!BiNLYq_Qq2EcqBHJ5-*ncqg!ggq>xAyy!2ts zQjpIDwbWL$i zq(S0p&9Cw+{m}nY`SZ0SeekhqEXyQ<0;U&Z7CCQ~gKzjUN0VwEjyw|gF5E{!^A`Rd z8A?BX{Oc^9E;5Wtm%Hsc6-Ur}d)MF!;}9l?JPtN!>_gr&zpVq8n(?&0Vvu%N0^S`C zH#KQ2hf)Pm@mD6T(2&7FXf*188HtYk5Kbz1+bS|;g;HSoSGUdciWW$7Xch9-X#^k7 z`?I;TMEI96>6W~e2wT(?&@!|Zh|+UD3U}+Fpz}#|T4)_ySNbhEBwhtep1fkGWpm)! ze<%0qK8{8)QQHZ(H`O>?qD_B2(1w)EM3t(`-N>HgntjcXhSn!&nwzF*sK~l`+;j^K zKTBoJ^_X;{rQn3@(uN-`?xuX&*4~EW2|0T046PU<=ygB8u?6SH4vQX7Y{jWyDq-7) zHvD$dtF`h`8}4&dILqkPf~xnQ%P7V);Dx;jEGB{#xRZfn-RE8oX1r1{c8`ujOZ)Fn z_MT2glcghK{{4tskN>^38wMO}`M;I-o4jN(;v{3sUmY&f|SSsU`iZ5c^OeM`AKtY3`3q2aOANGyiU ztxs&r`;M_m2i(^6((zA-Wiy! 
zPGD_`d#n&MWY28+Fz=7$C+f_frl-SAK`Dl#>?OdOJQx)Ivm8>JZmssFl*8nuG2oOv^v1MC!N{GxCe*}_NR$TG$`mA3hk4l0sY7DBeuM5FckLx z>+Va2=}-S))uIMMtQ=HNP@+NUz!0f$Q!X~#!Czjpl_>h=?y}%i4Q}#3(M6i9z}Bo| zg=!o3LE>sg?}MNZxH!tp%`+blf|iHr?jk?I`(C3~YEBZIW_Vh-|Iur7CfxqMXr6~x zHO|_v3Rj~nuU6x=phg_ilZlpIX~JO=r9BvG@u_9O;2Bl|y81AQ8(s(iy^GeZH|i^3 z&%O6&O7dHvcWi+NB&gsLHLN3Wt_R|EO9}J8dqMRv`QT2DK4@t0>3gNs3;8cqmp;Dl zfe}~EC);>>V3=Fv*knUDcur2_o)YW^ao>ISj_7xSZ%km^d<6w|o7LXPk!%J=whW!Y zm2!a5boEWuk#P3fffF};Gco#jY+cSO(!EZF;f8He>KEt`6;9mwPG=bGFVWOeE{uZnf~EVum=SP~G2n^G9)!$o zTo3Bk`(WoY@e=dzZ6KL@Ou?V03<8RVkH%&D<35e+YdrI%$n@1Nd%m>}AJKIt%XmoG z^~3N@;ARr?6}o)8(^roT?|1J_-@n0I`iq;N)5?(LTZ>3`WFZ=^QMm{PnK+cP%yKv* z8gKvcaKGa2jDIq>Z_!f?0^NDmD7(vvFjn|;|HZ+dFb!mvO|$t>e6H(yrV_$%#it|Z zwwA%1dLsMU23HEJC6zvjuY&xu=K_O!YT#|!1qs2xS}?5sSVtGAgC#+op{Wca)UG=0 zskfY6e8b?oQn#}s6c zwiMS-@4z1Yy&e^ZI&gE5`=|o34FfLNu5jOK#Bmm9)iSvfl#&|P(q{F*nze4-5v4rf zlXAN3K34@V)c-z`I@k#375DvYCy?Oc3rn@cR1$=BJ#1bwY=j(+A{P)gKPtI56@RqtidC0=!+_Cd1!lbU{ z)jqb+2EnaHgQs-JkT)kF6)VvOr3+nitP0IAaqvL2YiA8;O6PTrj^#iMgVn+dm1yKD zDsI=;t-`gYC_jFSR*arl7CSmj#cdPcD3;DW$o%pG@79f~`bAlTy^V@j=e>Tu*w=~& zJ&h**ZK=eq^Vvc^o4z5BE|1>(UwM$nFvP?Es2aFR72m!KuZQ;tXl0_(#Jx zHPGUlqLI3h_ofGIJd53n;XWz(N@p(tEE3-2sRWjSlAz+DqWv}C7iVeAzncV>pH+WK z__RX%Sl0R$UNSVQ+^^3jw1V254ufRjMtF2O?h&6&E8nr+ZOaoIKFo&w*@_*F=d*e1+~j8 z{EAmd$o~0lsLu%!Ugn4~mtiL19WBPns}Up|5H7gZJl>3Q8cXYc16r}X#%#1noQ(hT zZwjtFe|dmWc!N_VZsa}hZNpG2`-ko~Tk!mK4*~J-4YRVQPy&t5I0>S+wJ1_WHGTuQvnxsNpjAJSQq?4D;L@o4Q-&)0IicTZ^Iab!I%x_a_ewzpuB ze8H?;K?@q6d#h{TT#Gt=QET_lX5rDCZb$YL*%06CmZmt>0#EZ54UY7XAoQ0^_mpzQ!_F6 zf%yb9eEw!PK^TWi*^XT_MkDY=p{y_LcsDf3QtIiPsD{B9Dho+IFVtB+&uHOSj_>xD zR&$uuVxIemsPdsI9Idk?_ReJB1Nns;%Nj{gw=eH`eRegJ-=w_JO4ANB>i6YDk9LBg z`-ccdjZUzu$UQG{vjco?iwr9)HbG&-dv(60Y9KQkf2b8+2Itw+jeKnLf!@^YcVbpH zJo)h}A#5ZAE(^PHJwBWSHL`S8r%!u=yEzZLmvB7tZcoSrk{9Czb*ZJ|>osUSdp`S% zRwI(dW?3F@YQ*sHP{rAA6w25PIZ8rX zp>6H(iDYnRu(ALrFa<9{%L{#pio9K&}Sk9ltCRd{TBI-F6*;Nek*5a+k;9J$aGwQ`K?E ze69C~%XZZ4i5#cS+y>H&n?P4deoe|*NeJrU#Y7`hp=^pvN9Rbon)R@h!0niY& 
zl>eDa1Ya)ZqRwNrkk2&M?|j1+-u{ReG4ia&O`|fWoyAUcjB)>+JJpZRI1(|uYXpOl_a!fvspg<)P?a?;_sMvW>{0kci(@ghXev#aP z-{qKRNs$exvB0qr#aoF^;qF!6jufMWa%|{cLOwQe2R4c)6=J>S%~rX!GE8N4F>)HN z#F>mEBFI&Xuae0VWlQRD7n$VI`Lss78$RO8<=Bj8tt(kt8QQR@x6Xkgtpk0J=078l zb>U>`>qslVUJU=f+gb1D5OR6^PMJ6}hMI@(@uyf!ptkqMIsY${xIOQ{(X29wX9?1g zvLfSnsOb2<>mP@ZiqW|H%vdK{{tD&NXe>oC)^t|Ip&YnCE<|qdrX3u#h{1+5y>R5p z-(w1dK}f2+syu2k1VkZY;fz0hP&L0J>da1rwYLxcUiw@EjKQ-8*NP0`6J3ziQN9}7 zbL<&KiyaZ0sY*PHV+U~J^_A)!@liC>&U+k}J%+i0>xS&KDy0g-?xK8`~;C4<3E-3{bqu?)r`?Bh5FxCjyzV$8|l()kI=h+3R zckS?G-vutcjRs(D*KA(;TnrH@0!K3qUSixw(bG-lC77#CugK+4hcc^cyY;slP)RH# zOVO?x|IWtA%HK^xJ@=5k@gsTQC+SrzrrZvZLo)d?XZs+|GTm*UeFW5E))(4h#(=it zTq8kd47z_kR8@)@hD3(%@mya>P^^86@&i*dT+;3et);*Qc2_Vy93?jvr_5=y(8@ERO6N&olcV}Xhl0w$>av3(Jc}rAAR^w{}#w=~8I^4@OHyKq_ zhX-Xhjl4e9;D@7`k+xeESn80-H>6R5W=nVE#V7KR(OlSsQ9T#8Y=@41-O5KNLf_li z52dJms(Gt6vjPuL$cIz!sltzyu4PT9D^SlaY+#>49tQpXEhfiii+l$L4-TIyg30X5 zMgkGd@Sr69Zg@=>G?tkPxVH4e={x(6Z#oRa-1{wWNzoA)^}40;C~yc8PkeO{`1fue z4sXiGgSueh;CbC4t9E#+p#F0vs2*fbJvQ%;sRAi^$%~g#DN}#ydHerDx5kNps1;ysit+*I(bud~X1G zdn=cjaz;>QasTgnoiXI*xg$$`X&gVf=A7HqA4jc+m8J!BCF$$$otp z#Tt&&+Lu3%;Ex%`lm@b4JW;q|-F9OD9ewLgq>c7qR6O73^C3h`FVxq&P}PcM(W?on zDV6v@NX+SuS3G{*u2Xndn*jy=J}iEnHPFPZxU)gs0rXWR&EL7YK{0=5K&ZYK9MnT` z@?$^nIO_Y@@%2NA#79GJfgYerag`F)CBT%WfsV9)6)eXFI}>a^Vj+c*Gx49_WWQ_V z2o@W_g`<7GsjU;p$E+o)8#ssN%?Zs%Z~>k6nCl!dUP9SBu@j%~F5xjN5$%h53pgWE z_EF1X4r@In^zZmjA`O}M4{`qyd^HjHb^bvQ&eSXhZ}xQH@l`K7+xaH64|x*Cm|Tzi zA2%z;|JGsrTY8l=y*eyWGPVfTtwx&-l(apGW^7*2%b6(<((F8 zfg(w(vxnWfpiTMe!WTj>&|JI17p>X{jV4-QtM1)UxqSUx)w>qxSMS_wSeOB>nY(zb zCCgCGd#Y3T4FO%r8|G@q`_R*$*TCiJFs?*KpGaSCmn2Z1u!_|hE&*MNJd*#dF>PcY# zh%cyqP5}>@jGQ)1gA&El^U0M{AZ@Z5a6VuXB9aZC5Ua<5p*EOd`{gL)UES6$4IhLQ zfiKrTe~3q(|ynT2n#oxmB(k& z@L}tc_C2{$RJ0O2V=_~NzaFN2zkR9&w?kAIBi1@_S8=VDM=B9lBkvZ7-{?jbR<45$ zFT2r0-N$`{vI{@(qzm*g5U?~=GMaa+32PoYzwp(rM7lI-w^#O=C@OgHG4C832ykyd zXBJO`cii^Ux@(0H^yiR%x=IC@-al$sSX>RdPT~1CKGlIFQO5YrbOZcpnCDiLYJw;C 
zd3KpJHiDbO+v|;?b)fc{*>-5Q92mdID<9_Pu)NT@L?Pq^5-`9s6%g=+99Qu%&@OddLv>PKbE$xfc+OW;>V)=u#A`CQerH%_u z0;kZb{hcrC;j;T2H$zb;+D59`_2Me%6hW4}PyNpCw`8&HaCfK}38pzn!u|-ii^- z8!!7E%g|6S-j?>BG?M7eHl&;j;DXtUxo7NEklSOtxTsqNmuoAosK^$>r8ABcCVbHl zcrxhTnTJXEh~B;}HL2)7jwr($bg~;F)7fYesK~fV;|hC@UQ@qLQ4YH(9yOUAZv&|w zyIO7UE{IgtbN5N;fp`}SCKaO|V6&!LzoFR$T4XlKBc1IaBk$}GaIFE_GZK64_SQg9 zI{obniggfMcb_8gOgq#InhUKfksy8T4#(j$!=P``v%uFo1~dXS6bo7t(CI?or(i!0 z>MI<^caM$$hcKz1!mfQy!Kdrw1#5}Ifehbc zU_nfT@!9tbpzkVJWO2)bPOtWpE6?&EbUx0=@Jl+>JHHUyIpzYxr>RI=57ICows}l} zr5p{#1;kF1)uYxAh1$bDt@!I{)JE7Y!hiWrL@{6M>A7PCFhn!4MMm%JrZtx|{4 z&1(1cE7W6nXnVVaEd%r4zfD9W2v-!SgsYmy*GJ#FuMglMlikad|C@i^yEfi z9=YH%8guUDtUtEQJRbD8SA^!-WIwDqt1-2@kWOd51~0PBoN8#V#HTlFy96H>py??x zc2|jLv}}^~`8yj9-%p(>WB8H@hI>0Bh@5#a=fjb(qL~Bb0!Mfos8e98blE=Ys}GDm z(O{x#`;5lBO5GIVwa~fp!A2^D(0M#Ir(te`Q}t6RzsG!+D)^lH>sYJsCW zIQZJDG9aE$(BE9l2hEk;NByJmtNR=&tguYE@2B&&aQk373epe4%4!#;oi(fivTj8bwN6C>>(v8wExU5@1Op1^ zl+|OU9RIs(lLTCT`{uge%v#^ecd{|7u7Xx!1sGZ^uKyHq!L8!z8Hzv2$z3l96FY#P8rK{eevfCPsUdxgx-jDTkOyo9&u zIIN0UH21npLQzR)=}genKc4>fSh{lx$fEozBJWJWP2r<`7Y|Ls{A7aV>%}oJvtv`A zw;BfLdmT2p1>KOU9IE&xy$-VFMG|G+x*+4cc(7_xBi=v0Ie(R<7e~w1^#4eVV*h?> zf;I01l9L5RU1FWYsK8UN_MMwR(aygL4}Xo~0arqsB=-=Cy1ddM`ShUtVb^dvT_V~A zaO?72A)wy$*4ir;0-Cs}_>=MfFfPlk&ok){qTJY;hH4NAt#_|Fflv$9DjZ+``85Ooug~U! 
z-N_SIb>!>eQ^(v0bT>nUU!XxtMIFe~PSuUE=0a6Q{^kMpG~72@6d%IbfI(4<1%A+p zSzm_sZXM{x*!I_7zgiLTt4G$Jnx+=ymSmxEi77@dCpSUciBG^!_Q~zi*K)``?lhCB zT@R*Lu8=uVG=M_)Qpt%IRS+&c(K&T92jbGkUMc<61G!g0-RTUe`22=8oxs~{Y|)8V zou^5|TgOW^e^DD?F5Tb40P!qPC|IWp-L8Zrm7P|Gw@py=i6o%8+zy5&pWM_0JK>08 zwtUndBIND3@8Uc{g5=u~M&diYz;jnLDXw%7yroj~R;NZ_I%eyfa>y8vS6}v8x<3xr zN+fK}lE+{!eEo2^{0OMTMC~ic?SWnEyC!3bTi{pH`x{qFvi{5a3|y1iS9I@66^iE@ z{v}JTMR}vtqLv4hn9QOhGisWF=@gSSF3z!_ca>X?m|PA6UUc{Ex9h>ExY_3sYcuG3 z7TM&fH-aic~?{~Fr{=t z?^xKf)~7zOu)ZmAecuSEOVZzM;TnY~kxZ?|e>~01b^L?uwI0~nc+HesQ4dF-&PE+n zi$xtF0W(jh4yfgx{3T8VYGlV9<7W53z9y|C&f)$hRqMc3V9_(4Ii2^5xwU zE+hDE`6Sb&1`={@y}Mm-p#{B<5Bv(QDZMV&oqC^_o9owu(m5_53PAR%oyDEbuvj`{8~>7eX_y>)+ke&6bb4s+X3 zOY?4cq%_T}|G5iDEY~|(xQNgsp3rw+lJNic094A%`Tn~ResVAeX0=y?34Qjt0nsAx z6z|_LONoHXu%|Ft4Y+arKE#kfo~n>LOi7ZxcT)NPaMq0S6)7`o(2!C;zONiis6vm%~@)?I=IVx zPyfNE4rt;Pd7{bH0}*B}9^HqAfP*Jj{D}q-Q9Y`17ysU;>;c{xP;x$pK-Z4;^Vg3!s1I)0O;@ zLZDv%<*)Ot0MbvA^8}dYL$QwFe9ufid^*Ow&xEoVG_wTn9y(hNY9$uOnZ&E$o~YHV z;Q1Q3q#V(b(O3D4aW?IQG>3kx!ZpLXIQHG^{aZ()=QD!;{*xwA|>Nq2NF?#XXW{>>JB_i_ATd2 zT{FI(X-PQkQjM&tjB8Ira`92=UXNLSeRM5e>0I^ChO>_~S&y<-f@>HDaLLvIQ_cuM zTcjT9g;Pz^tgC@_OCj`nTmk%YDdC9cdI>$|`3z>ei_q4`M1sK5gc~91p>MW2afs^2 z@ee0@G5M}GJ5%fc^1b%ZEmIjnpU=+6g|81`YG(X{-J}7We7TSjcfSw+^i&0H@{{nu zP4V%oKApI2$+IuuSHNQD<}WulnCkJT`23uQjqCEw&Z^+AxpcNqf!o zP=gVi?Oczw6dlA-SL$DPlDhDb#qh%tomyDp8x9wXF+L4(Y{MWNV#jzrUBH>>@zUXeTTd5RBU#|334>#lg00030{~eckG!%^Y zhg%4hC?!%zw(^D~3c12JBugbrC6rRO7WGQr$P$t$6p@|mV(eqNwy}+2Hpa-@(-Cf{WZk==ek}bTrG6{XNiTQ6|v&Dz`*3M8n&~| zt(g7&;HAx6X}CqJa;HgI9X#9rQgZS;4Wu)U{Rt!WLSf$em|E5cXgR1VYcSgf0+R%u z-ohSGPyL^Mx;Yi%Q~87$Sq&g2qG!n5oehR-zPvFCj7Hs$yYd8{C37X{A7Bm4?+)EMpW0=~Ty!EDW5cu4d8{oG&>#6&N$xVwfy zov_bfUVH@j$g!=`u0wEe@9U@a18i_=xAi>kLWK*C!McWPE8uXV)xs&$0+h>86COIz zg3HxwQb0jvh^kh!tp;MBGyv5!=oY~9@Vl|)C+s>&ddz}32^fcM5?C(8f)^SbtP1G)qV zJw4MIghu_-=YC)#>>%IgZQIZddk86N@y7;1((K#y-`=CZ`9@8w3;YO9{VUXT+mBF0 zEu>su9055?^GW^}f*FXC4 z*Su($o5nB})-ks{RUAS2-@D4+cn_n2sbt9d-vhX{qhIl*9UJFdlNo)c3>0CNvwX_Q 
zsQO2heGnT_k6?7R#~P94HsBtogjkyDC7)5#fE#n1WJPmZ@G^6qPF9H|fBmCOQdM{*o}iJKpw7Y&G(ka^Ex3$w&Ji{*rr@<8W7zzN64& z5{h?CdGlfkIwrgr_`b0jwHNb+oO0+`HFMK*sihB{zF5k(jSgW+=wpJ!wo&YhdN+9c z$|y1nw6qP9hcT&>JF|TD10In4KJjOUhIef2as@1_P-;W4_CRMIxEl?}r#)(cwGo>S zoPI!sFwJLwGt6mlM1akCDL{cjsea|SpY@=ua(_0SHwnHPzsN8DUVO3uS zZ5+`q^kTm^`}Kf^1+gpY-;U6bICt>2v0Ep$7&-e?8?>UwxgFe5H|lX%B+&7Oav7>w zq}@9unT_)UWKPGLx42g?l43vm8k0)a2n+@$qhiMW=l#qaY>e4{dDO2M*9BO$O#4<~ zps(TbJ1ru{-#-?^ZYo22ef~Hz?`)j29$BIpy@2rtxtb3D65+qA%r5tyHc+kzUTXeC zhq3oHe{Wl|pziXiW9pmOuz28yX0kjBE-Le1OR%FsxJjUvj8YveD&EhqanHj=ts@~_ zSzV|prD$wd*@q`|V;MCH!Uv~czxeEIhTuuHp&9o zf>#L0?@rugZBYZcGBqQbOe)|Y4^=0;2Y4-gUQY}5f`Rm~--ottSTps?!=#@I49(G# zz#ZdY*;93Gc#eNbahGCGemM~@J`x@suqt728CE@W837(^C;&9u@Hq$kqUtxOp z4m+uvDL9iyEfsSnAko|?{)}cl{*k}eE-FbzeKyU#`3xOf17b%5qPme+L2m79m@fCF9XSvUJjK}wzD%(c`B&xO4Y?CxWM!&Ju4YgQbP zo+!9*z;F<58t2psWpm(P<))RRTiI~=VI`XyKnKxdqI<>Mn<0XH%{c!;5e)mCJmJM1 zh0$$eXLL-8@rZp&>S%c-?sXr(LS$56wW^@4(p3T;zB50{Qb@tcV4r|5{Q*EbpBC@p zmIDkCljWnCWx$+E@ZaNK3$#*QWd*+m$Vs|-&cKEQZ@e|`iVC&DxP+9#^}%-7tLyc4 z9x2e(zFXq-aVi|EO4=T4LW8`P=-I4#8g!9-MkaM>5J<2e$llZi%QsS*^UGVoswL*b zdqsqmKFxl+ykrQ{cAqCI7a`Hay_D}73Ax8CsJ9)cm`38Lp3GySYOd?{eqk2QYbY}X z+*w$qML)HT(Tybi;1!-?I!cj#{V&y#f(~;R8|K%OaFaNH_1G{Gvm(v^+aHyWLId3{ zg4(e->uXRuA*BfSoah=?zD2=wZCCA_Svq`uwIId&s1U@j|C97n7y8JbzDWupZU&+Cvh?%8=?ObVb?k6=s|r&;o~SD6wbg-jS+;aoWUV z3HN%)61f^^ai<L99&K6+l03_lEvs}_$lz>Y+wNBXjWS8Pi4@k}oOUuVJA(jMS)=#g!zrb7L# zf1cJ>Ac!uc+MH5P#t-RZaX;5I{n9g399xiK1Ut}ihToC%C!B&}=U-ix93Z05 z$BAhsu@G`LMdgX=b-|KkX~XDYHvE)&q_=Be01PC;1c~m0aDhxcQc3TFWmUQHb;5M` zt{MAwt6?>$=%g%3uPel{q`<=$McUC*MeAfz6die@C-&~*WuoFrDLt-Zm1~1rw3))~ z$lR&gLGh?TMnfX~$){8-KDqPX?z15fCs^{fA~puv_%YLcRuNC=NPOKRpNZt4ip^47 z6Oe)k#Yb?c5nTMvFz+ce0lzMBCXA~A7Te9Px^1h4v|x{j)}utop$xo{d0z#xB1c^F zH#NYP13Xt^?zO?Q?u^ss7OPzUw8Yq2(hbD1+}__2y+D;V-Rj%W3yF+f?H@MNRKQpbHRwLy#(nmly($`H~Q zv>Zy8Id~{tFgMtTg>K0|8dd};7`x%{PzGNyM!%PTlD@jnYYX>>Ub;>Kgc(m8e& z=wz>#zF$baU)W+Jq>uox<>aul0w z(jeKRBEhvh7FC=F?S$S}pmd$UTZ$10f3_U*(M|8blzUyFj$f$QheQqaCv+@z8gD+5 
z#K5ImUgww(Ol?Bu$SmNXX9g!+bz%q&q{J0StA5b@No#)f zVAaPB8dk)K5%isPEclB<7`C5n^nFVo7Wp~Ml$@vHF_P2{@xd}&?C>M$c@;ueIj`H4 zdh(M% zWL_1X?0W{+hXxH^?%m0pZ@VbULUDVv-&{G{(w#C}kVa_+-tC)=Viip%qtlzTCpld|e@I@Aca zD7YLr(*l-~eU8JcIXIU{DHhn)L%rtBp&rjd7&m`l6}IU){?vEov#~G1-(3FYVpnVM zLa*ZPV2x@N6JHQ=Y%9bsL;Uu~y@TK|OXeM?jR^m|B*s5J*9L6O4{Ez3XmDp~TDABc z6R5XDhL0zA1553MfBG9HIPRMekl>-iU{dJc5(x?n8Hn^3Rgz$W%XoZ(T>}np1ef=; z6JUqy)H=wy|GJ#&=cJmQrfw&wmgJw z>t6;3gBTYEztzE=<)ZNjqZXj>?mqGTE(PK<-xdDaO9LZofr7V&bnvw~ozYrLgM~O- z&ns1(Ftix9U`A|)^?~KS(&m*gH0Z9pStJsIbK*Dd63Iay==)!qKn0$ReY$)ukchiu z6LkIcY=E_PPd{a?5&zY)+6>@G=6{i{+ zm-QvbCGtVRDom}CCk$W57e*>XmmqyH(bkrV*bbxfW<_;a{7F%mom7t2(w9QzT%Te5 z*))^=jB@DIG8UM)+Yagi;hn>740vfrO3hSYfdnVNTK@qH#>9PS+kZ0P)+e!T-+r{g z`Oo}+>fh&rXU+`;T2wX0nbbaAtF?N^n%h)On6j~r+-cFIK7jr;f9DM>2JvujaO#t{ z{TP?ikum+I2i>D*qpx{&p!OYQ#$+V{zoqYd2v3S(F5h!rl+_L~;!0Y*N11T@lW6CI z>0a=*KBiM1`2k)Q_Nb_N^uwbK{F0&h$oKhF| zBy+Xmf_bi5cLWhLzdvxP&iD^3D7=p2I(0xkUKHhZsuLn}r$QX8nb5b_DJp=|1B8U3 z9Zp6)aC1x~HK~#g{34r&)1%tpk={=EsESpOjBM0C%t^tl>*iN4>ouSQHP4`LpaaEa z%UH6-G&JGYDXAHwqsr4Cu<;@tn?{f8ClpX|WP~OCS&o7{>8?8uJGS8_xz9%z*0-QN zsV!XMQ8RAicOJEAX~8vUeDk44JCY?T#wKYL+?e54R~|{l(z}6{lOr_DHY#`JiKnBL zQjw}zG#xc(zMG}j(eR8Sfgmt3GxJln78Z`|42hnp_i-*cAcU=y<-LB)ZC{|A 1: + method = int(sys.argv[1]) + fold = int(sys.argv[2]) +else: + method = 3 + fold = 0 + +if len(sys.argv) > 3: + baseline = int(sys.argv[3]) +else: + baseline = 0 + +print('method number', method) +print('batch number', fold) + +# Get training and test indices +ind_test = ind_split[fold] # np.sort(ind_shuffled[:N//10]) +ind_train = np.concatenate(ind_split[np.arange(10) != fold]) + +# Set training and test data +X = Xall[ind_train] +R = Rall[ind_train] +Y = Yall[ind_train] +XT = Xall[ind_test] +RT = Rall[ind_test] +YT = Yall[ind_test] + +if method == 0: + inf = newt.inference.Taylor() +elif method == 1: + inf = newt.inference.PosteriorLinearisation() +elif method == 2: + inf = newt.inference.ExpectationPropagation(power=1) +elif method == 3: + inf = 
newt.inference.ExpectationPropagation(power=0.5) +elif method == 4: + inf = newt.inference.ExpectationPropagation(power=0.01) +elif method == 5: + inf = newt.inference.VariationalInference() + +var_f = 1. # GP variance +len_time = 1. # temporal lengthscale +len_space = 1. # spacial lengthscale + +kern = newt.kernels.SpatioTemporalMatern52(variance=var_f, lengthscale_time=len_time, lengthscale_space=len_space, + z=np.linspace(-3, 3, M), sparse=True, opt_z=False, conditional='Full') +lik = newt.likelihoods.Bernoulli(link='logit') + +if baseline: + model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=X, R=R, Y=Y) +else: + model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=X, R=R, Y=Y, Z=Z) + + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.1 +lr_newton = 0.1 +iters = 500 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = energy(model) # compute energy and its gradients w.r.t. 
hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +print('calculating the posterior predictive distribution ...') +t0 = time.time() +nlpd = model.negative_log_predictive_density(X=XT, R=RT, Y=YT) +t1 = time.time() +print('test NLPD: %1.2f' % nlpd) + +if baseline: + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) +else: + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) diff --git a/newt/experiments/banana/banana.slrm b/newt/experiments/banana/banana.slrm new file mode 100644 index 0000000..eeec626 --- /dev/null +++ b/newt/experiments/banana/banana.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 24:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=1000 +#SBATCH --array=0-5 +#SBATCH -o banana-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. 
+for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python banana.py $SLURM_ARRAY_TASK_ID $run +done \ No newline at end of file diff --git a/newt/experiments/banana/banana_baseline.slrm b/newt/experiments/banana/banana_baseline.slrm new file mode 100644 index 0000000..2b99f18 --- /dev/null +++ b/newt/experiments/banana/banana_baseline.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 24:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=1000 +#SBATCH --array=0-5 +#SBATCH -o banana-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. +for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python banana.py $SLURM_ARRAY_TASK_ID $run 1 +done \ No newline at end of file diff --git a/newt/experiments/banana/results.py b/newt/experiments/banana/results.py new file mode 100644 index 0000000..caca00b --- /dev/null +++ b/newt/experiments/banana/results.py @@ -0,0 +1,25 @@ +import pickle +import numpy as np + +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) + +print('baselines:') +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) diff --git 
a/newt/experiments/binary/binary.py b/newt/experiments/binary/binary.py new file mode 100644 index 0000000..d2dec09 --- /dev/null +++ b/newt/experiments/binary/binary.py @@ -0,0 +1,116 @@ +import sys +import newt +import objax +import numpy as np +import time +import pickle + +print('generating some data ...') +np.random.seed(99) +N = 10000 # number of points +x = np.sort(70 * np.random.rand(N)) +sn = 0.01 +f = lambda x_: 12. * np.sin(4 * np.pi * x_) / (0.25 * np.pi * x_ + 1) +y_ = f(x) + np.math.sqrt(sn)*np.random.randn(x.shape[0]) +y = np.sign(y_) +y[y == -1] = 0 + +ind_shuffled = np.random.permutation(N) +ind_split = np.stack(np.split(ind_shuffled, 10)) # 10 random batches of data indices + +if len(sys.argv) > 1: + method = int(sys.argv[1]) + fold = int(sys.argv[2]) +else: + method = 4 + fold = 0 + +print('method number', method) +print('batch number', fold) + +# Get training and test indices +ind_test = ind_split[fold] # np.sort(ind_shuffled[:N//10]) +ind_train = np.concatenate(ind_split[np.arange(10) != fold]) + +x *= 100 + +x_train = x[ind_train] # 90/10 train/test split +x_test = x[ind_test] +y_train = y[ind_train] +y_test = y[ind_test] +N = x_train.shape[0] # number of points +batch_size = N # 2000 +M = 1000 +z = np.linspace(x[0], x[-1], M) + +if len(sys.argv) > 3: + baseline = int(sys.argv[3]) +else: + baseline = 0 + +# if baseline: +# batch_size = N + +var_f = 1. # GP variance +len_f = 25. 
# GP lengthscale + +kern = newt.kernels.Matern72(variance=var_f, lengthscale=len_f) +lik = newt.likelihoods.Bernoulli(link='logit') + +if method == 0: + inf = newt.inference.Taylor() +elif method == 1: + inf = newt.inference.PosteriorLinearisation() +elif method == 2: + inf = newt.inference.ExpectationPropagation(power=1) +elif method == 3: + inf = newt.inference.ExpectationPropagation(power=0.5) +elif method == 4: + inf = newt.inference.ExpectationPropagation(power=0.01) +elif method == 5: + inf = newt.inference.VariationalInference() + +if baseline: + model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train) +else: + model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train, Z=z) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.1 +lr_newton = 0.5 +iters = 500 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + batch = np.random.permutation(N)[:batch_size] + inf(model, lr=lr_newton, batch_ind=batch) # perform inference and update variational params + dE, E = energy(model, batch_ind=batch) # compute energy and its gradients w.r.t. 
hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +print('calculating the posterior predictive distribution ...') +t0 = time.time() +nlpd = model.negative_log_predictive_density(X=x_test, Y=y_test) +t1 = time.time() +print('nlpd: %2.3f' % nlpd) + +if baseline: + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) +else: + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) diff --git a/newt/experiments/binary/binary.slrm b/newt/experiments/binary/binary.slrm new file mode 100644 index 0000000..8b5921e --- /dev/null +++ b/newt/experiments/binary/binary.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 24:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=1000 +#SBATCH --array=0-5 +#SBATCH -o binary-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. 
+for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python binary.py $SLURM_ARRAY_TASK_ID $run +done \ No newline at end of file diff --git a/newt/experiments/binary/binary_baselines.slrm b/newt/experiments/binary/binary_baselines.slrm new file mode 100644 index 0000000..7749d82 --- /dev/null +++ b/newt/experiments/binary/binary_baselines.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 24:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=1000 +#SBATCH --array=0-5 +#SBATCH -o binary-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. +for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python binary.py $SLURM_ARRAY_TASK_ID $run 1 +done \ No newline at end of file diff --git a/newt/experiments/binary/results.py b/newt/experiments/binary/results.py new file mode 100644 index 0000000..8681baa --- /dev/null +++ b/newt/experiments/binary/results.py @@ -0,0 +1,26 @@ +import pickle +import numpy as np + +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) + + +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +print('baselines') +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) diff --git 
a/newt/experiments/coal/binned.csv b/newt/experiments/coal/binned.csv new file mode 100644 index 0000000..5ff26ec --- /dev/null +++ b/newt/experiments/coal/binned.csv @@ -0,0 +1,333 @@ + 1.8512026e+03 1.0000000e+00 + 1.8515370e+03 1.0000000e+00 + 1.8518714e+03 2.0000000e+00 + 1.8522058e+03 3.0000000e+00 + 1.8525402e+03 1.0000000e+00 + 1.8528745e+03 1.0000000e+00 + 1.8532089e+03 3.0000000e+00 + 1.8535433e+03 1.0000000e+00 + 1.8538777e+03 0.0000000e+00 + 1.8542121e+03 1.0000000e+00 + 1.8545465e+03 0.0000000e+00 + 1.8548809e+03 0.0000000e+00 + 1.8552153e+03 0.0000000e+00 + 1.8555497e+03 0.0000000e+00 + 1.8558840e+03 0.0000000e+00 + 1.8562184e+03 0.0000000e+00 + 1.8565528e+03 4.0000000e+00 + 1.8568872e+03 0.0000000e+00 + 1.8572216e+03 1.0000000e+00 + 1.8575560e+03 2.0000000e+00 + 1.8578904e+03 0.0000000e+00 + 1.8582248e+03 2.0000000e+00 + 1.8585592e+03 1.0000000e+00 + 1.8588935e+03 1.0000000e+00 + 1.8592279e+03 0.0000000e+00 + 1.8595623e+03 0.0000000e+00 + 1.8598967e+03 0.0000000e+00 + 1.8602311e+03 2.0000000e+00 + 1.8605655e+03 1.0000000e+00 + 1.8608999e+03 3.0000000e+00 + 1.8612343e+03 1.0000000e+00 + 1.8615687e+03 0.0000000e+00 + 1.8619030e+03 2.0000000e+00 + 1.8622374e+03 1.0000000e+00 + 1.8625718e+03 0.0000000e+00 + 1.8629062e+03 2.0000000e+00 + 1.8632406e+03 1.0000000e+00 + 1.8635750e+03 0.0000000e+00 + 1.8639094e+03 3.0000000e+00 + 1.8642438e+03 0.0000000e+00 + 1.8645782e+03 0.0000000e+00 + 1.8649125e+03 0.0000000e+00 + 1.8652469e+03 0.0000000e+00 + 1.8655813e+03 1.0000000e+00 + 1.8659157e+03 2.0000000e+00 + 1.8662501e+03 1.0000000e+00 + 1.8665845e+03 1.0000000e+00 + 1.8669189e+03 3.0000000e+00 + 1.8672533e+03 0.0000000e+00 + 1.8675877e+03 1.0000000e+00 + 1.8679220e+03 2.0000000e+00 + 1.8682564e+03 0.0000000e+00 + 1.8685908e+03 1.0000000e+00 + 1.8689252e+03 2.0000000e+00 + 1.8692596e+03 1.0000000e+00 + 1.8695940e+03 2.0000000e+00 + 1.8699284e+03 2.0000000e+00 + 1.8702628e+03 1.0000000e+00 + 1.8705972e+03 3.0000000e+00 + 1.8709315e+03 1.0000000e+00 + 
1.8712659e+03 2.0000000e+00 + 1.8716003e+03 1.0000000e+00 + 1.8719347e+03 1.0000000e+00 + 1.8722691e+03 2.0000000e+00 + 1.8726035e+03 1.0000000e+00 + 1.8729379e+03 0.0000000e+00 + 1.8732723e+03 1.0000000e+00 + 1.8736067e+03 0.0000000e+00 + 1.8739410e+03 0.0000000e+00 + 1.8742754e+03 1.0000000e+00 + 1.8746098e+03 1.0000000e+00 + 1.8749442e+03 2.0000000e+00 + 1.8752786e+03 1.0000000e+00 + 1.8756130e+03 0.0000000e+00 + 1.8759474e+03 3.0000000e+00 + 1.8762818e+03 0.0000000e+00 + 1.8766162e+03 0.0000000e+00 + 1.8769505e+03 3.0000000e+00 + 1.8772849e+03 1.0000000e+00 + 1.8776193e+03 1.0000000e+00 + 1.8779537e+03 1.0000000e+00 + 1.8782881e+03 4.0000000e+00 + 1.8786225e+03 1.0000000e+00 + 1.8789569e+03 1.0000000e+00 + 1.8792913e+03 1.0000000e+00 + 1.8796257e+03 1.0000000e+00 + 1.8799600e+03 1.0000000e+00 + 1.8802944e+03 0.0000000e+00 + 1.8806288e+03 2.0000000e+00 + 1.8809632e+03 2.0000000e+00 + 1.8812976e+03 0.0000000e+00 + 1.8816320e+03 0.0000000e+00 + 1.8819664e+03 2.0000000e+00 + 1.8823008e+03 3.0000000e+00 + 1.8826352e+03 0.0000000e+00 + 1.8829695e+03 1.0000000e+00 + 1.8833039e+03 0.0000000e+00 + 1.8836383e+03 1.0000000e+00 + 1.8839727e+03 2.0000000e+00 + 1.8843071e+03 0.0000000e+00 + 1.8846415e+03 0.0000000e+00 + 1.8849759e+03 1.0000000e+00 + 1.8853103e+03 2.0000000e+00 + 1.8856447e+03 0.0000000e+00 + 1.8859790e+03 1.0000000e+00 + 1.8863134e+03 0.0000000e+00 + 1.8866478e+03 3.0000000e+00 + 1.8869822e+03 2.0000000e+00 + 1.8873166e+03 1.0000000e+00 + 1.8876510e+03 0.0000000e+00 + 1.8879854e+03 0.0000000e+00 + 1.8883198e+03 1.0000000e+00 + 1.8886542e+03 0.0000000e+00 + 1.8889885e+03 1.0000000e+00 + 1.8893229e+03 1.0000000e+00 + 1.8896573e+03 1.0000000e+00 + 1.8899917e+03 1.0000000e+00 + 1.8903261e+03 1.0000000e+00 + 1.8906605e+03 0.0000000e+00 + 1.8909949e+03 0.0000000e+00 + 1.8913293e+03 1.0000000e+00 + 1.8916637e+03 1.0000000e+00 + 1.8919980e+03 0.0000000e+00 + 1.8923324e+03 0.0000000e+00 + 1.8926668e+03 1.0000000e+00 + 1.8930012e+03 0.0000000e+00 + 1.8933356e+03 
0.0000000e+00 + 1.8936700e+03 1.0000000e+00 + 1.8940044e+03 0.0000000e+00 + 1.8943388e+03 1.0000000e+00 + 1.8946732e+03 0.0000000e+00 + 1.8950075e+03 0.0000000e+00 + 1.8953419e+03 1.0000000e+00 + 1.8956763e+03 0.0000000e+00 + 1.8960107e+03 1.0000000e+00 + 1.8963451e+03 2.0000000e+00 + 1.8966795e+03 0.0000000e+00 + 1.8970139e+03 0.0000000e+00 + 1.8973483e+03 0.0000000e+00 + 1.8976827e+03 0.0000000e+00 + 1.8980170e+03 0.0000000e+00 + 1.8983514e+03 0.0000000e+00 + 1.8986858e+03 0.0000000e+00 + 1.8990202e+03 0.0000000e+00 + 1.8993546e+03 0.0000000e+00 + 1.8996890e+03 1.0000000e+00 + 1.9000234e+03 0.0000000e+00 + 1.9003578e+03 0.0000000e+00 + 1.9006922e+03 0.0000000e+00 + 1.9010265e+03 0.0000000e+00 + 1.9013609e+03 1.0000000e+00 + 1.9016953e+03 0.0000000e+00 + 1.9020297e+03 0.0000000e+00 + 1.9023641e+03 0.0000000e+00 + 1.9026985e+03 1.0000000e+00 + 1.9030329e+03 0.0000000e+00 + 1.9033673e+03 0.0000000e+00 + 1.9037017e+03 0.0000000e+00 + 1.9040360e+03 0.0000000e+00 + 1.9043704e+03 0.0000000e+00 + 1.9047048e+03 0.0000000e+00 + 1.9050392e+03 2.0000000e+00 + 1.9053736e+03 1.0000000e+00 + 1.9057080e+03 0.0000000e+00 + 1.9060424e+03 0.0000000e+00 + 1.9063768e+03 0.0000000e+00 + 1.9067112e+03 1.0000000e+00 + 1.9070455e+03 0.0000000e+00 + 1.9073799e+03 0.0000000e+00 + 1.9077143e+03 0.0000000e+00 + 1.9080487e+03 1.0000000e+00 + 1.9083831e+03 1.0000000e+00 + 1.9087175e+03 1.0000000e+00 + 1.9090519e+03 1.0000000e+00 + 1.9093863e+03 0.0000000e+00 + 1.9097206e+03 1.0000000e+00 + 1.9100550e+03 0.0000000e+00 + 1.9103894e+03 1.0000000e+00 + 1.9107238e+03 0.0000000e+00 + 1.9110582e+03 1.0000000e+00 + 1.9113926e+03 0.0000000e+00 + 1.9117270e+03 0.0000000e+00 + 1.9120614e+03 0.0000000e+00 + 1.9123958e+03 1.0000000e+00 + 1.9127301e+03 0.0000000e+00 + 1.9130645e+03 0.0000000e+00 + 1.9133989e+03 0.0000000e+00 + 1.9137333e+03 1.0000000e+00 + 1.9140677e+03 0.0000000e+00 + 1.9144021e+03 1.0000000e+00 + 1.9147365e+03 0.0000000e+00 + 1.9150709e+03 0.0000000e+00 + 1.9154053e+03 0.0000000e+00 + 
1.9157396e+03 0.0000000e+00 + 1.9160740e+03 0.0000000e+00 + 1.9164084e+03 0.0000000e+00 + 1.9167428e+03 1.0000000e+00 + 1.9170772e+03 0.0000000e+00 + 1.9174116e+03 0.0000000e+00 + 1.9177460e+03 0.0000000e+00 + 1.9180804e+03 1.0000000e+00 + 1.9184148e+03 0.0000000e+00 + 1.9187491e+03 0.0000000e+00 + 1.9190835e+03 0.0000000e+00 + 1.9194179e+03 0.0000000e+00 + 1.9197523e+03 0.0000000e+00 + 1.9200867e+03 0.0000000e+00 + 1.9204211e+03 0.0000000e+00 + 1.9207555e+03 0.0000000e+00 + 1.9210899e+03 0.0000000e+00 + 1.9214243e+03 0.0000000e+00 + 1.9217586e+03 0.0000000e+00 + 1.9220930e+03 0.0000000e+00 + 1.9224274e+03 1.0000000e+00 + 1.9227618e+03 1.0000000e+00 + 1.9230962e+03 0.0000000e+00 + 1.9234306e+03 1.0000000e+00 + 1.9237650e+03 0.0000000e+00 + 1.9240994e+03 0.0000000e+00 + 1.9244338e+03 0.0000000e+00 + 1.9247681e+03 0.0000000e+00 + 1.9251025e+03 0.0000000e+00 + 1.9254369e+03 0.0000000e+00 + 1.9257713e+03 0.0000000e+00 + 1.9261057e+03 0.0000000e+00 + 1.9264401e+03 0.0000000e+00 + 1.9267745e+03 0.0000000e+00 + 1.9271089e+03 1.0000000e+00 + 1.9274433e+03 0.0000000e+00 + 1.9277776e+03 0.0000000e+00 + 1.9281120e+03 1.0000000e+00 + 1.9284464e+03 0.0000000e+00 + 1.9287808e+03 0.0000000e+00 + 1.9291152e+03 0.0000000e+00 + 1.9294496e+03 0.0000000e+00 + 1.9297840e+03 0.0000000e+00 + 1.9301184e+03 1.0000000e+00 + 1.9304528e+03 0.0000000e+00 + 1.9307871e+03 1.0000000e+00 + 1.9311215e+03 1.0000000e+00 + 1.9314559e+03 0.0000000e+00 + 1.9317903e+03 2.0000000e+00 + 1.9321247e+03 1.0000000e+00 + 1.9324591e+03 0.0000000e+00 + 1.9327935e+03 2.0000000e+00 + 1.9331279e+03 0.0000000e+00 + 1.9334623e+03 0.0000000e+00 + 1.9337966e+03 1.0000000e+00 + 1.9341310e+03 0.0000000e+00 + 1.9344654e+03 0.0000000e+00 + 1.9347998e+03 1.0000000e+00 + 1.9351342e+03 0.0000000e+00 + 1.9354686e+03 0.0000000e+00 + 1.9358030e+03 2.0000000e+00 + 1.9361374e+03 0.0000000e+00 + 1.9364718e+03 1.0000000e+00 + 1.9368061e+03 0.0000000e+00 + 1.9371405e+03 0.0000000e+00 + 1.9374749e+03 1.0000000e+00 + 1.9378093e+03 
0.0000000e+00 + 1.9381437e+03 0.0000000e+00 + 1.9384781e+03 1.0000000e+00 + 1.9388125e+03 0.0000000e+00 + 1.9391469e+03 0.0000000e+00 + 1.9394813e+03 0.0000000e+00 + 1.9398156e+03 1.0000000e+00 + 1.9401500e+03 1.0000000e+00 + 1.9404844e+03 1.0000000e+00 + 1.9408188e+03 0.0000000e+00 + 1.9411532e+03 0.0000000e+00 + 1.9414876e+03 3.0000000e+00 + 1.9418220e+03 0.0000000e+00 + 1.9421564e+03 2.0000000e+00 + 1.9424908e+03 1.0000000e+00 + 1.9428251e+03 0.0000000e+00 + 1.9431595e+03 0.0000000e+00 + 1.9434939e+03 0.0000000e+00 + 1.9438283e+03 0.0000000e+00 + 1.9441627e+03 0.0000000e+00 + 1.9444971e+03 0.0000000e+00 + 1.9448315e+03 0.0000000e+00 + 1.9451659e+03 0.0000000e+00 + 1.9455003e+03 0.0000000e+00 + 1.9458346e+03 0.0000000e+00 + 1.9461690e+03 0.0000000e+00 + 1.9465034e+03 0.0000000e+00 + 1.9468378e+03 1.0000000e+00 + 1.9471722e+03 1.0000000e+00 + 1.9475066e+03 2.0000000e+00 + 1.9478410e+03 1.0000000e+00 + 1.9481754e+03 0.0000000e+00 + 1.9485098e+03 0.0000000e+00 + 1.9488441e+03 0.0000000e+00 + 1.9491785e+03 0.0000000e+00 + 1.9495129e+03 0.0000000e+00 + 1.9498473e+03 0.0000000e+00 + 1.9501817e+03 0.0000000e+00 + 1.9505161e+03 0.0000000e+00 + 1.9508505e+03 0.0000000e+00 + 1.9511849e+03 0.0000000e+00 + 1.9515193e+03 1.0000000e+00 + 1.9518536e+03 0.0000000e+00 + 1.9521880e+03 0.0000000e+00 + 1.9525224e+03 0.0000000e+00 + 1.9528568e+03 0.0000000e+00 + 1.9531912e+03 0.0000000e+00 + 1.9535256e+03 0.0000000e+00 + 1.9538600e+03 0.0000000e+00 + 1.9541944e+03 0.0000000e+00 + 1.9545288e+03 0.0000000e+00 + 1.9548631e+03 0.0000000e+00 + 1.9551975e+03 0.0000000e+00 + 1.9555319e+03 0.0000000e+00 + 1.9558663e+03 0.0000000e+00 + 1.9562007e+03 0.0000000e+00 + 1.9565351e+03 0.0000000e+00 + 1.9568695e+03 0.0000000e+00 + 1.9572039e+03 0.0000000e+00 + 1.9575383e+03 0.0000000e+00 + 1.9578726e+03 1.0000000e+00 + 1.9582070e+03 0.0000000e+00 + 1.9585414e+03 0.0000000e+00 + 1.9588758e+03 0.0000000e+00 + 1.9592102e+03 0.0000000e+00 + 1.9595446e+03 0.0000000e+00 + 1.9598790e+03 0.0000000e+00 + 
1.9602134e+03 0.0000000e+00 + 1.9605478e+03 1.0000000e+00 + 1.9608821e+03 0.0000000e+00 + 1.9612165e+03 0.0000000e+00 + 1.9615509e+03 0.0000000e+00 + 1.9618853e+03 0.0000000e+00 + 1.9622197e+03 1.0000000e+00 diff --git a/newt/experiments/coal/coal.py b/newt/experiments/coal/coal.py new file mode 100644 index 0000000..a44abbd --- /dev/null +++ b/newt/experiments/coal/coal.py @@ -0,0 +1,116 @@ +import sys +import newt +import objax +import numpy as np +import pandas as pd +import time +import pickle + +plot_final = False +plot_intermediate = False + +print('loading coal data ...') +if plot_final: + disaster_timings = pd.read_csv('../data/coal.txt', header=None).values[:, 0] +cvind = np.loadtxt('cvind.csv').astype(int) +# 10-fold cross-validation +nt = np.floor(cvind.shape[0]/10).astype(int) +cvind = np.reshape(cvind[:10*nt], (10, nt)) + +D = np.loadtxt('binned.csv') +x = D[:, 0:1] +y = D[:, 1:] +N = D.shape[0] +N_batch = 300 +M = 15 +z = np.linspace(np.min(x), np.max(x), M) +num_time_bins = x.shape[0] +binsize = (max(x) - min(x)) / num_time_bins + +np.random.seed(123) +# meanval = np.log(len(disaster_timings)/num_time_bins) # TODO: incorporate mean + +if len(sys.argv) > 1: + method = int(sys.argv[1]) + fold = int(sys.argv[2]) +else: + method = 0 + fold = 0 + +if len(sys.argv) > 3: + baseline = int(sys.argv[3]) +else: + baseline = 0 + +print('method number', method) +print('batch number', fold) + +# Get training and test indices +ind_test = cvind[fold, :] +ind_train = np.setdiff1d(cvind, ind_test) + +x_train = x[ind_train, ...] # 90/10 train/test split +x_test = x[ind_test, ...] +y_train = y[ind_train, ...] +y_test = y[ind_test, ...] 
+ +var_f = 1.0 # GP variance +len_f = 4.0 # GP lengthscale + +kern = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) +lik = newt.likelihoods.Poisson(binsize=binsize) + +if method == 0: + inf = newt.inference.Taylor() +elif method == 1: + inf = newt.inference.PosteriorLinearisation() +elif method == 2: + inf = newt.inference.ExpectationPropagation(power=1) +elif method == 3: + inf = newt.inference.ExpectationPropagation(power=0.5) +elif method == 4: + inf = newt.inference.ExpectationPropagation(power=0.01) +elif method == 5: + inf = newt.inference.VariationalInference() + +if baseline: + model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train) +else: + model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train, Z=z) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.2 +lr_newton = .5 +iters = 500 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = energy(model) # compute energy and its gradients w.r.t. 
hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +t0 = time.time() +nlpd = model.negative_log_predictive_density(X=x_test, Y=y_test) +t1 = time.time() +print('NLPD: %1.2f' % nlpd) + +if baseline: + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) +else: + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) diff --git a/newt/experiments/coal/coal.slrm b/newt/experiments/coal/coal.slrm new file mode 100644 index 0000000..cd15d66 --- /dev/null +++ b/newt/experiments/coal/coal.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 12:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=1000 +#SBATCH --array=0-5 +#SBATCH -o coal-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. 
+for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python coal.py $SLURM_ARRAY_TASK_ID $run +done \ No newline at end of file diff --git a/newt/experiments/coal/coal_baseline.slrm b/newt/experiments/coal/coal_baseline.slrm new file mode 100644 index 0000000..1b4ecb5 --- /dev/null +++ b/newt/experiments/coal/coal_baseline.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 12:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=1000 +#SBATCH --array=0-5 +#SBATCH -o coal-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. +for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python coal.py $SLURM_ARRAY_TASK_ID $run 1 +done \ No newline at end of file diff --git a/newt/experiments/coal/cvind.csv b/newt/experiments/coal/cvind.csv new file mode 100644 index 0000000..fe9460d --- /dev/null +++ b/newt/experiments/coal/cvind.csv @@ -0,0 +1,333 @@ + 1.2200000e+02 + 9.8000000e+01 + 1.7300000e+02 + 3.3200000e+02 + 2.9300000e+02 + 2.4300000e+02 + 2.8000000e+02 + 3.1000000e+01 + 3.9000000e+01 + 2.1000000e+01 + 2.6000000e+02 + 3.0200000e+02 + 1.7400000e+02 + 3.3000000e+01 + 1.5100000e+02 + 9.1000000e+01 + 3.0000000e+02 + 1.6900000e+02 + 3.2400000e+02 + 3.0300000e+02 + 9.0000000e+01 + 1.4500000e+02 + 1.1800000e+02 + 1.8900000e+02 + 3.1800000e+02 + 1.1100000e+02 + 1.2600000e+02 + 2.4000000e+02 + 1.6400000e+02 + 3.0500000e+02 + 3.4000000e+01 + 5.0000000e+00 + 2.6400000e+02 + 1.2000000e+02 + 2.7000000e+02 + 2.2700000e+02 + 1.5900000e+02 + 2.3500000e+02 + 5.4000000e+01 + 1.4700000e+02 + 2.0000000e+00 + 9.5000000e+01 + 1.6500000e+02 + 3.1900000e+02 + 1.3500000e+02 + 2.6800000e+02 + 6.7000000e+01 + 1.5000000e+01 + 1.3900000e+02 + 1.3400000e+02 + 6.8000000e+01 + 
3.0900000e+02 + 1.1400000e+02 + 1.0000000e+01 + 1.0000000e+02 + 5.3000000e+01 + 1.0400000e+02 + 2.9500000e+02 + 1.7500000e+02 + 2.1700000e+02 + 2.9000000e+01 + 3.2000000e+02 + 1.3200000e+02 + 2.9000000e+02 + 1.8500000e+02 + 1.4800000e+02 + 2.2300000e+02 + 4.4000000e+01 + 1.8300000e+02 + 2.1500000e+02 + 7.6000000e+01 + 2.7900000e+02 + 2.1000000e+02 + 2.3400000e+02 + 5.9000000e+01 + 2.1600000e+02 + 2.1800000e+02 + 1.1200000e+02 + 2.1300000e+02 + 2.5300000e+02 + 1.7000000e+02 + 2.4800000e+02 + 1.4600000e+02 + 2.9100000e+02 + 1.4900000e+02 + 1.6200000e+02 + 7.3000000e+01 + 7.7000000e+01 + 7.1000000e+01 + 6.1000000e+01 + 6.9000000e+01 + 2.2800000e+02 + 1.2800000e+02 + 2.6500000e+02 + 2.3100000e+02 + 2.4100000e+02 + 1.0600000e+02 + 1.3300000e+02 + 5.0000000e+01 + 3.2000000e+01 + 6.0000000e+00 + 8.5000000e+01 + 3.3000000e+02 + 1.8100000e+02 + 2.3600000e+02 + 2.1100000e+02 + 1.9500000e+02 + 2.2000000e+02 + 1.0200000e+02 + 3.7000000e+01 + 2.3700000e+02 + 2.7700000e+02 + 2.6600000e+02 + 9.9000000e+01 + 1.5600000e+02 + 5.7000000e+01 + 7.5000000e+01 + 2.0400000e+02 + 1.4200000e+02 + 8.0000000e+01 + 1.7100000e+02 + 2.5800000e+02 + 1.8600000e+02 + 1.5800000e+02 + 2.0100000e+02 + 8.8000000e+01 + 4.1000000e+01 + 1.6100000e+02 + 3.2100000e+02 + 2.7000000e+01 + 2.5700000e+02 + 3.2500000e+02 + 1.2700000e+02 + 1.4400000e+02 + 1.6300000e+02 + 2.2900000e+02 + 3.2700000e+02 + 1.5000000e+02 + 1.6000000e+01 + 2.3800000e+02 + 2.2200000e+02 + 1.3000000e+02 + 3.3100000e+02 + 3.1600000e+02 + 2.1900000e+02 + 1.9300000e+02 + 4.0000000e+01 + 2.2600000e+02 + 1.1900000e+02 + 4.6000000e+01 + 1.9400000e+02 + 1.1000000e+02 + 1.7900000e+02 + 3.1500000e+02 + 2.4900000e+02 + 9.7000000e+01 + 2.1200000e+02 + 2.9900000e+02 + 7.9000000e+01 + 2.8300000e+02 + 1.3000000e+01 + 1.9200000e+02 + 2.4600000e+02 + 2.5400000e+02 + 1.5500000e+02 + 4.5000000e+01 + 2.9400000e+02 + 1.5400000e+02 + 2.7200000e+02 + 5.5000000e+01 + 2.8200000e+02 + 2.9800000e+02 + 6.2000000e+01 + 2.3900000e+02 + 1.9600000e+02 + 1.9700000e+02 + 
1.4300000e+02 + 3.1100000e+02 + 2.5200000e+02 + 3.0400000e+02 + 3.2600000e+02 + 1.0300000e+02 + 9.2000000e+01 + 2.0300000e+02 + 1.1600000e+02 + 2.5100000e+02 + 7.0000000e+00 + 1.8000000e+02 + 6.6000000e+01 + 8.3000000e+01 + 1.3800000e+02 + 2.0700000e+02 + 8.9000000e+01 + 9.6000000e+01 + 1.6800000e+02 + 2.8900000e+02 + 2.4700000e+02 + 1.3700000e+02 + 8.2000000e+01 + 5.8000000e+01 + 2.0900000e+02 + 2.3000000e+02 + 1.0500000e+02 + 2.3200000e+02 + 2.8500000e+02 + 7.8000000e+01 + 2.8600000e+02 + 1.4100000e+02 + 2.0800000e+02 + 2.5500000e+02 + 1.8700000e+02 + 3.2900000e+02 + 4.0000000e+00 + 2.0000000e+02 + 4.7000000e+01 + 1.7800000e+02 + 3.1300000e+02 + 1.7600000e+02 + 2.7100000e+02 + 1.0700000e+02 + 5.2000000e+01 + 2.8000000e+01 + 2.0000000e+01 + 3.2800000e+02 + 3.1000000e+02 + 2.4000000e+01 + 2.5600000e+02 + 5.1000000e+01 + 2.6700000e+02 + 3.0100000e+02 + 1.8400000e+02 + 1.0800000e+02 + 3.6000000e+01 + 2.7800000e+02 + 6.3000000e+01 + 3.0000000e+01 + 4.8000000e+01 + 2.3300000e+02 + 2.9700000e+02 + 2.7400000e+02 + 2.6900000e+02 + 3.0800000e+02 + 2.4500000e+02 + 1.7700000e+02 + 2.6000000e+01 + 2.8100000e+02 + 1.8200000e+02 + 1.0900000e+02 + 6.0000000e+01 + 8.7000000e+01 + 4.9000000e+01 + 8.6000000e+01 + 2.5000000e+01 + 4.2000000e+01 + 1.2300000e+02 + 1.9100000e+02 + 2.7300000e+02 + 9.3000000e+01 + 1.8800000e+02 + 1.6000000e+02 + 1.8000000e+01 + 1.0100000e+02 + 1.9900000e+02 + 4.3000000e+01 + 2.6300000e+02 + 1.2900000e+02 + 1.4000000e+01 + 3.1400000e+02 + 2.4200000e+02 + 3.2300000e+02 + 2.8800000e+02 + 2.0200000e+02 + 7.2000000e+01 + 0.0000000e+00 + 1.2400000e+02 + 3.0700000e+02 + 1.9800000e+02 + 3.0600000e+02 + 1.7200000e+02 + 3.5000000e+01 + 3.1700000e+02 + 1.1500000e+02 + 8.1000000e+01 + 3.2200000e+02 + 7.0000000e+01 + 2.1400000e+02 + 2.2000000e+01 + 1.4000000e+02 + 2.8700000e+02 + 1.2500000e+02 + 1.3600000e+02 + 2.0600000e+02 + 2.6100000e+02 + 2.9200000e+02 + 6.4000000e+01 + 2.7600000e+02 + 1.5700000e+02 + 1.5200000e+02 + 2.7500000e+02 + 2.8400000e+02 + 2.2400000e+02 + 
1.0000000e+00 + 1.3100000e+02 + 2.6200000e+02 + 1.1300000e+02 + 3.0000000e+00 + 1.7000000e+01 + 8.4000000e+01 + 2.2100000e+02 + 2.4400000e+02 + 7.4000000e+01 + 1.9000000e+02 + 2.3000000e+01 + 9.4000000e+01 + 2.0500000e+02 + 1.6600000e+02 + 1.5300000e+02 + 3.8000000e+01 + 1.6700000e+02 + 1.2000000e+01 + 8.0000000e+00 + 6.5000000e+01 + 1.9000000e+01 + 5.6000000e+01 + 1.2100000e+02 + 2.5000000e+02 + 9.0000000e+00 + 1.1000000e+01 + 3.1200000e+02 + 2.9600000e+02 + 2.2500000e+02 + 2.5900000e+02 + 1.1700000e+02 diff --git a/newt/experiments/coal/results.py b/newt/experiments/coal/results.py new file mode 100644 index 0000000..6ecc9e1 --- /dev/null +++ b/newt/experiments/coal/results.py @@ -0,0 +1,26 @@ +import pickle +import numpy as np + +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) + + +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +print('baselines:') +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) diff --git a/newt/experiments/electricity/electricity.py b/newt/experiments/electricity/electricity.py new file mode 100644 index 0000000..462500c --- /dev/null +++ b/newt/experiments/electricity/electricity.py @@ -0,0 +1,147 @@ +import sys +import newt +import objax +import numpy as np +import pandas as pd +import matplotlib.pyplot as plt +import time +import pickle + +plot_intermediate = False + +print('loading data ...') +np.random.seed(99) +# N = 52 * 10080 # 10080 = one week, 2049280 total points +N = 26 
* 10080 # 6 months +electricity_data = pd.read_csv('./electricity.csv', sep=' ', header=None, engine='python').values[:N, :] +x = electricity_data[:, 0][:, None] +y = electricity_data[:, 1][:, None] +print('N =', N) + +ind_shuffled = np.random.permutation(N) +ind_split = np.stack(np.split(ind_shuffled, 10)) # 10 random batches of data indices + +if len(sys.argv) > 1: + plot_final = False + method = int(sys.argv[1]) + fold = int(sys.argv[2]) +else: + plot_final = True + method = 4 + fold = 0 + +if len(sys.argv) > 3: + baseline = int(sys.argv[3]) +else: + baseline = 0 + +print('method number', method) +print('batch number', fold) + +# Get training and test indices +ind_test = ind_split[fold] # np.sort(ind_shuffled[:N//10]) +ind_train = np.concatenate(ind_split[np.arange(10) != fold]) + +x_train = x[ind_train] # 90/10 train/test split +x_test = x[ind_test] +y_train = y[ind_train] +y_test = y[ind_test] +# batch_size = 100000 # x_train.shape[0] +batch_size = 50000 # x_train.shape[0] +# M = 100000 +M = 50000 +z = np.linspace(x[0], x[-1], M) + +var_y = .1 +var_f = 1. # GP variance +len_f = 1. # GP lengthscale +period = 1. # period of quasi-periodic component +len_p = 5. # lengthscale of quasi-periodic component +var_f_mat = 1. +len_f_mat = 1. 
+ +kern1 = newt.kernels.Matern32(variance=var_f_mat, lengthscale=len_f_mat) +kern2 = newt.kernels.QuasiPeriodicMatern12(variance=var_f, + lengthscale_periodic=len_p, + period=period, + lengthscale_matern=len_f) +kern = newt.kernels.Sum([kern1, kern2]) + +lik = newt.likelihoods.Gaussian(variance=var_y) + +if method == 0: + inf = newt.inference.Taylor() +elif method == 1: + inf = newt.inference.PosteriorLinearisation() +elif method == 2: + inf = newt.inference.ExpectationPropagation(power=1) +elif method == 3: + inf = newt.inference.ExpectationPropagation(power=0.5) +elif method == 4: + inf = newt.inference.ExpectationPropagation(power=0.01) +elif method == 5: + inf = newt.inference.VariationalInference() + +if baseline: + model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train) +else: + model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=x_train, Y=y_train, Z=z) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.1 +lr_newton = 0.1 +iters = 200 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = energy(model) # compute energy and its gradients w.r.t. 
hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +# calculate posterior predictive distribution via filtering and smoothing at train & test locations: +print('calculating the posterior predictive distribution ...') +t0 = time.time() +nlpd = model.negative_log_predictive_density(X=x_test, Y=y_test) +t1 = time.time() +print('NLPD: %1.2f' % nlpd) + +if baseline: + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) +else: + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) + + +# if plot_final: +# lb = posterior_mean[:, 0, 0] - 1.96 * posterior_cov[:, 0, 0]**0.5 +# ub = posterior_mean[:, 0, 0] + 1.96 * posterior_cov[:, 0, 0]**0.5 +# t_test = model.t_all[model.test_id, 0] +# +# print('plotting ...') +# plt.figure(1, figsize=(12, 5)) +# plt.clf() +# plt.plot(x, y, 'b.', label='training observations', markersize=4) +# plt.plot(x_test, y_test, 'r.', alpha=0.5, label='test observations', markersize=4) +# plt.plot(t_test, posterior_mean[:, 0], 'g', label='posterior mean') +# plt.plot(z, inducing_mean[:, 0], 'g.', label='inducing mean') +# plt.fill_between(t_test, lb, ub, color='g', alpha=0.05, label='95% confidence') +# plt.xlim(t_test[0], t_test[-1]) +# plt.legend() +# plt.title('GP regression via Kalman smoothing. 
Test NLPD: %1.2f' % nlpd) +# plt.xlabel('time, $t$') +# plt.show() diff --git a/newt/experiments/electricity/electricity.slrm b/newt/experiments/electricity/electricity.slrm new file mode 100644 index 0000000..6f613e2 --- /dev/null +++ b/newt/experiments/electricity/electricity.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 24:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=50000 +#SBATCH --array=0-5 +#SBATCH -o electricity-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. +for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python electricity.py $SLURM_ARRAY_TASK_ID $run +done \ No newline at end of file diff --git a/newt/experiments/electricity/electricity_baselines.slrm b/newt/experiments/electricity/electricity_baselines.slrm new file mode 100644 index 0000000..f18bf1e --- /dev/null +++ b/newt/experiments/electricity/electricity_baselines.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 24:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=50000 +#SBATCH --array=0-5 +#SBATCH -o electricity-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. 
+for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python electricity.py $SLURM_ARRAY_TASK_ID $run 1 +done \ No newline at end of file diff --git a/newt/experiments/electricity/results.py b/newt/experiments/electricity/results.py new file mode 100644 index 0000000..caca00b --- /dev/null +++ b/newt/experiments/electricity/results.py @@ -0,0 +1,25 @@ +import pickle +import numpy as np + +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) + +print('baselines:') +method_nlpd = np.zeros([6, 10]) +for method in range(6): + for fold in range(10): + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + method_nlpd[method, fold] = pickle.load(fp) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) diff --git a/newt/experiments/motorcycle/cvind.csv b/newt/experiments/motorcycle/cvind.csv new file mode 100644 index 0000000..61802ad --- /dev/null +++ b/newt/experiments/motorcycle/cvind.csv @@ -0,0 +1,133 @@ + 1.2200000e+02 + 9.8000000e+01 + 3.1000000e+01 + 3.9000000e+01 + 2.1000000e+01 + 3.3000000e+01 + 9.1000000e+01 + 9.0000000e+01 + 1.1800000e+02 + 1.1100000e+02 + 1.2600000e+02 + 3.4000000e+01 + 5.0000000e+00 + 1.2000000e+02 + 5.4000000e+01 + 2.0000000e+00 + 9.5000000e+01 + 6.7000000e+01 + 1.5000000e+01 + 6.8000000e+01 + 1.1400000e+02 + 1.0000000e+01 + 1.0000000e+02 + 5.3000000e+01 + 1.0400000e+02 + 2.9000000e+01 + 1.3200000e+02 + 4.4000000e+01 + 7.6000000e+01 + 5.9000000e+01 + 1.1200000e+02 + 7.3000000e+01 + 7.7000000e+01 + 7.1000000e+01 + 6.1000000e+01 + 6.9000000e+01 + 
1.2800000e+02 + 1.0600000e+02 + 5.0000000e+01 + 3.2000000e+01 + 6.0000000e+00 + 8.5000000e+01 + 1.0200000e+02 + 3.7000000e+01 + 9.9000000e+01 + 5.7000000e+01 + 7.5000000e+01 + 8.0000000e+01 + 8.8000000e+01 + 4.1000000e+01 + 2.7000000e+01 + 1.2700000e+02 + 1.6000000e+01 + 1.3000000e+02 + 4.0000000e+01 + 1.1900000e+02 + 4.6000000e+01 + 1.1000000e+02 + 9.7000000e+01 + 7.9000000e+01 + 1.3000000e+01 + 4.5000000e+01 + 5.5000000e+01 + 6.2000000e+01 + 1.0300000e+02 + 9.2000000e+01 + 1.1600000e+02 + 7.0000000e+00 + 6.6000000e+01 + 8.3000000e+01 + 8.9000000e+01 + 9.6000000e+01 + 8.2000000e+01 + 5.8000000e+01 + 1.0500000e+02 + 7.8000000e+01 + 4.0000000e+00 + 4.7000000e+01 + 1.0700000e+02 + 5.2000000e+01 + 2.8000000e+01 + 2.0000000e+01 + 2.4000000e+01 + 5.1000000e+01 + 1.0800000e+02 + 3.6000000e+01 + 6.3000000e+01 + 3.0000000e+01 + 4.8000000e+01 + 2.6000000e+01 + 1.0900000e+02 + 6.0000000e+01 + 8.7000000e+01 + 4.9000000e+01 + 8.6000000e+01 + 2.5000000e+01 + 4.2000000e+01 + 1.2300000e+02 + 9.3000000e+01 + 1.8000000e+01 + 1.0100000e+02 + 4.3000000e+01 + 1.2900000e+02 + 1.4000000e+01 + 7.2000000e+01 + 0.0000000e+00 + 1.2400000e+02 + 3.5000000e+01 + 1.1500000e+02 + 8.1000000e+01 + 7.0000000e+01 + 2.2000000e+01 + 1.2500000e+02 + 6.4000000e+01 + 1.0000000e+00 + 1.3100000e+02 + 1.1300000e+02 + 3.0000000e+00 + 1.7000000e+01 + 8.4000000e+01 + 7.4000000e+01 + 2.3000000e+01 + 9.4000000e+01 + 3.8000000e+01 + 1.2000000e+01 + 8.0000000e+00 + 6.5000000e+01 + 1.9000000e+01 + 5.6000000e+01 + 1.2100000e+02 + 9.0000000e+00 + 1.1000000e+01 + 1.1700000e+02 diff --git a/newt/experiments/motorcycle/motorcycle.py b/newt/experiments/motorcycle/motorcycle.py new file mode 100644 index 0000000..ee15f87 --- /dev/null +++ b/newt/experiments/motorcycle/motorcycle.py @@ -0,0 +1,170 @@ +import newt +import objax +import numpy as np +import matplotlib.pyplot as plt +import time +import pickle +from sklearn.preprocessing import StandardScaler +import sys + +plot_intermediate = False + +print('loading data 
...') +D = np.loadtxt('../../data/mcycle.csv', delimiter=',') +X = D[:, 1:2] +Y = D[:, 2:] +N_batch = 100 +M = 30 + +# Standardize +X_scaler = StandardScaler().fit(X) +y_scaler = StandardScaler().fit(Y) +Xall = X_scaler.transform(X) +Yall = y_scaler.transform(Y) +Z = np.linspace(np.min(Xall), np.max(Xall), M) +x_plot = np.linspace(np.min(Xall)-0.2, np.max(Xall)+0.2, 200) + +# Load cross-validation indices +cvind = np.loadtxt('cvind.csv').astype(int) + +# 10-fold cross-validation setup +nt = np.floor(cvind.shape[0]/10).astype(int) +cvind = np.reshape(cvind[:10*nt], (10, nt)) + +np.random.seed(123) + +if len(sys.argv) > 1: + method = int(sys.argv[1]) + fold = int(sys.argv[2]) + plot_final = False +else: + method = 4 + fold = 8 + plot_final = True + +if len(sys.argv) > 3: + baseline = int(sys.argv[3]) +else: + baseline = 0 + +print('method number', method) +print('batch number', fold) + +# Get training and test indices +test = cvind[fold, :] +train = np.setdiff1d(cvind, test) + +# Set training and test data +X = Xall[train, :] +Y = Yall[train, :] +XT = Xall[test, :] +YT = Yall[test, :] +N = X.shape[0] + +var_f1 = 3. # GP variance +len_f1 = 1. # GP lengthscale +var_f2 = 3. # GP variance +len_f2 = 1. 
# GP lengthscale + +kern1 = newt.kernels.Matern32(variance=var_f1, lengthscale=len_f1) +kern2 = newt.kernels.Matern32(variance=var_f2, lengthscale=len_f2) +kern = newt.kernels.Independent([kern1, kern2]) +lik = newt.likelihoods.HeteroscedasticNoise() + +lr_adam = 0.025 +# lr_adam = 0.01 +lr_newton = .05 +# lr_newton = 0.01 +iters = 500 + +if method == 0: + inf = newt.inference.Taylor() +elif method == 1: + inf = newt.inference.PosteriorLinearisation(cubature=newt.cubature.Unscented()) +elif method == 2: + inf = newt.inference.PosteriorLinearisation() +elif method == 3: + inf = newt.inference.ExpectationPropagation(power=1) +elif method == 4: + inf = newt.inference.ExpectationPropagation(power=0.5) +elif method == 5: + inf = newt.inference.ExpectationPropagation(power=0.01) +elif method == 6: + inf = newt.inference.VariationalInference() + +if baseline: + model = newt.models.MarkovGP(X=X, Y=Y, kernel=kern, likelihood=lik) +else: + model = newt.models.SparseMarkovGP(X=X, Y=Y, Z=Z, kernel=kern, likelihood=lik) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = energy(model) # compute energy and its gradients w.r.t. 
hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + + +print('optimising the hyperparameters ...') +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +# calculate posterior predictive distribution via filtering and smoothing at train & test locations: +print('calculating the posterior predictive distribution ...') +t0 = time.time() +posterior_mean, posterior_var = model.predict(X=x_plot) +nlpd = model.negative_log_predictive_density(X=XT, Y=YT) +t1 = time.time() +print('prediction time: %2.2f secs' % (t1-t0)) +print('NLPD: %1.2f' % nlpd) + +if baseline: + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) +else: + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) + +# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: +# nlpd_show = pickle.load(fp) +# print(nlpd_show) + +if plot_final: + x_pred = X_scaler.inverse_transform(x_plot) + link = model.likelihood.link_fn + lb = posterior_mean[:, 0] - np.sqrt(posterior_var[:, 0] + link(posterior_mean[:, 1]) ** 2) * 1.96 + ub = posterior_mean[:, 0] + np.sqrt(posterior_var[:, 0] + link(posterior_mean[:, 1]) ** 2) * 1.96 + post_mean = y_scaler.inverse_transform(posterior_mean[:, 0]) + lb = y_scaler.inverse_transform(lb) + ub = y_scaler.inverse_transform(ub) + + print('plotting ...') + plt.figure(1, figsize=(12, 5)) + plt.clf() + plt.plot(X_scaler.inverse_transform(X), y_scaler.inverse_transform(Y), 'k.', label='train') + plt.plot(X_scaler.inverse_transform(XT), y_scaler.inverse_transform(YT), 'r.', label='test') + plt.plot(x_pred, post_mean, 'c', label='posterior mean') + plt.fill_between(x_pred, lb, ub, color='c', alpha=0.05, label='95% confidence') + plt.xlim(x_pred[0], x_pred[-1]) + if 
hasattr(model, 'Z'): + plt.plot(X_scaler.inverse_transform(model.Z.value[:, 0]), + (np.min(lb) - 5) * np.ones_like(model.Z.value[:, 0]), + 'c^', + markersize=4) + plt.legend() + plt.title('Heteroscedastic Noise Model via Kalman smoothing (motorcycle crash data)') + plt.xlabel('time (milliseconds)') + plt.ylabel('accelerometer reading') + plt.show() diff --git a/newt/experiments/motorcycle/motorcycle.slrm b/newt/experiments/motorcycle/motorcycle.slrm new file mode 100644 index 0000000..08c1696 --- /dev/null +++ b/newt/experiments/motorcycle/motorcycle.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 12:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=1000 +#SBATCH --array=0-6 +#SBATCH -o motorcycle-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. +for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python motorcycle.py $SLURM_ARRAY_TASK_ID $run +done \ No newline at end of file diff --git a/newt/experiments/motorcycle/motorcycle_baselines.slrm b/newt/experiments/motorcycle/motorcycle_baselines.slrm new file mode 100644 index 0000000..932be49 --- /dev/null +++ b/newt/experiments/motorcycle/motorcycle_baselines.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 12:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=1000 +#SBATCH --array=0-6 +#SBATCH -o motorcycle_baselines-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and run range +echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM + +# Run the loop of runs for this task. 
+for (( run=$START_NUM; run<=END_NUM; run++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run + srun python motorcycle.py $SLURM_ARRAY_TASK_ID $run 1 +done \ No newline at end of file diff --git a/newt/experiments/motorcycle/results.py b/newt/experiments/motorcycle/results.py new file mode 100644 index 0000000..1acf467 --- /dev/null +++ b/newt/experiments/motorcycle/results.py @@ -0,0 +1,36 @@ +import pickle +import numpy as np + +method_nlpd = np.zeros([7, 10]) +for method in range(7): + for fold in range(10): + with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + result = pickle.load(fp) + print(result) + method_nlpd[method, fold] = result + +# for fold in range(10): +# with open("output/" + str(15) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: +# print(pickle.load(fp)) + +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +# print(np.nanmean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) +# print(np.nanstd(method_nlpd, axis=1)) + + +method_nlpd = np.zeros([7, 10]) +for method in range(7): + for fold in range(10): + with open("output/baseline_" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + result = pickle.load(fp) + print(result) + method_nlpd[method, fold] = result + +print('baselines:') +np.set_printoptions(precision=3) +print(np.mean(method_nlpd, axis=1)) +np.set_printoptions(precision=2) +print(np.std(method_nlpd, axis=1)) diff --git a/newt/experiments/rainforest/rainforest.py b/newt/experiments/rainforest/rainforest.py new file mode 100644 index 0000000..d457a59 --- /dev/null +++ b/newt/experiments/rainforest/rainforest.py @@ -0,0 +1,98 @@ +import newt +import objax +import numpy as np +import time +import pickle +import sys + +print('loading rainforest data ...') +data = np.loadtxt('../../data/TRI2TU-data.csv', delimiter=',') + +spatial_points = np.array([50, 100, 150, 200, 250, 300, 350, 400, 450, 500]) + +if len(sys.argv) > 1: 
+ model_type = int(sys.argv[1]) + nr_ind = int(sys.argv[2]) + fold = int(sys.argv[3]) +else: + model_type = 0 + nr_ind = 1 + nr = 100 # spatial grid point (y-axis) + fold = 0 + +nr = spatial_points[nr_ind] +nt = 200 # temporal grid points (x-axis) +scale = 1000 / nt + +t, r, Y_ = newt.utils.discretegrid(data, [0, 1000, 0, 500], [nt, nr]) +t_flat, r_flat, Y_flat = t.flatten(), r.flatten(), Y_.flatten() + +N = nr * nt # number of data points + +# sort out the train/test split +np.random.seed(99) +ind_shuffled = np.random.permutation(N) +ind_split = np.stack(np.split(ind_shuffled, 10)) # 10 random batches of data indices +test_ind = ind_split[fold] # test_ind = np.random.permutation(N)[:N//10] +t_test = t_flat[test_ind] +r_test = r_flat[test_ind] +Y_test = Y_flat[test_ind] +Y_flat[test_ind] = np.nan +Y = Y_flat.reshape(nt, nr) + +# put test points on a grid to speed up prediction +X_test = np.concatenate([t_test[:, None], r_test[:, None]], axis=1) +t_test, r_test, Y_test = newt.utils.create_spatiotemporal_grid(X_test, Y_test) + +var_f = 1. # GP variance +len_f = 10. 
# lengthscale + +kern = newt.kernels.SpatialMatern32(variance=var_f, lengthscale=len_f, z=r[0, ...], sparse=False) +lik = newt.likelihoods.Poisson() +if model_type == 0: + model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=t, R=r, Y=Y) +elif model_type == 1: + model = newt.models.MarkovGPMeanField(kernel=kern, likelihood=lik, X=t, R=r, Y=Y) +elif model_type == 2: + model = newt.models.InfiniteHorizonGP(kernel=kern, likelihood=lik, X=t, R=r, Y=Y) + +print('num spatial pts:', nr) +print(model) + +inf = newt.inference.VariationalInference(cubature=newt.cubature.Unscented()) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.2 +lr_newton = 0.2 +iters = 100 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = energy(model) # compute energy and its gradients w.r.t. hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d: energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +# calculate posterior predictive distribution via filtering and smoothing at train & test locations: +print('calculating the posterior predictive distribution ...') +t0 = time.time() +nlpd = model.negative_log_predictive_density(X=t_test, R=r_test, Y=Y_test) +t1 = time.time() +print('prediction time: %2.2f secs' % (t1-t0)) +print('nlpd: %2.3f' % nlpd) + +with open("output/" + str(model_type) + "_" + str(nr_ind) + "_" + str(fold) + "_nlpd.txt", "wb") as fp: + pickle.dump(nlpd, fp) diff --git a/newt/experiments/rainforest/rainforest0.slrm b/newt/experiments/rainforest/rainforest0.slrm new file mode 100644 index 0000000..b1f1bde --- /dev/null +++ b/newt/experiments/rainforest/rainforest0.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 
24:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=2000 +#SBATCH --array=0-9 +#SBATCH -o rainforest1-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and fold range +echo This is task $SLURM_ARRAY_TASK_ID, which will run folds $START_NUM to $END_NUM + +# Run the loop of folds for this task. +for (( fold=$START_NUM; fold<=END_NUM; fold++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, fold number $fold + srun python rainforest.py 0 $SLURM_ARRAY_TASK_ID $fold +done \ No newline at end of file diff --git a/newt/experiments/rainforest/rainforest1.slrm b/newt/experiments/rainforest/rainforest1.slrm new file mode 100644 index 0000000..500ccd6 --- /dev/null +++ b/newt/experiments/rainforest/rainforest1.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 24:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=2000 +#SBATCH --array=0-9 +#SBATCH -o rainforest1-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and fold range +echo This is task $SLURM_ARRAY_TASK_ID, which will run folds $START_NUM to $END_NUM + +# Run the loop of folds for this task. +for (( fold=$START_NUM; fold<=END_NUM; fold++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, fold number $fold + srun python rainforest.py 1 $SLURM_ARRAY_TASK_ID $fold +done \ No newline at end of file diff --git a/newt/experiments/rainforest/rainforest2.slrm b/newt/experiments/rainforest/rainforest2.slrm new file mode 100644 index 0000000..f195f52 --- /dev/null +++ b/newt/experiments/rainforest/rainforest2.slrm @@ -0,0 +1,21 @@ +#!/bin/bash -l +#SBATCH -p short +#SBATCH -t 24:00:00 +#SBATCH -n 1 +#SBATCH --mem-per-cpu=2000 +#SBATCH --array=0-9 +#SBATCH -o rainforest2-%a.out +module load miniconda +source activate venv + +START_NUM=0 +END_NUM=9 + +# Print the task and fold range +echo This is task $SLURM_ARRAY_TASK_ID, which will run folds $START_NUM to $END_NUM + +# Run the loop of folds for this task. 
+for (( fold=$START_NUM; fold<=END_NUM; fold++ )); do + echo This is SLURM task $SLURM_ARRAY_TASK_ID, fold number $fold + srun python rainforest.py 2 $SLURM_ARRAY_TASK_ID $fold +done \ No newline at end of file diff --git a/newt/experiments/rainforest/results.py b/newt/experiments/rainforest/results.py new file mode 100644 index 0000000..a928b84 --- /dev/null +++ b/newt/experiments/rainforest/results.py @@ -0,0 +1,77 @@ +import pickle +import numpy as np +import matplotlib.pyplot as plt +import tikzplotlib + +color_palette = { + 'black': '#000000', + 'orange': '#E69F00', + 'blue': '#56B4E9', + 'green': '#009E73', + 'orange': '#F0E442', + 'dark_blue': '#0072B2', + 'dark_orange': '#D55E00', + 'pink': '#CC79A7', + 'white': '#111111', + 'grey': 'grey' +} + +# timings = np.zeros([3, 10]) +num_complete = 7 +timings = np.zeros([3, num_complete]) +for model_type in range(3): + # for nr_ind in range(10): + for nr_ind in range(num_complete): + with open("output/cpu_" + str(model_type) + "_" + str(nr_ind) + "_time.txt", "rb") as fp: + result = pickle.load(fp) + # print(result) + timings[model_type, nr_ind] = result + +num_complete_nlpd = 7 +nlpd = np.zeros([3, num_complete_nlpd]) +for model_type in range(3): + # for nr_ind in range(10): + for nr_ind in range(num_complete_nlpd): + nlpdf = 0. 
+ for fold in range(10): + print("output/" + str(model_type) + "_" + str(nr_ind) + "_" + str(fold) + "_nlpd.txt") + with open("output/" + str(model_type) + "_" + str(nr_ind) + "_" + str(fold) + "_nlpd.txt", "rb") as fp: + result = pickle.load(fp) + nlpdf += result + # print(result) + nlpd[model_type, nr_ind] = nlpdf / 10 + +num_space = np.array([50, 100, 150, 200, 250, 300, 350, 400, 450, 500]) +plt.figure(1) +plt.plot(num_space[:num_complete], timings.T[:, 0], '.--', markersize=3, linewidth=2.5, color=color_palette['dark_orange']) +plt.plot(num_space[:num_complete], timings.T[:, 1], 'x--', markersize=5, linewidth=2.5, color=color_palette['dark_blue']) +plt.plot(num_space[:num_complete], timings.T[:, 2], 'x-.', markersize=5, linewidth=2.5, color=color_palette['green']) +plt.xlabel('Number of spatial points') +plt.ylabel('Training step time (secs)') +plt.ylim([-0., 82]) +ax = plt.gca() +ax.set_xticks(num_space[:num_complete]) +plt.legend(['Full', 'Spatial mean-field', 'Infinite-horizon'], loc=2) +if True: + tikzplotlib.save('//Users/wilkinw1/postdoc/inprogress/ati-fcai/paper/icml2021/fig/scalability.tex', + axis_width='\\figurewidth', + axis_height='\\figureheight', + tex_relative_path_to_data='./fig/') + +plt.figure(2) +plt.plot(num_space[:num_complete_nlpd], nlpd.T[:, 0], '.--', markersize=3, linewidth=2.5, color=color_palette['dark_orange']) +plt.plot(num_space[:num_complete_nlpd], nlpd.T[:, 1], 'x--', markersize=5, linewidth=2.5, color=color_palette['dark_blue']) +plt.plot(num_space[:num_complete_nlpd], nlpd.T[:, 2], 'x-.', markersize=6, linewidth=2.5, color=color_palette['green']) +plt.plot(num_space[:num_complete_nlpd], nlpd.T[:, 0], '.--', markersize=3, linewidth=2.5, color=color_palette['dark_orange']) +plt.xlabel('Number of spatial points') +plt.ylabel('Test NLPD') +# plt.ylim([-0., 82]) +ax = plt.gca() +ax.set_xticks(num_space[:num_complete_nlpd]) +plt.legend(['Full', 'Spatial mean-field', 'Infinite-horizon'], loc=1) +if True: + 
tikzplotlib.save('//Users/wilkinw1/postdoc/inprogress/ati-fcai/paper/icml2021/fig/approx_perf.tex', + axis_width='\\figurewidth', + axis_height='\\figureheight', + tex_relative_path_to_data='./fig/') +plt.show() diff --git a/newt/experiments/rainforest/timings.py b/newt/experiments/rainforest/timings.py new file mode 100644 index 0000000..e561958 --- /dev/null +++ b/newt/experiments/rainforest/timings.py @@ -0,0 +1,80 @@ +import newt +import objax +import numpy as np +import time +import pickle +import sys + +data = np.loadtxt('../../data/TRI2TU-data.csv', delimiter=',') + +spatial_points = np.array([50, 100, 150, 200, 250, 300, 350, 400, 450, 500]) + +if len(sys.argv) > 1: + model_type = int(sys.argv[1]) + nr_ind = int(sys.argv[2]) +else: + model_type = 0 + nr_ind = 1 + nr = 100 # spatial grid point (y-axis) +nr = spatial_points[nr_ind] +nt = 200 # temporal grid points (x-axis) +scale = 1000 / nt + +t, r, Y_ = newt.utils.discretegrid(data, [0, 1000, 0, 500], [nt, nr]) + +np.random.seed(99) +N = nr * nt # number of data points + +test_ind = np.random.permutation(N)[:N//10] # [:N//4] +Y = Y_.flatten() +Y[test_ind] = np.nan +Y = Y.reshape(nt, nr) + +var_f = 1. # GP variance +len_f = 10. 
# lengthscale + +kern = newt.kernels.SpatialMatern32(variance=var_f, lengthscale=len_f, z=r[0, ...], sparse=False) +lik = newt.likelihoods.Poisson() +# lik = newt.likelihoods.Gaussian() +# model = newt.models.GP(kernel=kern, likelihood=lik, X=x, Y=Y) +if model_type == 0: + model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=t, R=r, Y=Y) +elif model_type == 1: + model = newt.models.MarkovGPMeanField(kernel=kern, likelihood=lik, X=t, R=r, Y=Y) +elif model_type == 2: + model = newt.models.InfiniteHorizonGP(kernel=kern, likelihood=lik, X=t, R=r, Y=Y) + +print('num spatial pts:', nr) +print(model) + +inf = newt.inference.VariationalInference(cubature=newt.cubature.Unscented()) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.2 +lr_newton = 0.1 +iters = 11 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = energy(model) # compute energy and its gradients w.r.t. 
hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + +for i in range(1, iters + 1): + if i == 2: + t0 = time.time() + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d: energy: %1.4f' % (i, loss[0])) +t1 = time.time() +avg_time_taken = (t1-t0)/(iters - 1) +print('optimisation time: %2.2f secs' % avg_time_taken) + +with open("output/" + str(model_type) + "_" + str(nr_ind) + "_time.txt", "wb") as fp: + pickle.dump(avg_time_taken, fp) diff --git a/newt/inference.py b/newt/inference.py new file mode 100644 index 0000000..b50a678 --- /dev/null +++ b/newt/inference.py @@ -0,0 +1,438 @@ +import objax +import jax.numpy as np +from jax import vmap +from .cubature import GaussHermite +from .utils import ( + diag, + transpose, + inv, + solve, + ensure_positive_precision, + ensure_diagonal_positive_precision, + mvn_logpdf, + pep_constant +) +import math + +LOG2PI = math.log(2 * math.pi) + + +def newton_update(mean, jacobian, hessian): + """ + Applies one step of Newton's method to update the pseudo_likelihood parameters + """ + + # deal with missing data + hessian = np.where(np.isnan(hessian), -1e-6, hessian) + jacobian = np.where(np.isnan(jacobian), hessian @ mean, jacobian) + + # Newton update + pseudo_likelihood_nat1 = ( + jacobian - hessian @ mean + ) + pseudo_likelihood_nat2 = ( + -hessian + ) + + return pseudo_likelihood_nat1, pseudo_likelihood_nat2 + + +class Inference(objax.Module): + """ + The approximate inference class. + Each approximate inference scheme implements an 'update' method which is called during + inference in order to update the local likelihood approximation (the sites). + """ + def __init__(self, + cubature=GaussHermite()): + self.cubature = cubature + + def __call__(self, model, lr=1., batch_ind=None): + + if (batch_ind is None) or (batch_ind.shape[0] == model.num_data): + batch_ind = None + + model.update_posterior() # make sure the posterior is up to date + + # use the chosen inference method (VI, EP, ...) 
to compute the necessary terms for the parameter update + mean, jacobian, hessian = self.update(model, batch_ind, lr) + # ---- Newton update ---- + nat1_n, nat2_n = newton_update(mean, jacobian, hessian) # parallel operation + # ----------------------- + nat1, nat2 = model.group_natural_params(nat1_n, nat2_n, batch_ind) # sequential / batch operation + + # apply the parameter update and return the energy + # ---- update the model parameters ---- + model.pseudo_likelihood_nat1.value = ( + (1 - lr) * model.pseudo_likelihood_nat1.value + + lr * nat1 + ) + model.pseudo_likelihood_nat2.value = ( + (1 - lr) * model.pseudo_likelihood_nat2.value + + lr * nat2 + ) + model.set_pseudo_likelihood() # update mean and covariance + + model.update_posterior() # recompute posterior with new params + + def update(self, model, batch_ind=None, lr=1.): + raise NotImplementedError + + def energy(self, model, batch_ind=None): + return model.filter_energy() + + +class Laplace(Inference): + """ + """ + def __init__(self): + super().__init__() + self.name = 'Laplace / Newton\'s Algorithm (NA)' + + def update(self, model, batch_ind=None, lr=1.): + """ + """ + if batch_ind is None: + batch_ind = np.arange(model.num_data) + + mean_f, _ = model.conditional_posterior_to_data(batch_ind) + + # Laplace approximates the expected density with a point estimate at the posterior mean: log p(y|f=m) + log_lik, jacobian, hessian = vmap(model.likelihood.log_likelihood_gradients)( # parallel + model.Y[batch_ind], + mean_f + ) + + hessian = -ensure_positive_precision(-hessian) # manual fix to avoid non-PSD precision + + jacobian, hessian = model.conditional_data_to_posterior(jacobian[..., None], hessian) + + if mean_f.shape[1] == jacobian.shape[1]: + return mean_f, jacobian, hessian + else: + ind = model.ind[batch_ind] + return model.posterior_mean.value[ind], jacobian, hessian # sparse Markov case + + def energy(self, model, batch_ind=None): + """ + """ + if batch_ind is None: + batch_ind = 
np.arange(model.num_data)
+            scale = 1
+        else:
+            scale = model.num_data / batch_ind.shape[0]
+
+        mean_f, _ = model.conditional_posterior_to_data(batch_ind)
+
+        # Laplace approximates the expected density with a point estimate at the posterior mean: log p(y|f=m)
+        log_lik, _, _ = vmap(model.likelihood.log_likelihood_gradients)(  # parallel
+            model.Y[batch_ind],
+            mean_f
+        )
+
+        KL = model.compute_kl()  # KL[q(f)|p(f)]
+        laplace_energy = -(  # Laplace approximation to the negative log marginal likelihood
+            scale * np.nansum(log_lik)  # nansum accounts for missing data
+            - KL
+        )
+
+        return laplace_energy
+
+
+class VariationalInference(Inference):
+    """
+    Natural gradient VI (using the conjugate-computation VI approach)
+    Refs:
+        Khan & Lin 2017 "Conjugate-computation variational inference - converting inference
+                         in non-conjugate models into inference in conjugate models"
+        Chang, Wilkinson, Khan & Solin 2020 "Fast variational learning in state space Gaussian process models"
+    """
+    def __init__(self,
+                 cubature=GaussHermite()):
+        super().__init__(cubature=cubature)
+        self.name = 'Variational Inference (VI)'
+
+    def update(self, model, batch_ind=None, lr=1.):
+        """
+        """
+        if batch_ind is None:
+            batch_ind = np.arange(model.num_data)
+
+        mean_f, cov_f = model.conditional_posterior_to_data(batch_ind)
+
+        # VI expected density is E_q[log p(y|f)]
+        expected_density, dE_dm, d2E_dm2 = vmap(model.likelihood.variational_expectation, (0, 0, 0, None))(
+            model.Y[batch_ind],
+            mean_f,
+            cov_f,
+            self.cubature
+        )
+
+        d2E_dm2 = -ensure_diagonal_positive_precision(-d2E_dm2)  # manual fix to avoid non-PSD precision
+
+        jacobian, hessian = model.conditional_data_to_posterior(dE_dm, d2E_dm2)
+
+        if mean_f.shape[1] == jacobian.shape[1]:
+            return mean_f, jacobian, hessian
+        else:
+            ind = model.ind[batch_ind]
+            return model.posterior_mean.value[ind], jacobian, hessian  # sparse Markov case
+
+    def energy(self, model, batch_ind=None):
+        """
+        """
+        if batch_ind is None:
+            batch_ind = 
np.arange(model.num_data) + scale = 1 + else: + scale = model.num_data / batch_ind.shape[0] + + mean_f, cov_f = model.conditional_posterior_to_data(batch_ind) + + # VI expected density is E_q[log p(y|f)] + expected_density, _, _ = vmap(model.likelihood.variational_expectation, (0, 0, 0, None))( + model.Y[batch_ind], + mean_f, + cov_f, + self.cubature + ) + + KL = model.compute_kl() # KL[q(f)|p(f)] + variational_free_energy = -( # the variational free energy, i.e., the negative ELBO + scale * np.nansum(expected_density) # nansum accounts for missing data + - KL + ) + + return variational_free_energy + + +class ExpectationPropagation(Inference): + """ + Expectation propagation (EP) + """ + def __init__(self, + power=1.0, + cubature=GaussHermite()): + self.power = power + super().__init__(cubature=cubature) + self.name = 'Expectation Propagation (EP)' + + def update(self, model, batch_ind=None, lr=1.): + """ + TODO: will not currently work with SparseGP because cavity_cov is a vector (SparseGP and SparseMarkovGP use different parameterisations) + """ + if batch_ind is None: + batch_ind = np.arange(model.num_data) + + # compute the cavity distribution + cavity_mean, cavity_cov = model.cavity_distribution(batch_ind, self.power) + cav_mean_f, cav_cov_f = model.conditional_posterior_to_data(batch_ind, cavity_mean, cavity_cov) + + # calculate log marginal likelihood and the new sites via moment matching: + # EP expected density is log E_q[p(y|f)] + lZ, dlZ, d2lZ = vmap(model.likelihood.moment_match, (0, 0, 0, None, None))( + model.Y[batch_ind], + cav_mean_f, + cav_cov_f, + self.power, + self.cubature + ) + + scale_factor = vmap(inv)(cav_cov_f @ d2lZ + np.eye(d2lZ.shape[1])) / self.power + dlZ = scale_factor @ dlZ + d2lZ = scale_factor @ d2lZ + if model.mask is not None: + # apply mask + mask = model.mask[batch_ind][..., None] + dlZ = np.where(mask, np.nan, dlZ) + d2lZ_masked = np.where(mask + transpose(mask), 0., d2lZ) # ensure masked entries are independent + d2lZ = 
np.where(diag(mask)[..., None], np.nan, d2lZ_masked) # ensure masked entries return log like of 0 + + d2lZ = -ensure_diagonal_positive_precision(-d2lZ) # manual fix to avoid non-PSD precision + + jacobian, hessian = model.conditional_data_to_posterior(dlZ, d2lZ) + + if cav_mean_f.shape[1] == jacobian.shape[1]: + return cav_mean_f, jacobian, hessian + else: + ind = model.ind[batch_ind] + return cavity_mean[ind], jacobian, hessian # sparse Markov case + + def energy(self, model, batch_ind=None): + """ + TODO: implement for SparseGP + """ + if batch_ind is None: + batch_ind = np.arange(model.num_data) + scale = 1 + else: + scale = model.num_data / batch_ind.shape[0] + + # compute the cavity distribution + cavity_mean, cavity_cov = model.cavity_distribution(None, self.power) + cav_mean_f, cav_cov_f = model.conditional_posterior_to_data(None, cavity_mean, cavity_cov) + + # calculate log marginal likelihood and the new sites via moment matching: + # EP expected density is log E_q[p(y|f)] + lZ, _, _ = vmap(model.likelihood.moment_match, (0, 0, 0, None, None))( + model.Y[batch_ind], + cav_mean_f[batch_ind], + cav_cov_f[batch_ind], + self.power, + self.cubature + ) + + mask = model.mask # [batch_ind] + if model.mask is not None: + if np.squeeze(mask[batch_ind]).ndim != np.squeeze(lZ).ndim: + raise NotImplementedError('masking in spatio-temporal models not implemented for EP') + lZ = np.where(np.squeeze(mask[batch_ind]), 0., np.squeeze(lZ)) # apply mask + if mask.shape[1] != cavity_cov.shape[1]: + mask = np.tile(mask, [1, cavity_cov.shape[1]]) + + pseudo_y, pseudo_var = model.compute_full_pseudo_lik() + lZ_pseudo = vmap(mvn_logpdf)( + pseudo_y, + cavity_mean, + pseudo_var / self.power + cavity_cov, + mask + ) + constant = vmap(pep_constant, [0, None, 0])(pseudo_var, self.power, mask) # PEP constant + lZ_pseudo += constant + + lZ_post = model.compute_log_lik(pseudo_y, pseudo_var) + + ep_energy = -( + lZ_post + + 1 / self.power * (scale * np.nansum(lZ) - np.nansum(lZ_pseudo)) 
+ ) + + return ep_energy + + +class PosteriorLinearisation(Inference): + """ + An iterated smoothing algorithm based on statistical linear regression (SLR). + This method linearises the likelihood model in the region described by the posterior. + """ + def __init__(self, + cubature=GaussHermite(), + energy_function=None): + super().__init__(cubature=cubature) + if energy_function is None: + self.energy_function = ExpectationPropagation(power=1, cubature=cubature).energy + # self.energy_function = VariationalInference(cubature=cubature).energy + else: + self.energy_function = energy_function + self.name = 'Posterior Linearisation (PL)' + + def update(self, model, batch_ind=None, lr=1.): + """ + """ + if batch_ind is None: + batch_ind = np.arange(model.num_data) + + mean_f, cov_f = model.conditional_posterior_to_data(batch_ind) + + # PL expected density is mu=E_q[E(y|f)] + mu, d_mu, omega = vmap(model.likelihood.statistical_linear_regression, (0, 0, None))( + mean_f, + cov_f, + self.cubature + ) + residual = model.Y[batch_ind].reshape(mu.shape) - mu + mask = np.isnan(residual) + residual = np.where(mask, 0., residual) + + dmu_omega = transpose(vmap(solve)(omega, d_mu)) # d_mu^T @ inv(omega) + jacobian = dmu_omega @ residual + hessian = -dmu_omega @ d_mu + + hessian = -ensure_diagonal_positive_precision(-hessian) # manual fix to avoid non-PSD precision + + # deal with missing data + jacobian = np.where(mask, np.nan, jacobian) + hessian = np.where(diag(np.squeeze(mask, axis=-1)), np.nan, hessian) + + jacobian, hessian = model.conditional_data_to_posterior(jacobian, hessian) + + if mean_f.shape[1] == jacobian.shape[1]: + return mean_f, jacobian, hessian + else: + ind = model.ind[batch_ind] + return model.posterior_mean.value[ind], jacobian, hessian # sparse Markov case + + def energy(self, model, batch_ind=None): + """ + The PL energy given in [1] is a poor approximation to the EP energy (although the gradients are ok, since + the part they discard does not depends on 
the hyperparameters). Therefore, we can choose to use either + the variational free energy or EP energy here. + TODO: develop a PL energy approximation that reuses the linearisation quantities and matches GHS / UKS etc. + [1] Garcia-Fernandez, Tronarp, Sรคrkkรค (2018) 'Gaussian process classification using posterior linearisation' + """ + return self.energy_function(model, batch_ind) + + +class Taylor(Inference): + """ + Inference using analytical linearisation, i.e. a first order Taylor expansion. This is equivalent to + the Extended Kalman Smoother when using a Markov GP. + """ + def __init__(self, + cubature=GaussHermite(), # cubature is only used in the energy calc. TODO: remove need for this + energy_function=None): + super().__init__(cubature=cubature) + self.name = 'Taylor / Extended Kalman Smoother (EKS)' + if energy_function is None: + self.energy_function = ExpectationPropagation(power=1, cubature=cubature).energy + # self.energy_function = VariationalInference(cubature=cubature).energy + else: + self.energy_function = energy_function + + def update(self, model, batch_ind=None, lr=1.): + """ + """ + if batch_ind is None: + batch_ind = np.arange(model.num_data) + Y = model.Y[batch_ind] + + mean_f, cov_f = model.conditional_posterior_to_data(batch_ind) + + # calculate the Jacobian of the observation model w.r.t. function fโ‚™ and noise term rโ‚™ + Jf, Jsigma = vmap(model.likelihood.analytical_linearisation)(mean_f, np.zeros_like(Y)) # evaluate at mean + + obs_cov = np.eye(Y.shape[1]) # observation noise scale is w.l.o.g. 
1
+        sigma = Jsigma @ obs_cov @ transpose(Jsigma)
+        likelihood_expectation, _ = vmap(model.likelihood.conditional_moments)(mean_f)
+        residual = Y.reshape(likelihood_expectation.shape) - likelihood_expectation  # residual, yₙ-E[yₙ|fₙ]
+
+        mask = np.isnan(residual)
+        residual = np.where(mask, 0., residual)
+
+        Jf_invsigma = transpose(vmap(solve)(sigma, Jf))  # Jf^T @ inv(sigma)
+        jacobian = Jf_invsigma @ residual
+        hessian = -Jf_invsigma @ Jf
+
+        # deal with missing data
+        jacobian = np.where(mask, np.nan, jacobian)
+        hessian = np.where(diag(np.squeeze(mask, axis=-1)), np.nan, hessian)
+
+        jacobian, hessian = model.conditional_data_to_posterior(jacobian, hessian)
+
+        if mean_f.shape[1] == jacobian.shape[1]:
+            return mean_f, jacobian, hessian
+        else:
+            ind = model.ind[batch_ind]
+            return model.posterior_mean.value[ind], jacobian, hessian  # sparse Markov case
+
+    def energy(self, model, batch_ind=None):
+        """
+        Arguably, we should use the filtering energy here, such that the result matches that of the standard
+        extended Kalman smoother. 
+ TODO: implement energy that matches standard EKS + """ + return self.energy_function(model, batch_ind) + + +class ExtendedKalmanSmoother(Taylor): + pass diff --git a/newt/kernels.py b/newt/kernels.py new file mode 100644 index 0000000..5c36875 --- /dev/null +++ b/newt/kernels.py @@ -0,0 +1,1159 @@ +import objax +from jax import vmap +import jax.numpy as np +from jax.scipy.linalg import cho_factor, cho_solve, block_diag +from .utils import scaled_squared_euclid_dist, softplus, softplus_inv, rotation_matrix +from warnings import warn + + +class Kernel(objax.Module): + """ + """ + + def __call__(self, X, X2): + return self.K(X, X2) + + def K(self, X, X2): + raise NotImplementedError('kernel function not implemented') + + def measurement_model(self): + raise NotImplementedError + + def inducing_precision(self): + return None, None + + def kernel_to_state_space(self, R=None): + raise NotImplementedError + + def spatial_conditional(self, R=None, predict=False): + """ + """ + return None, None + + +class StationaryKernel(Kernel): + """ + """ + + def __init__(self, + variance, + lengthscale, + fix_variance=False, + fix_lengthscale=False): + # check whether the parameters are to be optimised + if fix_lengthscale: + self.transformed_lengthscale = objax.StateVar(softplus_inv(np.array(lengthscale))) + else: + self.transformed_lengthscale = objax.TrainVar(softplus_inv(np.array(lengthscale))) + if fix_variance: + self.transformed_variance = objax.StateVar(softplus_inv(np.array(variance))) + else: + self.transformed_variance = objax.TrainVar(softplus_inv(np.array(variance))) + + @property + def variance(self): + return softplus(self.transformed_variance.value) + + @property + def lengthscale(self): + return softplus(self.transformed_lengthscale.value) + + def K(self, X, X2): + r2 = scaled_squared_euclid_dist(X, X2, self.lengthscale) + return self.K_r2(r2) + + def K_r2(self, r2): + # Clipping around the (single) float precision which is ~1e-45. 
+ r = np.sqrt(np.maximum(r2, 1e-36)) + return self.K_r(r) + + @staticmethod + def K_r(r): + raise NotImplementedError('kernel not implemented') + + def kernel_to_state_space(self, R=None): + raise NotImplementedError + + def measurement_model(self): + raise NotImplementedError + + def state_transition(self, dt): + raise NotImplementedError + + def stationary_covariance(self): + raise NotImplementedError + + +class Matern12(StationaryKernel): + """ + The Matern 1/2 kernel. Functions drawn from a GP with this kernel are not + differentiable anywhere. The kernel equation is + + k(r) = ฯƒยฒ exp{-r} + + where: + r is the Euclidean distance between the input points, scaled by the lengthscales parameter โ„“. + ฯƒยฒ is the variance parameter + """ + + @property + def state_dim(self): + return 1 + + def K_r(self, r): + return self.variance * np.exp(-r) + + def kernel_to_state_space(self, R=None): + F = np.array([[-1.0 / self.lengthscale]]) + L = np.array([[1.0]]) + Qc = np.array([[2.0 * self.variance / self.lengthscale]]) + H = np.array([[1.0]]) + Pinf = np.array([[self.variance]]) + return F, L, Qc, H, Pinf + + def stationary_covariance(self): + Pinf = np.array([[self.variance]]) + return Pinf + + def measurement_model(self): + H = np.array([[1.0]]) + return H + + def state_transition(self, dt): + """ + Calculation of the discrete-time state transition matrix A = expm(Fฮ”t) for the exponential prior. + :param dt: step size(s), ฮ”tโ‚™ = tโ‚™ - tโ‚™โ‚‹โ‚ [scalar] + :return: state transition matrix A [1, 1] + """ + A = np.broadcast_to(np.exp(-dt / self.lengthscale), [1, 1]) + return A + + +class Matern32(StationaryKernel): + """ + The Matern 3/2 kernel. Functions drawn from a GP with this kernel are once + differentiable. The kernel equation is + + k(r) = ฯƒยฒ (1 + โˆš3r) exp{-โˆš3 r} + + where: + r is the Euclidean distance between the input points, scaled by the lengthscales parameter โ„“, + ฯƒยฒ is the variance parameter. 
+ """ + + @property + def state_dim(self): + return 2 + + def K_r(self, r): + sqrt3 = np.sqrt(3.0) + return self.variance * (1.0 + sqrt3 * r) * np.exp(-sqrt3 * r) + + def kernel_to_state_space(self, R=None): + lam = 3.0 ** 0.5 / self.lengthscale + F = np.array([[0.0, 1.0], + [-lam ** 2, -2 * lam]]) + L = np.array([[0], + [1]]) + Qc = np.array([[12.0 * 3.0 ** 0.5 / self.lengthscale ** 3.0 * self.variance]]) + H = np.array([[1.0, 0.0]]) + Pinf = np.array([[self.variance, 0.0], + [0.0, 3.0 * self.variance / self.lengthscale ** 2.0]]) + return F, L, Qc, H, Pinf + + def stationary_covariance(self): + Pinf = np.array([[self.variance, 0.0], + [0.0, 3.0 * self.variance / self.lengthscale ** 2.0]]) + return Pinf + + def measurement_model(self): + H = np.array([[1.0, 0.0]]) + return H + + def state_transition(self, dt): + """ + Calculation of the discrete-time state transition matrix A = expm(Fฮ”t) for the Matern-3/2 prior. + :param dt: step size(s), ฮ”tโ‚™ = tโ‚™ - tโ‚™โ‚‹โ‚ [scalar] + :return: state transition matrix A [2, 2] + """ + lam = np.sqrt(3.0) / self.lengthscale + A = np.exp(-dt * lam) * (dt * np.array([[lam, 1.0], [-lam**2.0, -lam]]) + np.eye(2)) + return A + + +class Matern52(StationaryKernel): + """ + The Matern 5/2 kernel. Functions drawn from a GP with this kernel are twice + differentiable. The kernel equation is + + k(r) = ฯƒยฒ (1 + โˆš5r + 5/3rยฒ) exp{-โˆš5 r} + + where: + r is the Euclidean distance between the input points, scaled by the lengthscales parameter โ„“, + ฯƒยฒ is the variance parameter. 
+ """ + + @property + def state_dim(self): + return 3 + + def K_r(self, r): + sqrt5 = np.sqrt(5.0) + return self.variance * (1.0 + sqrt5 * r + 5.0 / 3.0 * np.square(r)) * np.exp(-sqrt5 * r) + + def kernel_to_state_space(self, R=None): + # uses variance and lengthscale hyperparameters to construct the state space model + lam = 5.0**0.5 / self.lengthscale + F = np.array([[0.0, 1.0, 0.0], + [0.0, 0.0, 1.0], + [-lam**3.0, -3.0*lam**2.0, -3.0*lam]]) + L = np.array([[0.0], + [0.0], + [1.0]]) + Qc = np.array([[self.variance * 400.0 * 5.0 ** 0.5 / 3.0 / self.lengthscale ** 5.0]]) + H = np.array([[1.0, 0.0, 0.0]]) + kappa = 5.0 / 3.0 * self.variance / self.lengthscale**2.0 + Pinf = np.array([[self.variance, 0.0, -kappa], + [0.0, kappa, 0.0], + [-kappa, 0.0, 25.0*self.variance / self.lengthscale**4.0]]) + return F, L, Qc, H, Pinf + + def measurement_model(self): + H = np.array([[1.0, 0.0, 0.0]]) + return H + + def state_transition(self, dt): + """ + Calculation of the discrete-time state transition matrix A = expm(Fฮ”t) for the Matern-5/2 prior. + :param dt: step size(s), ฮ”tโ‚™ = tโ‚™ - tโ‚™โ‚‹โ‚ [scalar] + :return: state transition matrix A [3, 3] + """ + lam = np.sqrt(5.0) / self.lengthscale + dtlam = dt * lam + A = np.exp(-dtlam) \ + * (dt * np.array([[lam * (0.5 * dtlam + 1.0), dtlam + 1.0, 0.5 * dt], + [-0.5 * dtlam * lam ** 2, lam * (1.0 - dtlam), 1.0 - 0.5 * dtlam], + [lam ** 3 * (0.5 * dtlam - 1.0), lam ** 2 * (dtlam - 3), lam * (0.5 * dtlam - 2.0)]]) + + np.eye(3)) + return A + + def stationary_covariance(self): + kappa = 5.0 / 3.0 * self.variance / self.lengthscale**2.0 + Pinf = np.array([[self.variance, 0.0, -kappa], + [0.0, kappa, 0.0], + [-kappa, 0.0, 25.0*self.variance / self.lengthscale**4.0]]) + return Pinf + + +class Matern72(StationaryKernel): + """ + The Matern 7/2 kernel. Functions drawn from a GP with this kernel are three times differentiable. 
+ + where: + r is the Euclidean distance between the input points, scaled by the lengthscales parameter โ„“, + ฯƒยฒ is the variance parameter. + """ + + @property + def state_dim(self): + return 4 + + def K_r(self, r): + sqrt7 = np.sqrt(7.0) + return self.variance * (1. + sqrt7 * r + 14. / 5. * np.square(r) + 7. * sqrt7 / 15. * r**3) * np.exp(-sqrt7 * r) + + def kernel_to_state_space(self, R=None): + # uses variance and lengthscale hyperparameters to construct the state space model + lam = 7.0**0.5 / self.lengthscale + F = np.array([[0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 1.0], + [-lam**4.0, -4.0*lam**3.0, -6.0*lam**2.0, -4.0*lam]]) + L = np.array([[0.0], + [0.0], + [0.0], + [1.0]]) + Qc = np.array([[self.variance * 10976.0 * 7.0 ** 0.5 / 5.0 / self.lengthscale ** 7.0]]) + H = np.array([[1, 0, 0, 0]]) + kappa = 7.0 / 5.0 * self.variance / self.lengthscale**2.0 + kappa2 = 9.8 * self.variance / self.lengthscale**4.0 + Pinf = np.array([[self.variance, 0.0, -kappa, 0.0], + [0.0, kappa, 0.0, -kappa2], + [-kappa, 0.0, kappa2, 0.0], + [0.0, -kappa2, 0.0, 343.0*self.variance / self.lengthscale**6.0]]) + return F, L, Qc, H, Pinf + + def measurement_model(self): + H = np.array([[1.0, 0.0, 0.0, 0.0]]) + return H + + def state_transition(self, dt): + """ + Calculation of the discrete-time state transition matrix A = expm(Fฮ”t) for the Matern-7/2 prior. 
+ :param dt: step size(s), ฮ”tโ‚™ = tโ‚™ - tโ‚™โ‚‹โ‚ [scalar] + :return: state transition matrix A [4, 4] + """ + lam = np.sqrt(7.0) / self.lengthscale + lam2 = lam * lam + lam3 = lam2 * lam + dtlam = dt * lam + dtlam2 = dtlam ** 2 + A = np.exp(-dtlam) \ + * (dt * np.array([[lam * (1.0 + 0.5 * dtlam + dtlam2 / 6.0), 1.0 + dtlam + 0.5 * dtlam2, + 0.5 * dt * (1.0 + dtlam), dt ** 2 / 6], + [-dtlam2 * lam ** 2.0 / 6.0, lam * (1.0 + 0.5 * dtlam - 0.5 * dtlam2), + 1.0 + dtlam - 0.5 * dtlam2, dt * (0.5 - dtlam / 6.0)], + [lam3 * dtlam * (dtlam / 6.0 - 0.5), dtlam * lam2 * (0.5 * dtlam - 2.0), + lam * (1.0 - 2.5 * dtlam + 0.5 * dtlam2), 1.0 - dtlam + dtlam2 / 6.0], + [lam2 ** 2 * (dtlam - 1.0 - dtlam2 / 6.0), lam3 * (3.5 * dtlam - 4.0 - 0.5 * dtlam2), + lam2 * (4.0 * dtlam - 6.0 - 0.5 * dtlam2), lam * (1.5 * dtlam - 3.0 - dtlam2 / 6.0)]]) + + np.eye(4)) + return A + + def stationary_covariance(self): + kappa = 7.0 / 5.0 * self.variance / self.lengthscale ** 2.0 + kappa2 = 9.8 * self.variance / self.lengthscale ** 4.0 + Pinf = np.array([[self.variance, 0.0, -kappa, 0.0], + [0.0, kappa, 0.0, -kappa2], + [-kappa, 0.0, kappa2, 0.0], + [0.0, -kappa2, 0.0, 343.0 * self.variance / self.lengthscale ** 6.0]]) + return Pinf + + +class SpatioTemporalKernel(Kernel): + """ + The Spatio-Temporal GP class + :param temporal_kernel: the temporal prior, must be a member of the Prior class + :param spatial_kernel: the kernel used for the spatial dimensions + :param z: the initial spatial locations + :param conditional: specifies which method to use for computing the covariance of the spatial conditional; + must be one of ['DTC', 'FIC', 'Full'] + :param sparse: boolean specifying whether the model is sparse in space + :param opt_z: boolean specifying whether to optimise the spatial input locations z + """ + def __init__(self, + temporal_kernel, + spatial_kernel, + z=None, + conditional=None, + sparse=True, + opt_z=False, + spatial_dims=None): + self.temporal_kernel = temporal_kernel + 
self.spatial_kernel = spatial_kernel + if conditional is None: + if sparse: + conditional = 'Full' + else: + conditional = 'DTC' + if opt_z and (not sparse): # z should not be optimised if the model is not sparse + warn("spatial inducing inputs z will not be optimised because sparse=False") + opt_z = False + self.sparse = sparse + if z is None: # initialise z + # TODO: smart initialisation + if spatial_dims == 1: + z = np.linspace(-3., 3., num=15) + elif spatial_dims == 2: + z1 = np.linspace(-3., 3., num=5) + zA, zB = np.meshgrid(z1, z1) # Adding additional dimension to inducing points grid + z = np.hstack((zA.reshape(-1, 1), zB.reshape(-1, 1))) # Flattening grid for use in kernel functions + else: + raise NotImplementedError('please provide an initialisation for inducing inputs z') + if z.ndim < 2: + z = z[:, np.newaxis] + if spatial_dims is None: + spatial_dims = z.ndim - 1 + assert spatial_dims == z.ndim - 1 + self.M = z.shape[0] + if opt_z: + self.z = objax.TrainVar(z) # .reshape(-1, 1) + else: + self.z = objax.StateVar(z) + if conditional in ['DTC', 'dtc']: + self.conditional_covariance = self.deterministic_training_conditional + elif conditional in ['FIC', 'FITC', 'fic', 'fitc']: + self.conditional_covariance = self.fully_independent_conditional + elif conditional in ['Full', 'full']: + self.conditional_covariance = self.full_conditional + else: + raise NotImplementedError('conditional method not recognised') + if (not sparse) and (conditional != 'DTC'): + warn("You chose a non-deterministic conditional, but \'DTC\' will be used because the model is not sparse") + + @property + def variance(self): + return self.temporal_kernel.variance + + @property + def temporal_lengthscale(self): + return self.temporal_kernel.lengthscale + + @property + def spatial_lengthscale(self): + return self.spatial_kernel.lengthscale + + @property + def state_dim(self): + return self.temporal_kernel.state_dim + + def K(self, X, X2): + T = X[:, :1] + T2 = X2[:, :1] + R = X[:, 1:] + 
R2 = X2[:, 1:] + return self.temporal_kernel(T, T2) * self.spatial_kernel(R, R2) + + @staticmethod + def deterministic_training_conditional(X, R, Krz, K): + cov = np.array([[0.0]]) + return cov + + def fully_independent_conditional(self, X, R, Krz, K): + Krr = self.spatial_kernel(R, R) + X = X.reshape(-1, 1) + cov = self.temporal_kernel.K(X, X) * (np.diag(np.diag(Krr - K @ Krz.T))) + return cov + + def full_conditional(self, X, R, Krz, K): + Krr = self.spatial_kernel(R, R) + X = X.reshape(-1, 1) + cov = self.temporal_kernel.K(X, X) * (Krr - K @ Krz.T) + return cov + + def spatial_conditional(self, X=None, R=None, predict=False): + """ + Compute the spatial conditional, i.e. the measurement model projecting the latent function u(t) to f(X,R) + f(X,R) | u(t) ~ N(f(X,R) | B u(t), C) + """ + Qzz, Lzz = self.inducing_precision() # pre-calculate inducing precision and its Cholesky factor + if self.sparse or predict: + # TODO: save compute if R is constant: + # gridded_data = np.all(np.abs(np.diff(R, axis=0)) < 1e-10) + # if gridded_data: + # R = R[:1] + R = R.reshape((R.shape[0],) + (-1,) + self.z.value.shape[1:]) + Krz = vmap(self.spatial_kernel, [0, None])(R, self.z.value) + K = Krz @ Qzz # Krz / Kzz + B = K @ Lzz + C = vmap(self.conditional_covariance)(X, R, Krz, K) # conditional covariance + else: + B = Lzz + # conditional covariance (deterministic mapping is exact in non-sparse case) + C = np.zeros([B.shape[0], B.shape[0]]) + return B, C + + def inducing_precision(self): + """ + Compute the covariance and precision of the inducing spatial points to be used during filtering + """ + Kzz = self.spatial_kernel(self.z.value, self.z.value) + Lzz, low = cho_factor(Kzz, lower=True) # K_zz^(1/2) + Qzz = cho_solve((Lzz, low), np.eye(self.M)) # K_zz^(-1) + return Qzz, Lzz + + def stationary_covariance(self): + """ + Compute the covariance of the stationary state distribution. 
Since the latent components are independent + under the prior, this is a block-diagonal matrix + """ + Pinf_time = self.temporal_kernel.stationary_covariance() + Pinf = np.kron(np.eye(self.M), Pinf_time) + return Pinf + + def stationary_covariance_meanfield(self): + """ + Stationary covariance as a tensor of blocks, as required when using a mean-field assumption + """ + Pinf_time = self.temporal_kernel.stationary_covariance() + Pinf = np.tile(Pinf_time, [self.M, 1, 1]) + return Pinf + + def measurement_model(self): + """ + Compute the spatial conditional, i.e. the measurement model projecting the state x(t) to function space + f(t, R) = H x(t) + """ + H_time = self.temporal_kernel.measurement_model() + H = np.kron(np.eye(self.M), H_time) + return H + + def state_transition(self, dt): + """ + Calculation of the discrete-time state transition matrix A = expm(Fฮ”t) for the spatio-temporal prior. + :param dt: step size(s), ฮ”tโ‚™ = tโ‚™ - tโ‚™โ‚‹โ‚ [scalar] + :return: state transition matrix A + """ + A_time = self.temporal_kernel.state_transition(dt) + A = np.kron(np.eye(self.M), A_time) + return A + + def state_transition_meanfield(self, dt): + """ + State transition matrix in the form required for mean-field inference. + :param dt: step size(s), ฮ”tโ‚™ = tโ‚™ - tโ‚™โ‚‹โ‚ [scalar] + :return: state transition matrix A + """ + A_time = self.temporal_kernel.state_transition(dt) + A = np.tile(A_time, [self.M, 1, 1]) + return A + + def kernel_to_state_space(self, R=None): + F_t, L_t, Qc_t, H_t, Pinf_t = self.temporal_kernel.kernel_to_state_space() + Kzz = self.spatial_kernel(self.z.value, self.z.value) + F = np.kron(np.eye(self.M), F_t) + Qc = None + L = None + H = self.measurement_model() + Pinf = np.kron(Kzz, Pinf_t) + return F, L, Qc, H, Pinf + + +class SpatioTemporalMatern12(SpatioTemporalKernel): + """ + Spatio-Temporal Matern-1/2 kernel in SDE form. 
+ Hyperparameters: + variance, ฯƒยฒ + temporal lengthscale, lt + spatial lengthscale, ls + """ + def __init__(self, + variance=1.0, + lengthscale_time=1.0, + lengthscale_space=1.0, + z=None, + sparse=True, + opt_z=False, + conditional=None): + super().__init__(temporal_kernel=Matern12(variance=variance, lengthscale=lengthscale_time), + spatial_kernel=Matern12(variance=1., lengthscale=lengthscale_space, fix_variance=True), + z=z, + conditional=conditional, + sparse=sparse, + opt_z=opt_z) + self.name = 'Spatio-Temporal Matern-1/2' + + +class SpatioTemporalMatern32(SpatioTemporalKernel): + """ + Spatio-Temporal Matern-3/2 kernel in SDE form. + Hyperparameters: + variance, ฯƒยฒ + temporal lengthscale, lt + spatial lengthscale, ls + """ + def __init__(self, + variance=1.0, + lengthscale_time=1.0, + lengthscale_space=1.0, + z=None, + sparse=True, + opt_z=False, + conditional=None): + super().__init__(temporal_kernel=Matern32(variance=variance, lengthscale=lengthscale_time), + spatial_kernel=Matern32(variance=1., lengthscale=lengthscale_space, fix_variance=True), + z=z, + conditional=conditional, + sparse=sparse, + opt_z=opt_z) + self.name = 'Spatio-Temporal Matern-3/2' + + +class SpatioTemporalMatern52(SpatioTemporalKernel): + """ + Spatio-Temporal Matern-5/2 kernel in SDE form. + Hyperparameters: + variance, ฯƒยฒ + temporal lengthscale, lt + spatial lengthscale, ls + """ + def __init__(self, + variance=1.0, + lengthscale_time=1.0, + lengthscale_space=1.0, + z=None, + sparse=True, + opt_z=False, + conditional=None): + super().__init__(temporal_kernel=Matern52(variance=variance, lengthscale=lengthscale_time), + spatial_kernel=Matern52(variance=1., lengthscale=lengthscale_space, fix_variance=True), + z=z, + conditional=conditional, + sparse=sparse, + opt_z=opt_z) + self.name = 'Spatio-Temporal Matern-5/2' + + +class SpatialMatern12(SpatioTemporalKernel): + """ + Spatial Matern-1/2 kernel in SDE form. 
Similar to the spatio-temporal kernel but the + lengthscale is shared across dimensions. + Hyperparameters: + variance, ฯƒยฒ + lengthscale, l + """ + def __init__(self, + variance=1.0, + lengthscale=1.0, + z=None, + sparse=True, + opt_z=False, + conditional=None): + super().__init__(temporal_kernel=Matern12(variance=variance, lengthscale=lengthscale), + spatial_kernel=Matern12(variance=1., lengthscale=lengthscale, fix_variance=True), + z=z, + conditional=conditional, + sparse=sparse, + opt_z=opt_z) + # --- couple the lengthscales --- + self.spatial_kernel.transformed_lengthscale = self.temporal_kernel.transformed_lengthscale + # ------------------------------- + self.name = 'Spatial Matern-1/2' + + +class SpatialMatern32(SpatioTemporalKernel): + """ + Spatial Matern-3/2 kernel in SDE form. Similar to the spatio-temporal kernel but the + lengthscale is shared across dimensions. + Hyperparameters: + variance, ฯƒยฒ + lengthscale, l + """ + def __init__(self, + variance=1.0, + lengthscale=1.0, + z=None, + sparse=True, + opt_z=False, + conditional=None): + super().__init__(temporal_kernel=Matern32(variance=variance, lengthscale=lengthscale), + spatial_kernel=Matern32(variance=1., lengthscale=lengthscale, fix_variance=True), + z=z, + conditional=conditional, + sparse=sparse, + opt_z=opt_z) + # --- couple the lengthscales --- + self.spatial_kernel.transformed_lengthscale = self.temporal_kernel.transformed_lengthscale + # ------------------------------- + self.name = 'Spatial Matern-3/2' + + +class SpatialMatern52(SpatioTemporalKernel): + """ + Spatial Matern-5/2 kernel in SDE form. Similar to the spatio-temporal kernel but the + lengthscale is shared across dimensions. 
+ Hyperparameters: + variance, ฯƒยฒ + lengthscale, l + """ + def __init__(self, + variance=1.0, + lengthscale=1.0, + z=None, + sparse=True, + opt_z=False, + conditional=None): + super().__init__(temporal_kernel=Matern52(variance=variance, lengthscale=lengthscale), + spatial_kernel=Matern52(variance=1., lengthscale=lengthscale, fix_variance=True), + z=z, + conditional=conditional, + sparse=sparse, + opt_z=opt_z) + # --- couple the lengthscales --- + self.spatial_kernel.transformed_lengthscale = self.temporal_kernel.transformed_lengthscale + # ------------------------------- + self.name = 'Spatial Matern-5/2' + + +class QuasiPeriodicMatern12(Kernel): + """ + TODO: implement a general 'Product' class to reduce code duplication + Quasi-periodic kernel in SDE form (product of Periodic and Matern-1/2). + Hyperparameters: + variance, ฯƒยฒ + lengthscale of Periodic, l_p + period, p + lengthscale of Matern, l_m + The associated continuous-time state space model matrices are constructed via + a sum of cosines times a Matern-1/2. 
+ """ + def __init__(self, variance=1.0, lengthscale_periodic=1.0, period=1.0, lengthscale_matern=1.0, order=6): + self.transformed_lengthscale_periodic = objax.TrainVar(np.array(softplus_inv(lengthscale_periodic))) + self.transformed_variance = objax.TrainVar(np.array(softplus_inv(variance))) + self.transformed_period = objax.TrainVar(np.array(softplus_inv(period))) + self.transformed_lengthscale_matern = objax.TrainVar(np.array(softplus_inv(lengthscale_matern))) + super().__init__() + self.name = 'Quasi-periodic Matern-1/2' + self.order = order + self.igrid = np.meshgrid(np.arange(self.order + 1), np.arange(self.order + 1))[1] + factorial_mesh_K = np.array([[1., 1., 1., 1., 1., 1., 1.], + [1., 1., 1., 1., 1., 1., 1.], + [2., 2., 2., 2., 2., 2., 2.], + [6., 6., 6., 6., 6., 6., 6.], + [24., 24., 24., 24., 24., 24., 24.], + [120., 120., 120., 120., 120., 120., 120.], + [720., 720., 720., 720., 720., 720., 720.]]) + b = np.array([[1., 0., 0., 0., 0., 0., 0.], + [0., 2., 0., 0., 0., 0., 0.], + [2., 0., 2., 0., 0., 0., 0.], + [0., 6., 0., 2., 0., 0., 0.], + [6., 0., 8., 0., 2., 0., 0.], + [0., 20., 0., 10., 0., 2., 0.], + [20., 0., 30., 0., 12., 0., 2.]]) + self.b_fmK_2igrid = b * (1. / factorial_mesh_K) * (2. ** -self.igrid) + + @property + def variance(self): + return softplus(self.transformed_variance.value) + + @property + def lengthscale_periodic(self): + return softplus(self.transformed_lengthscale_periodic.value) + + @property + def lengthscale_matern(self): + return softplus(self.transformed_lengthscale_matern.value) + + @property + def period(self): + return softplus(self.transformed_period.value) + + def K(self, X, X2): + raise NotImplementedError + + def kernel_to_state_space(self, R=None): + var_p = 1. + ell_p = self.lengthscale_periodic + a = self.b_fmK_2igrid * ell_p ** (-2. * self.igrid) * np.exp(-1. / ell_p ** 2.) 
* var_p + q2 = np.sum(a, axis=0) + # The angular frequency + omega = 2 * np.pi / self.period + # The model + F_p = np.kron(np.diag(np.arange(self.order + 1)), np.array([[0., -omega], [omega, 0.]])) + L_p = np.eye(2 * (self.order + 1)) + # Qc_p = np.zeros(2 * (self.N + 1)) + Pinf_p = np.kron(np.diag(q2), np.eye(2)) + H_p = np.kron(np.ones([1, self.order + 1]), np.array([1., 0.])) + F_m = np.array([[-1.0 / self.lengthscale_matern]]) + L_m = np.array([[1.0]]) + Qc_m = np.array([[2.0 * self.variance / self.lengthscale_matern]]) + H_m = np.array([[1.0]]) + Pinf_m = np.array([[self.variance]]) + F = np.kron(F_m, np.eye(2 * (self.order + 1))) + np.kron(np.eye(1), F_p) + L = np.kron(L_m, L_p) + Qc = np.kron(Qc_m, Pinf_p) + H = np.kron(H_m, H_p) + # Pinf = np.kron(Pinf_m, Pinf_p) + Pinf = block_diag( + np.kron(Pinf_m, q2[0] * np.eye(2)), + np.kron(Pinf_m, q2[1] * np.eye(2)), + np.kron(Pinf_m, q2[2] * np.eye(2)), + np.kron(Pinf_m, q2[3] * np.eye(2)), + np.kron(Pinf_m, q2[4] * np.eye(2)), + np.kron(Pinf_m, q2[5] * np.eye(2)), + np.kron(Pinf_m, q2[6] * np.eye(2)), + ) + return F, L, Qc, H, Pinf + + def stationary_covariance(self): + var_p = 1. + ell_p = self.lengthscale_periodic + a = self.b_fmK_2igrid * ell_p ** (-2. * self.igrid) * np.exp(-1. / ell_p ** 2.) 
* var_p + q2 = np.sum(a, axis=0) + Pinf_m = np.array([[self.variance]]) + Pinf = block_diag( + np.kron(Pinf_m, q2[0] * np.eye(2)), + np.kron(Pinf_m, q2[1] * np.eye(2)), + np.kron(Pinf_m, q2[2] * np.eye(2)), + np.kron(Pinf_m, q2[3] * np.eye(2)), + np.kron(Pinf_m, q2[4] * np.eye(2)), + np.kron(Pinf_m, q2[5] * np.eye(2)), + np.kron(Pinf_m, q2[6] * np.eye(2)), + ) + return Pinf + + def measurement_model(self): + H_p = np.kron(np.ones([1, self.order + 1]), np.array([1., 0.])) + H_m = np.array([[1.0]]) + H = np.kron(H_m, H_p) + return H + + def state_transition(self, dt): + """ + Calculation of the closed form discrete-time state + transition matrix A = expm(Fฮ”t) for the Quasi-Periodic Matern-3/2 prior + :param dt: step size(s), ฮ”t = tโ‚™ - tโ‚™โ‚‹โ‚ [M+1, 1] + :return: state transition matrix A [M+1, D, D] + """ + # The angular frequency + omega = 2 * np.pi / self.period + harmonics = np.arange(self.order + 1) * omega + R0 = rotation_matrix(dt, harmonics[0]) + R1 = rotation_matrix(dt, harmonics[1]) + R2 = rotation_matrix(dt, harmonics[2]) + R3 = rotation_matrix(dt, harmonics[3]) + R4 = rotation_matrix(dt, harmonics[4]) + R5 = rotation_matrix(dt, harmonics[5]) + R6 = rotation_matrix(dt, harmonics[6]) + A = np.exp(-dt / self.lengthscale_matern) * block_diag(R0, R1, R2, R3, R4, R5, R6) + return A + + +class QuasiPeriodicMatern32(Kernel): + """ + Quasi-periodic kernel in SDE form (product of Periodic and Matern-3/2). + Hyperparameters: + variance, ฯƒยฒ + lengthscale of Periodic, l_p + period, p + lengthscale of Matern, l_m + The associated continuous-time state space model matrices are constructed via + a sum of cosines times a Matern-3/2. 
+ """ + def __init__(self, variance=1.0, lengthscale_periodic=1.0, period=1.0, lengthscale_matern=1.0, order=6): + self.transformed_lengthscale_periodic = objax.TrainVar(np.array(softplus_inv(lengthscale_periodic))) + self.transformed_variance = objax.TrainVar(np.array(softplus_inv(variance))) + self.transformed_period = objax.TrainVar(np.array(softplus_inv(period))) + self.transformed_lengthscale_matern = objax.TrainVar(np.array(softplus_inv(lengthscale_matern))) + super().__init__() + self.name = 'Quasi-periodic Matern-3/2' + self.order = order + self.igrid = np.meshgrid(np.arange(self.order + 1), np.arange(self.order + 1))[1] + factorial_mesh_K = np.array([[1., 1., 1., 1., 1., 1., 1.], + [1., 1., 1., 1., 1., 1., 1.], + [2., 2., 2., 2., 2., 2., 2.], + [6., 6., 6., 6., 6., 6., 6.], + [24., 24., 24., 24., 24., 24., 24.], + [120., 120., 120., 120., 120., 120., 120.], + [720., 720., 720., 720., 720., 720., 720.]]) + b = np.array([[1., 0., 0., 0., 0., 0., 0.], + [0., 2., 0., 0., 0., 0., 0.], + [2., 0., 2., 0., 0., 0., 0.], + [0., 6., 0., 2., 0., 0., 0.], + [6., 0., 8., 0., 2., 0., 0.], + [0., 20., 0., 10., 0., 2., 0.], + [20., 0., 30., 0., 12., 0., 2.]]) + self.b_fmK_2igrid = b * (1. / factorial_mesh_K) * (2. ** -self.igrid) + + @property + def variance(self): + return softplus(self.transformed_variance.value) + + @property + def lengthscale_periodic(self): + return softplus(self.transformed_lengthscale_periodic.value) + + @property + def lengthscale_matern(self): + return softplus(self.transformed_lengthscale_matern.value) + + @property + def period(self): + return softplus(self.transformed_period.value) + + def K(self, X, X2): + raise NotImplementedError + + def kernel_to_state_space(self, R=None): + var_p = 1. + ell_p = self.lengthscale_periodic + a = self.b_fmK_2igrid * ell_p ** (-2. * self.igrid) * np.exp(-1. / ell_p ** 2.) 
* var_p + q2 = np.sum(a, axis=0) + # The angular frequency + omega = 2 * np.pi / self.period + # The model + F_p = np.kron(np.diag(np.arange(self.order + 1)), np.array([[0., -omega], [omega, 0.]])) + L_p = np.eye(2 * (self.order + 1)) + # Qc_p = np.zeros(2 * (self.N + 1)) + Pinf_p = np.kron(np.diag(q2), np.eye(2)) + H_p = np.kron(np.ones([1, self.order + 1]), np.array([1., 0.])) + lam = 3.0 ** 0.5 / self.lengthscale_matern + F_m = np.array([[0.0, 1.0], + [-lam ** 2, -2 * lam]]) + L_m = np.array([[0], + [1]]) + Qc_m = np.array([[12.0 * 3.0 ** 0.5 / self.lengthscale_matern ** 3.0 * self.variance]]) + H_m = np.array([[1.0, 0.0]]) + Pinf_m = np.array([[self.variance, 0.0], + [0.0, 3.0 * self.variance / self.lengthscale_matern ** 2.0]]) + # F = np.kron(F_p, np.eye(2)) + np.kron(np.eye(14), F_m) + F = np.kron(F_m, np.eye(2 * (self.order + 1))) + np.kron(np.eye(2), F_p) + L = np.kron(L_m, L_p) + Qc = np.kron(Qc_m, Pinf_p) + H = np.kron(H_m, H_p) + # Pinf = np.kron(Pinf_m, Pinf_p) + Pinf = block_diag( + np.kron(Pinf_m, q2[0] * np.eye(2)), + np.kron(Pinf_m, q2[1] * np.eye(2)), + np.kron(Pinf_m, q2[2] * np.eye(2)), + np.kron(Pinf_m, q2[3] * np.eye(2)), + np.kron(Pinf_m, q2[4] * np.eye(2)), + np.kron(Pinf_m, q2[5] * np.eye(2)), + np.kron(Pinf_m, q2[6] * np.eye(2)), + ) + return F, L, Qc, H, Pinf + + def stationary_covariance(self): + var_p = 1. + ell_p = self.lengthscale_periodic + a = self.b_fmK_2igrid * ell_p ** (-2. * self.igrid) * np.exp(-1. / ell_p ** 2.) 
* var_p + q2 = np.sum(a, axis=0) + Pinf_m = np.array([[self.variance, 0.0], + [0.0, 3.0 * self.variance / self.lengthscale_matern ** 2.0]]) + Pinf = block_diag( + np.kron(Pinf_m, q2[0] * np.eye(2)), + np.kron(Pinf_m, q2[1] * np.eye(2)), + np.kron(Pinf_m, q2[2] * np.eye(2)), + np.kron(Pinf_m, q2[3] * np.eye(2)), + np.kron(Pinf_m, q2[4] * np.eye(2)), + np.kron(Pinf_m, q2[5] * np.eye(2)), + np.kron(Pinf_m, q2[6] * np.eye(2)), + ) + return Pinf + + def measurement_model(self): + H_p = np.kron(np.ones([1, self.order + 1]), np.array([1., 0.])) + H_m = np.array([[1.0, 0.0]]) + H = np.kron(H_m, H_p) + return H + + def state_transition(self, dt): + """ + Calculation of the closed form discrete-time state + transition matrix A = expm(Fฮ”t) for the Quasi-Periodic Matern-3/2 prior + :param dt: step size(s), ฮ”t = tโ‚™ - tโ‚™โ‚‹โ‚ [M+1, 1] + :return: state transition matrix A [M+1, D, D] + """ + lam = np.sqrt(3.0) / self.lengthscale_matern + # The angular frequency + omega = 2 * np.pi / self.period + harmonics = np.arange(self.order + 1) * omega + R0 = self.subband_mat32(dt, lam, harmonics[0]) + R1 = self.subband_mat32(dt, lam, harmonics[1]) + R2 = self.subband_mat32(dt, lam, harmonics[2]) + R3 = self.subband_mat32(dt, lam, harmonics[3]) + R4 = self.subband_mat32(dt, lam, harmonics[4]) + R5 = self.subband_mat32(dt, lam, harmonics[5]) + R6 = self.subband_mat32(dt, lam, harmonics[6]) + A = np.exp(-dt * lam) * block_diag(R0, R1, R2, R3, R4, R5, R6) + return A + + @staticmethod + def subband_mat32(dt, lam, omega): + R = rotation_matrix(dt, omega) + Ri = np.block([ + [(1. + dt * lam) * R, dt * R], + [-dt * lam ** 2 * R, (1. - dt * lam) * R] + ]) + return Ri + + +class SubbandMatern12(Kernel): + """ + Subband Matern-1/2 (i.e. Exponential) kernel in SDE form (product of Cosine and Matern-1/2). 
+ Hyperparameters: + variance, ฯƒยฒ + lengthscale, l + radial frequency, ฯ‰ + The associated continuous-time state space model matrices are constructed via + kronecker sums and products of the exponential and cosine components: + F = F_exp โŠ• F_cos = ( -1/l -ฯ‰ + ฯ‰ -1/l ) + L = L_exp โŠ— I = ( 1 0 + 0 1 ) + Qc = I โŠ— Qc_exp = ( 2ฯƒยฒ/l 0 + 0 2ฯƒยฒ/l ) + H = H_exp โŠ— H_cos = ( 1 0 ) + Pinf = Pinf_exp โŠ— I = ( ฯƒยฒ 0 + 0 ฯƒยฒ ) + and the discrete-time transition matrix is (for step size ฮ”t), + A = exp(-ฮ”t/l) ( cos(ฯ‰ฮ”t) -sin(ฯ‰ฮ”t) + sin(ฯ‰ฮ”t) cos(ฯ‰ฮ”t) ) + """ + def __init__(self, variance=1.0, lengthscale=1.0, radial_frequency=1.0, fix_variance=False): + self.transformed_lengthscale = objax.TrainVar(np.array(softplus_inv(lengthscale))) + if fix_variance: + self.transformed_variance = objax.StateVar(np.array(softplus_inv(variance))) + else: + self.transformed_variance = objax.TrainVar(np.array(softplus_inv(variance))) + self.transformed_radial_frequency = objax.TrainVar(np.array(softplus_inv(radial_frequency))) + super().__init__() + self.name = 'Subband Matern-1/2' + + @property + def variance(self): + return softplus(self.transformed_variance.value) + + @property + def lengthscale(self): + return softplus(self.transformed_lengthscale.value) + + @property + def radial_frequency(self): + return softplus(self.transformed_radial_frequency.value) + + def K(self, X, X2): + raise NotImplementedError + + def kernel_to_state_space(self, R=None): + F_mat = np.array([[-1.0 / self.lengthscale]]) + L_mat = np.array([[1.0]]) + Qc_mat = np.array([[2.0 * self.variance / self.lengthscale]]) + H_mat = np.array([[1.0]]) + Pinf_mat = np.array([[self.variance]]) + F_cos = np.array([[0.0, -self.radial_frequency], + [self.radial_frequency, 0.0]]) + H_cos = np.array([[1.0, 0.0]]) + # F = (-1/l -ฯ‰ + # ฯ‰ -1/l) + F = np.kron(F_mat, np.eye(2)) + F_cos + L = np.kron(L_mat, np.eye(2)) + Qc = np.kron(np.eye(2), Qc_mat) + H = np.kron(H_mat, H_cos) + Pinf = np.kron(Pinf_mat, 
np.eye(2)) + return F, L, Qc, H, Pinf + + def stationary_covariance(self): + Pinf_mat = np.array([[self.variance]]) + Pinf = np.kron(Pinf_mat, np.eye(2)) + return Pinf + + def measurement_model(self): + H_mat = np.array([[1.0]]) + H_cos = np.array([[1.0, 0.0]]) + H = np.kron(H_mat, H_cos) + return H + + def state_transition(self, dt): + """ + Calculation of the closed form discrete-time state + transition matrix A = expm(Fฮ”t) for the Subband Matern-1/2 prior: + A = exp(-ฮ”t/l) ( cos(ฯ‰ฮ”t) -sin(ฯ‰ฮ”t) + sin(ฯ‰ฮ”t) cos(ฯ‰ฮ”t) ) + :param dt: step size(s), ฮ”t = tโ‚™ - tโ‚™โ‚‹โ‚ [1] + :return: state transition matrix A [2, 2] + """ + R = rotation_matrix(dt, self.radial_frequency) + A = np.exp(-dt / self.lengthscale) * R # [2, 2] + return A + + +class Independent(Kernel): + """ + A stack of independent GP priors. 'kernels' is a list of GP kernels, and this class stacks + the state space models such that each component is fed to the likelihood. + This class differs from Sum only in the measurement model. 
+ """ + def __init__(self, kernels): + self.num_kernels = len(kernels) + for i in range(self.num_kernels): + selfdotkerneli = "self.kernel" + str(i) + exec(selfdotkerneli + " = kernels[i]") + self.name = 'Independent' + + def K(self, X, X2): + Kstack = [self.kernel0.K(X, X2)] + for i in range(1, self.num_kernels): + kerneli = eval("self.kernel" + str(i)) + Kstack = Kstack + [kerneli.K(X, X2)] + return Kstack + + def kernel_to_state_space(self, R=None): + F, L, Qc, H, Pinf = self.kernel0.kernel_to_state_space(R) + for i in range(1, self.num_kernels): + kerneli = eval("self.kernel" + str(i)) + F_, L_, Qc_, H_, Pinf_ = kerneli.kernel_to_state_space(R) + F = block_diag(F, F_) + L = block_diag(L, L_) + Qc = block_diag(Qc, Qc_) + H = block_diag(H, H_) + Pinf = block_diag(Pinf, Pinf_) + return F, L, Qc, H, Pinf + + def measurement_model(self): + H = self.kernel0.measurement_model() + for i in range(1, self.num_kernels): + kerneli = eval("self.kernel" + str(i)) + H_ = kerneli.measurement_model() + H = block_diag(H, H_) + return H + + def stationary_covariance(self): + Pinf = self.kernel0.stationary_covariance() + for i in range(1, self.num_kernels): + kerneli = eval("self.kernel" + str(i)) + Pinf_ = kerneli.stationary_covariance() + Pinf = block_diag(Pinf, Pinf_) + return Pinf + + def inducing_precision(self): + Qzz0, Lzz0 = self.kernel0.inducing_precision() + Qzz, Lzz = [Qzz0], [Lzz0] + for i in range(1, self.num_kernels): + kerneli = eval("self.kernel" + str(i)) + Qzz_, Lzz_ = kerneli.inducing_precision() + Qzz, Lzz = Qzz + [Qzz_], Lzz + [Lzz_] + return Qzz, Lzz + + def state_transition(self, dt): + """ + Calculation of the discrete-time state transition matrix A = expm(Fฮ”t) for a sum of GPs + :param dt: step size(s), ฮ”t = tโ‚™ - tโ‚™โ‚‹โ‚ [1] + :return: state transition matrix A [D, D] + """ + A = self.kernel0.state_transition(dt) + for i in range(1, self.num_kernels): + kerneli = eval("self.kernel" + str(i)) + A_ = kerneli.state_transition(dt) + A = block_diag(A, 
A_) + return A + + +class Separate(Independent): + pass + + +class Stack(Independent): + pass + + +class Sum(Independent): + """ + A sum of GP priors. 'components' is a list of GP kernels, and this class stacks + the state space models to produce their sum. + This class differs from Independent only in the measurement model. + """ + def __init__(self, kernels): + super().__init__(kernels=kernels) + self.name = 'Sum' + + def K(self, X, X2): + Ksum = self.kernel0.K(X, X2) + for i in range(1, self.num_kernels): + kerneli = eval("self.kernel" + str(i)) + Ksum = Ksum + kerneli.K(X, X2) + return Ksum + + def kernel_to_state_space(self, R=None): + F, L, Qc, H, Pinf = self.kernel0.kernel_to_state_space(R) + for i in range(1, self.num_kernels): + kerneli = eval("self.kernel" + str(i)) + F_, L_, Qc_, H_, Pinf_ = kerneli.kernel_to_state_space(R) + F = block_diag(F, F_) + L = block_diag(L, L_) + Qc = block_diag(Qc, Qc_) + H = np.block([ + H, H_ + ]) + Pinf = block_diag(Pinf, Pinf_) + return F, L, Qc, H, Pinf + + def measurement_model(self): + H = self.kernel0.measurement_model() + for i in range(1, self.num_kernels): + kerneli = eval("self.kernel" + str(i)) + H_ = kerneli.measurement_model() + H = np.block([ + H, H_ + ]) + return H diff --git a/newt/likelihoods.py b/newt/likelihoods.py new file mode 100644 index 0000000..49339c5 --- /dev/null +++ b/newt/likelihoods.py @@ -0,0 +1,927 @@ +import objax +import jax.numpy as np +from jax import grad, jacrev, vmap +from jax.scipy.special import erf, gammaln +from jax.scipy.linalg import cholesky +from .cubature import ( + gauss_hermite, + variational_expectation_cubature, + moment_match_cubature, + statistical_linear_regression_cubature, + log_density_cubature, + predict_cubature +) +from .utils import ( + solve, + transpose, + softplus, + softplus_inv, + sigmoid, + pep_constant, + mvn_logpdf, + mvn_logpdf_and_derivs +) +import math + +LOG2PI = math.log(2 * math.pi) + + +class Likelihood(objax.Module): + """ + The likelihood model 
class, p(yโ‚™|fโ‚™). Each likelihood implements its own methods used during inference: + Moment matching is used for EP + Variational expectation is used for VI + Statistical linearisation is used for PL + Analytical linearisation is used for EKS + Log-likelihood gradients are used for Laplace + If no custom parameter update method is provided, cubature is used (Gauss-Hermite by default). + The requirement for all inference methods to work is the implementation of the following methods: + evaluate_likelihood(), which simply evaluates the likelihood given the latent function + evaluate_log_likelihood() + conditional_moments(), which return E[y|f] and Cov[y|f] + """ + + def __call__(self, y, f): + return self.evaluate_likelihood(y, f) + + def evaluate_likelihood(self, y, f): + raise NotImplementedError + + def evaluate_log_likelihood(self, y, f): + raise NotImplementedError + + def conditional_moments(self, f): + raise NotImplementedError + + def log_likelihood_gradients_(self, y, f): + """ + Evaluate the Jacobian and Hessian of the log-likelihood + """ + log_lik = self.evaluate_log_likelihood(y, f) + f = np.squeeze(f) + J = jacrev(self.evaluate_log_likelihood, argnums=1) + H = jacrev(J, argnums=1) + return log_lik, J(y, f), H(y, f) + + def log_likelihood_gradients(self, y, f): + """ + Most likelihoods factorise across data points. For multi-latent models, a custom method must be implemented. + """ + # align shapes and compute mask + y = y.reshape(-1, 1) + f = f.reshape(-1, 1) + mask = np.isnan(y) + y = np.where(mask, f, y) + + # compute gradients of the log likelihood + log_lik, J, H = vmap(self.log_likelihood_gradients_)(y, f) + + # apply mask + mask = np.squeeze(mask) + log_lik = np.where(mask, 0., log_lik) + J = np.where(mask, np.nan, J) + H = np.where(mask, np.nan, H) + + return log_lik, J, np.diag(H) + + def variational_expectation_(self, y, m, v, cubature=None): + """ + If no custom variational expectation method is provided, we use cubature. 
+ """ + return variational_expectation_cubature(self, y, m, v, cubature) + + def variational_expectation(self, y, m, v, cubature=None): + """ + Most likelihoods factorise across data points. For multi-latent models, a custom method must be implemented. + """ + + # align shapes and compute mask + y = y.reshape(-1, 1, 1) + m = m.reshape(-1, 1, 1) + v = np.diag(v).reshape(-1, 1, 1) + mask = np.isnan(y) + y = np.where(mask, m, y) + + # compute variational expectations and their derivatives + var_exp, dE_dm, d2E_dm2 = vmap(self.variational_expectation_, (0, 0, 0, None))(y, m, v, cubature) + + # apply mask + var_exp = np.where(np.squeeze(mask), 0., np.squeeze(var_exp)) + dE_dm = np.where(mask, np.nan, dE_dm) + d2E_dm2 = np.where(mask, np.nan, d2E_dm2) + + return var_exp, np.squeeze(dE_dm, axis=2), np.diag(np.squeeze(d2E_dm2, axis=(1, 2))) + + def log_density(self, y, mean, cov, cubature=None): + """ + """ + return log_density_cubature(self, y, mean, cov, cubature) + + def moment_match_(self, y, cav_mean, cav_cov, power=1.0, cubature=None): + """ + If no custom moment matching method is provided, we use cubature. + """ + return moment_match_cubature(self, y, cav_mean, cav_cov, power, cubature) + + def moment_match(self, y, m, v, power=1.0, cubature=None): + """ + """ + # align shapes and compute mask + y = y.reshape(-1, 1) + m = m.reshape(-1, 1) + mask = np.isnan(y) + y = np.where(mask, m, y) + + lZ, dlZ, d2lZ = self.moment_match_(y, m, v, power, cubature) + + return lZ, dlZ, d2lZ + + def statistical_linear_regression_(self, m, v, cubature=None): + """ + If no custom SLR method is provided, we use cubature. + """ + return statistical_linear_regression_cubature(self, m, v, cubature) + + def statistical_linear_regression(self, m, v, cubature=None): + """ + Most likelihoods factorise across data points. For multi-latent models, a custom method must be implemented. 
+ TODO: multi-dim case + """ + + # align shapes and compute mask + m = m.reshape(-1, 1, 1) + v = np.diag(v).reshape(-1, 1, 1) + + # compute SLR + mu, S, C, d_mu = vmap(self.statistical_linear_regression_, (0, 0, None))(m, v, cubature) + + omega = S - transpose(C) @ (1 / v) @ C + + return np.squeeze(mu, axis=2), np.diag(np.squeeze(d_mu, axis=(1, 2))), np.diag(np.squeeze(omega, axis=(1, 2))) + + def observation_model(self, f, sigma): + """ + The implicit observation model is: + h(fโ‚™,rโ‚™) = E[yโ‚™|fโ‚™] + โˆšCov[yโ‚™|fโ‚™] ฯƒโ‚™ + """ + conditional_expectation, conditional_covariance = self.conditional_moments(f) + obs_model = conditional_expectation + cholesky(conditional_covariance) @ sigma + return np.squeeze(obs_model) + + def analytical_linearisation(self, m, sigma=None): + """ + Compute the Jacobian of the state space observation model w.r.t. the + function fโ‚™ and the noise term ฯƒโ‚™. + The implicit observation model is: + h(fโ‚™,rโ‚™) = E[yโ‚™|fโ‚™] + โˆšCov[yโ‚™|fโ‚™] ฯƒโ‚™ + The Jacobians are evaluated at the means, fโ‚™=m, ฯƒโ‚™=0, to be used during + Extended Kalman filtering and Extended EP. 
+ """ + sigma = np.array([[0.0]]) if sigma is None else sigma + + m = m.reshape(-1, 1, 1) + sigma = sigma.reshape(-1, 1, 1) + + Jf, Jsigma = vmap(jacrev(self.observation_model, argnums=(0, 1)))(m, sigma) + + return np.diag(np.squeeze(Jf, axis=(1, 2))), np.diag(np.squeeze(Jsigma, axis=(1, 2))) + + def predict(self, mean_f, var_f, cubature=None): + """ + predict in data space given predictive mean and var of the latent function + TODO: multi-latent case + """ + if mean_f.shape[0] > 1: + return vmap(predict_cubature, [None, 0, 0, None])( + self, + mean_f.reshape(-1, 1, 1), + var_f.reshape(-1, 1, 1), + cubature + ) + else: + return predict_cubature(self, mean_f, var_f, cubature) + + +class Gaussian(Likelihood): + """ + The Gaussian likelihood: + p(yโ‚™|fโ‚™) = ๐“(yโ‚™|fโ‚™,ฯƒยฒ) + """ + def __init__(self, + variance=0.1): + """ + :param variance: The observation noise variance, ฯƒยฒ + """ + self.transformed_variance = objax.TrainVar(np.array(softplus_inv(variance))) + super().__init__() + self.name = 'Gaussian' + self.link_fn = lambda f: f + + @property + def variance(self): + return softplus(self.transformed_variance.value) + + def evaluate_likelihood(self, y, f): + """ + Evaluate the Gaussian function ๐“(yโ‚™|fโ‚™,ฯƒยฒ). + Can be used to evaluate Q cubature points. + :param y: observed data yโ‚™ [scalar] + :param f: mean, i.e. the latent function value fโ‚™ [Q, 1] + :return: + ๐“(yโ‚™|fโ‚™,ฯƒยฒ), where ฯƒยฒ is the observation noise [Q, 1] + """ + return (2 * np.pi * self.variance) ** -0.5 * np.exp(-0.5 * (y - f) ** 2 / self.variance) + + def evaluate_log_likelihood(self, y, f): + """ + Evaluate the log-Gaussian function log๐“(yโ‚™|fโ‚™,ฯƒยฒ). + Can be used to evaluate Q cubature points. + :param y: observed data yโ‚™ [scalar] + :param f: mean, i.e. 
        the latent function value fₙ [Q, 1]
        :return:
            log𝓝(yₙ|fₙ,σ²), where σ² is the observation noise [Q, 1]
        """
        return np.squeeze(-0.5 * np.log(2 * np.pi * self.variance) - 0.5 * (y - f) ** 2 / self.variance)

    def conditional_moments(self, f):
        """
        The first two conditional moments of a Gaussian are the mean and variance:
            E[y|f] = f
            Var[y|f] = σ²
        """
        return f, np.array([[self.variance]])

    def variational_expectation_(self, y, post_mean, post_cov, cubature=None):
        """
        Computes the "variational expectation", i.e. the
        expected log-likelihood, and its derivatives w.r.t. the posterior mean
            E[log p(yₙ|fₙ)] = ∫ log p(yₙ|fₙ) 𝓝(fₙ|mₙ,vₙ) dfₙ
        Closed form for the Gaussian likelihood, so no cubature is needed.
        :param y: observed data (yₙ) [scalar]
        :param post_mean: posterior mean (mₙ) [scalar]
        :param post_cov: posterior variance (vₙ) [scalar]
        :param cubature: the function to compute sigma points and weights to use during cubature
        :return:
            exp_log_lik: the expected log likelihood, E[log p(yₙ|fₙ)] [scalar]
            dE_dm: derivative of E[log p(yₙ|fₙ)] w.r.t. mₙ [scalar]
            d2E_dm2: 2nd derivative of E[log p(yₙ|fₙ)] w.r.t. mₙ [scalar]
        """
        # TODO: multi-dim case
        # Compute expected log likelihood:
        #   E[log p(yₙ|fₙ)] = ∫ log p(yₙ|fₙ) 𝓝(fₙ|mₙ,vₙ) dfₙ
        exp_log_lik = (
            -0.5 * np.log(2 * np.pi)
            - 0.5 * np.log(self.variance)
            - 0.5 * ((y - post_mean) ** 2 + post_cov) / self.variance
        )
        # Compute first derivative:
        dE_dm = (y - post_mean) / self.variance
        # Compute second derivative:
        d2E_dm2 = -1 / self.variance
        return exp_log_lik, dE_dm, d2E_dm2.reshape(-1, 1)

    def moment_match_(self, y, cav_mean, cav_cov, power=1.0, cubature=None):
        """
        Closed form Gaussian moment matching.
        Calculates the log partition function of the EP tilted distribution:
            logZₙ = log ∫ 𝓝(yₙ|fₙ,σ²) 𝓝(fₙ|mₙ,vₙ) dfₙ = E[𝓝(yₙ|fₙ,σ²)]
        and its derivatives w.r.t. mₙ, which are required for moment matching.
        :param y: observed data (yₙ)
        :param cav_mean: cavity mean (mₙ)
        :param cav_cov: cavity covariance (vₙ)
        :param power: EP power [scalar]
        :param cubature: not used
        :return:
            lZ: the log partition function, logZₙ [scalar]
            dlZ: first derivative of logZₙ w.r.t. mₙ (if derivatives=True) [scalar]
            d2lZ: second derivative of logZₙ w.r.t. mₙ (if derivatives=True) [scalar]
        """
        lik_cov = self.variance * np.eye(cav_cov.shape[0])
        # log partition function, lZ:
        #   logZₙ = log ∫ 𝓝(yₙ|fₙ,σ²) 𝓝(fₙ|mₙ,vₙ) dfₙ
        #         = log 𝓝(yₙ|mₙ,σ²+vₙ)
        lZ, dlZ, d2lZ = mvn_logpdf_and_derivs(
            y,
            cav_mean,
            lik_cov / power + cav_cov
        )
        constant = pep_constant(lik_cov, power)  # power-EP normalising correction
        lZ += constant
        return lZ, dlZ, d2lZ

    def log_density(self, y, mean, cov, cubature=None):
        """
        logZₙ = log ∫ 𝓝(yₙ|fₙ,σ²) 𝓝(fₙ|mₙ,vₙ) dfₙ = E[𝓝(yₙ|fₙ,σ²)]
        :param y: observed data (yₙ)
        :param mean: cavity mean (mₙ)
        :param cov: cavity variance (vₙ)
        :param cubature: not used
        :return:
            lZ: the log density, logZₙ [scalar]
        """
        # logZₙ = log ∫ 𝓝(yₙ|fₙ,σ²) 𝓝(fₙ|mₙ,vₙ) dfₙ = log 𝓝(yₙ|mₙ,σ²+vₙ)
        lZ = mvn_logpdf(
            y,
            mean,
            self.variance * np.eye(cov.shape[0]) + cov
        )
        return lZ

    def predict(self, mean_f, var_f, cubature=None):
        # exact Gaussian prediction: add the observation noise to the latent variance
        return mean_f, var_f + self.variance


class Bernoulli(Likelihood):
    """
    Bernoulli likelihood is p(yₙ|fₙ) = Pʸ(1-P)⁽¹⁻ʸ⁾, where P = E[yₙ=1|fₙ].
    Link function maps latent GP to [0,1].
    The Probit link function, i.e. the Error Function Likelihood:
    i.e.
    the Gaussian (Normal) cumulative density function:
        P = E[yₙ=1|fₙ] = Φ(fₙ)
                       = ∫ 𝓝(x|0,1) dx, where the integral is over (-∞, fₙ],
    The Normal CDF is calculated using the error function:
                       = (1 + erf(fₙ / √2)) / 2
    for erf(z) = (2/√π) ∫ exp(-x²) dx, where the integral is over [0, z]
    The logit link function:
        P = E[yₙ=1|fₙ] = 1 / 1 + exp(-fₙ)
    """
    def __init__(self,
                 link='probit'):
        super().__init__()
        if link == 'logit':
            self.link_fn = lambda f: 1 / (1 + np.exp(-f))
            self.dlink_fn = lambda f: np.exp(f) / (1 + np.exp(f)) ** 2
            self.link = link
        elif link == 'probit':
            # jitter keeps the probability strictly inside (0, 1) for numerical stability
            jitter = 1e-3
            self.link_fn = lambda f: 0.5 * (1.0 + erf(f / np.sqrt(2.0))) * (1 - 2 * jitter) + jitter
            self.dlink_fn = lambda f: grad(self.link_fn)(np.squeeze(f)).reshape(-1, 1)
            self.link = link
        else:
            raise NotImplementedError('link function not implemented')
        self.name = 'Bernoulli'

    def evaluate_likelihood(self, y, f):
        """
        :param y: observed data yₙ ϵ {-1, +1} [scalar]
        :param f: latent function value fₙ ϵ ℝ
        :return:
            p(yₙ|fₙ) = Pʸ(1-P)⁽¹⁻ʸ⁾
        """
        return np.where(np.equal(y, 1), self.link_fn(f), 1 - self.link_fn(f))

    def evaluate_log_likelihood(self, y, f):
        """
        :param y: observed data yₙ ϵ {-1, +1} [scalar]
        :param f: latent function value fₙ ϵ ℝ
        :return:
            log p(yₙ|fₙ)
        """
        return np.squeeze(np.log(self.evaluate_likelihood(y, f)))

    def conditional_moments(self, f):
        """
        The first two conditional moments of a Probit likelihood are:
            E[yₙ|fₙ] = Φ(fₙ)
            Var[yₙ|fₙ] = Φ(fₙ) (1 - Φ(fₙ))
        """
        return self.link_fn(f), self.link_fn(f)-(self.link_fn(f)**2)


class Probit(Bernoulli):
    """
    The probit likelihood = Bernoulli likelihood with probit link.
    """
    def __init__(self):
        super().__init__(link='probit')


class Erf(Probit):
    """
    The error function likelihood = probit = Bernoulli likelihood with probit link.
    """
    pass


class Logit(Bernoulli):
    """
    The logit likelihood = Bernoulli likelihood with logit link.
    """
    def __init__(self):
        super().__init__(link='logit')


class Logistic(Logit):
    """
    The logistic likelihood = logit = Bernoulli likelihood with logit link.
    """
    pass


class Poisson(Likelihood):
    """
    TODO: tidy docstring
    The Poisson likelihood:
        p(yₙ|fₙ) = Poisson(fₙ) = μʸ exp(-μ) / yₙ!
    where μ = g(fₙ) = mean = variance is the Poisson intensity.
    yₙ is non-negative integer count data.
    No closed form moment matching is available, so we default to using cubature.

    Letting Zy = gamma(yₙ+1) = yₙ!, we get log p(yₙ|fₙ) = log(g(fₙ))yₙ - g(fₙ) - log(Zy)
    The larger the intensity μ, the stronger the likelihood resembles a Gaussian
    since skewness = 1/sqrt(μ) and kurtosis = 1/μ.
    Two possible link functions:
        'exp':      link(fₙ) = exp(fₙ),         we have p(yₙ|fₙ) = exp(fₙyₙ-exp(fₙ)) / Zy.
        'logistic': link(fₙ) = log(1+exp(fₙ))), we have p(yₙ|fₙ) = logʸ(1+exp(fₙ)))(1+exp(fₙ)) / Zy.
    """
    def __init__(self,
                 binsize=1,
                 link='exp'):
        """
        :param link: link function, either 'exp' or 'logistic'
        """
        super().__init__()
        if link == 'exp':
            self.link_fn = lambda mu: np.exp(mu)
            self.dlink_fn = lambda mu: np.exp(mu)
        elif link == 'logistic':
            self.link_fn = lambda mu: softplus(mu)
            self.dlink_fn = lambda mu: sigmoid(mu)
        else:
            raise NotImplementedError('link function not implemented')
        self.binsize = np.array(binsize)
        self.name = 'Poisson'

    def evaluate_likelihood(self, y, f):
        """
        Evaluate the Poisson likelihood:
            p(yₙ|fₙ) = Poisson(fₙ) = μʸ exp(-μ) / yₙ!
        for μ = g(fₙ), where g() is the link function (exponential or logistic).
        We use the gamma function to evaluate yₙ! = gamma(yₙ + 1).
        Can be used to evaluate Q cubature points when performing moment matching.
+ :param y: observed data (yโ‚™) [scalar] + :param f: latent function value (fโ‚™) [Q, 1] + :return: + Poisson(fโ‚™) = ฮผสธ exp(-ฮผ) / yโ‚™! [Q, 1] + """ + mu = self.link_fn(f) * self.binsize + return np.exp(y * np.log(mu) - mu - gammaln(y + 1)) + + def evaluate_log_likelihood(self, y, f): + """ + Evaluate the Poisson log-likelihood: + log p(yโ‚™|fโ‚™) = log Poisson(fโ‚™) = log(ฮผสธ exp(-ฮผ) / yโ‚™!) + for ฮผ = g(fโ‚™), where g() is the link function (exponential or logistic). + We use the gamma function to evaluate yโ‚™! = gamma(yโ‚™ + 1). + Can be used to evaluate Q cubature points when performing moment matching. + :param y: observed data (yโ‚™) [scalar] + :param f: latent function value (fโ‚™) [Q, 1] + :return: + log Poisson(fโ‚™) = log(ฮผสธ exp(-ฮผ) / yโ‚™!) [Q, 1] + """ + mu = self.link_fn(f) * self.binsize + return np.squeeze(y * np.log(mu) - mu - gammaln(y + 1)) + + def observation_model(self, f, sigma): + """ + TODO: sort out broadcasting so we don't need this additional function (only difference is the transpose) + The implicit observation model is: + h(fโ‚™,rโ‚™) = E[yโ‚™|fโ‚™] + โˆšCov[yโ‚™|fโ‚™] ฯƒโ‚™ + """ + conditional_expectation, conditional_covariance = self.conditional_moments(f) + obs_model = conditional_expectation + cholesky(conditional_covariance.T) @ sigma + return np.squeeze(obs_model) + + def conditional_moments(self, f): + """ + The first two conditional moments of a Poisson distribution are equal to the intensity: + E[yโ‚™|fโ‚™] = link(fโ‚™) + Var[yโ‚™|fโ‚™] = link(fโ‚™) + """ + # TODO: multi-dim case + return self.link_fn(f) * self.binsize, self.link_fn(f) * self.binsize + # return self.link_fn(f) * self.binsize, vmap(np.diag, 1, 2)(self.link_fn(f) * self.binsize) + + def analytical_linearisation(self, m, sigma=None): + """ + Compute the Jacobian of the state space observation model w.r.t. the + function fโ‚™ and the noise term ฯƒโ‚™. 
+ """ + link_fm = self.link_fn(m) * self.binsize + dlink_fm = self.dlink_fn(m) * self.binsize + Jf = np.diag(np.squeeze(link_fm + 0.5 * link_fm ** -0.5 * dlink_fm * sigma, axis=-1)) + Jsigma = np.diag(np.squeeze(link_fm ** 0.5, axis=-1)) + return Jf, Jsigma + + def variational_expectation_(self, y, post_mean, post_cov, cubature=None): + """ + Computes the "variational expectation", i.e. the + expected log-likelihood, and its derivatives w.r.t. the posterior mean + Let a = E[f] = m and b = E[exp(f)] = exp(m+v/2) then + E[log Poisson(y | exp(f)*binsize)] = Y log binsize + E[Y * log exp(f)] - E[binsize * exp(f)] - log Y! + = Y log binsize + Y * m - binsize * exp(m + v/2) - log Y! + :param y: observed data (yโ‚™) [scalar] + :param post_mean: posterior mean (mโ‚™) [scalar] + :param post_cov: posterior variance (vโ‚™) [scalar] + :param cubature: the function to compute sigma points and weights to use during cubature + :return: + exp_log_lik: the expected log likelihood, E[log p(yโ‚™|fโ‚™)] [scalar] + dE_dm: derivative of E[log p(yโ‚™|fโ‚™)] w.r.t. mโ‚™ [scalar] + d2E_dm2: 2nd derivative of E[log p(yโ‚™|fโ‚™)] w.r.t. 
                mₙ [scalar]
        """
        # TODO: multi-dim case
        # closed form: E[exp(f)] under N(m, v) is exp(m + v/2)
        exp_mean_cov = self.binsize * np.exp(post_mean + post_cov / 2)
        # Compute expected log likelihood:
        exp_log_lik = (
            y * np.log(self.binsize)
            + y * post_mean
            - exp_mean_cov
            - gammaln(y + 1.0)
        )
        # Compute first derivative:
        dE_dm = y - exp_mean_cov
        # Compute second derivative:
        d2E_dm2 = -exp_mean_cov
        return exp_log_lik, dE_dm, d2E_dm2.reshape(-1, 1)


class HeteroscedasticNoise(Likelihood):
    """
    The Heteroscedastic Noise likelihood:
        p(y|f1,f2) = N(y|f1,link(f2)^2)
    The first latent function models the mean and the second (via the link) the noise scale.
    """
    def __init__(self, link='softplus'):
        """
        :param link: link function, either 'exp' or 'softplus' (note that the link is modified with an offset)
        """
        super().__init__()
        if link == 'exp':
            self.link_fn = lambda mu: np.exp(mu)
            self.dlink_fn = lambda mu: np.exp(mu)
        elif link == 'softplus':
            # small offset keeps the noise scale strictly positive
            self.link_fn = lambda mu: softplus(mu) + 1e-10
            self.dlink_fn = lambda mu: sigmoid(mu)  # derivative of softplus is the logistic sigmoid
        else:
            raise NotImplementedError('link function not implemented')
        self.name = 'Heteroscedastic Noise'

    def evaluate_likelihood(self, y, f):
        """
        Evaluate the likelihood N(y|f1,link(f2)^2) at the stacked latents f = [f1, f2].
        """
        mu, var = self.conditional_moments(f)
        return (2 * np.pi * var) ** -0.5 * np.exp(-0.5 * (y - mu) ** 2 / var)

    def evaluate_log_likelihood(self, y, f):
        """
        Evaluate the log-likelihood log N(y|f1,link(f2)^2).
        """
        mu, var = self.conditional_moments(f)
        return np.squeeze(-0.5 * np.log(2 * np.pi * var) - 0.5 * (y - mu) ** 2 / var)

    def conditional_moments(self, f, hyp=None):
        """
        E[y|f] = f1 and Var[y|f] = link(f2)^2, sliced from the stacked latents f.
        """
        return f[:1], self.link_fn(f[1:2]) ** 2

    def log_likelihood_gradients(self, y, f):
        # delegates to the shared implementation; the PSD correction below is currently disabled
        log_lik, J, H = self.log_likelihood_gradients_(y, f)
        # H = -ensure_positive_precision(-H)
        return log_lik, J, H

    def log_density(self, y, mean, cov, cubature=None):
        """
        Compute log ∫ p(y|f1,f2) 𝓝(f|mean,cov) df via Gauss-Hermite cubature over f2
        (the f1 integral is available in closed form, see obs_var below).
        """
        if cubature is None:
            x, w = gauss_hermite(1, 20)  # Gauss-Hermite sigma points and weights
        else:
            x, w = cubature(1)
        # sigma_points = np.sqrt(2) * np.sqrt(v) * x + m  # scale locations according to cavity dist.
        sigma_points = np.sqrt(cov[1, 1]) * x + mean[1]  # fsigᵢ=xᵢ√cₙ + mₙ: scale locations according to cavity
        f2 = self.link_fn(sigma_points) ** 2.
        obs_var = f2 + cov[0, 0]  # heteroscedastic noise plus the marginal variance of f1
        normpdf = (2 * np.pi * obs_var) ** -0.5 * np.exp(-0.5 * (y - mean[0, 0]) ** 2 / obs_var)
        Z = np.sum(w * normpdf)
        lZ = np.log(np.maximum(Z, 1e-8))  # clip to avoid log(0)
        return lZ

    def moment_match(self, y, cav_mean, cav_cov, power=1.0, cubature=None):
        """
        Power EP moment matching: compute the log partition function lZ of the tilted
        distribution and its first/second derivatives w.r.t. the cavity mean, via cubature over f2.
        TODO: implement proper Hessian approx., as done in variational_expectation()
        """
        if cubature is None:
            x, w = gauss_hermite(1, 20)  # Gauss-Hermite sigma points and weights
        else:
            x, w = cubature(1)
        # sigma_points = np.sqrt(2) * np.sqrt(v) * x + m  # scale locations according to cavity dist.
        sigma_points = np.sqrt(cav_cov[1, 1]) * x + cav_mean[1]  # fsigᵢ=xᵢ√cₙ + mₙ: scale locations according to cavity

        f2 = self.link_fn(sigma_points) ** 2. / power
        obs_var = f2 + cav_cov[0, 0]
        # constant arising from raising the Gaussian likelihood to the EP power
        const = power ** -0.5 * (2 * np.pi * self.link_fn(sigma_points) ** 2.) ** (0.5 - 0.5 * power)
        normpdf = const * (2 * np.pi * obs_var) ** -0.5 * np.exp(-0.5 * (y - cav_mean[0, 0]) ** 2 / obs_var)
        Z = np.sum(w * normpdf)
        Zinv = 1. / np.maximum(Z, 1e-8)
        lZ = np.log(np.maximum(Z, 1e-8))

        # first derivatives of log Z w.r.t. the two cavity means
        dZ_integrand1 = (y - cav_mean[0, 0]) / obs_var * normpdf
        dlZ1 = Zinv * np.sum(w * dZ_integrand1)

        dZ_integrand2 = (sigma_points - cav_mean[1, 0]) / cav_cov[1, 1] * normpdf
        dlZ2 = Zinv * np.sum(w * dZ_integrand2)

        # second derivatives; the Hessian is assembled as a diagonal (cross terms dropped)
        d2Z_integrand1 = (-(f2 + cav_cov[0, 0]) ** -1 + ((y - cav_mean[0, 0]) / obs_var) ** 2) * normpdf
        d2lZ1 = -dlZ1 ** 2 + Zinv * np.sum(w * d2Z_integrand1)

        d2Z_integrand2 = (-cav_cov[1, 1] ** -1 + ((sigma_points - cav_mean[1, 0]) / cav_cov[1, 1]) ** 2) * normpdf
        d2lZ2 = -dlZ2 ** 2 + Zinv * np.sum(w * d2Z_integrand2)

        dlZ = np.block([[dlZ1],
                        [dlZ2]])
        d2lZ = np.block([[d2lZ1, 0],
                         [0., d2lZ2]])

        return lZ, dlZ, d2lZ

    def log_expected_likelihood(self, y, x, w, cav_mean, cav_var, power):
        # cubature evaluation of the (powered) expected likelihood under the cavity
        sigma_points = np.sqrt(cav_var[1]) * x + cav_mean[1]
        f2 = self.link_fn(sigma_points) ** 2. / power
        obs_var = f2 + cav_var[0]
        const = power ** -0.5 * (2 * np.pi * self.link_fn(sigma_points) ** 2.) ** (0.5 - 0.5 * power)
        normpdf = const * (2 * np.pi * obs_var) ** -0.5 * np.exp(-0.5 * (y - cav_mean[0]) ** 2 / obs_var)
        Z = np.sum(w * normpdf)
        # NOTE(review): uses log(Z + 1e-8) here but log(max(Z, 1e-8)) in moment_match/log_density — confirm intended
        lZ = np.log(Z + 1e-8)
        return lZ

    def expected_log_likelihood(self, y, m, v, cubature=None):
        """
        Compute E[log p(y|f)] under 𝓝(f|m,v) via 2D Gauss-Hermite cubature.
        """
        if cubature is None:
            x, w = gauss_hermite(2, 20)  # Gauss-Hermite sigma points and weights
        else:
            x, w = cubature(2)
        v = (v + v.T) / 2  # symmetrise before the cholesky
        sigma_points = cholesky(v) @ x + m  # fsigᵢ=xᵢ√(2vₙ) + mₙ: scale locations according to cavity dist.
        # Compute expected log likelihood via cubature:
        # E[log p(yₙ|fₙ)] = ∫ log p(yₙ|fₙ) 𝓝(fₙ|mₙ,vₙ) dfₙ
        #                 ≈ ∑ᵢ wᵢ log p(yₙ|fsigᵢ)
        exp_log_lik = np.sum(w * self.evaluate_log_likelihood(y, sigma_points))
        return exp_log_lik

    def expected_log_likelihood_dm(self, y, m, v, cubature=None):
        """
        Derivative of the expected log-likelihood w.r.t. the mean m, computed via autodiff.
        """
        dE_dm = grad(self.expected_log_likelihood, argnums=1)(y, m, v, cubature)
        return dE_dm

    def expected_log_likelihood_dm2(self, y, m, v, cubature=None):
        """
        Second derivative of the expected log-likelihood w.r.t. the mean m, via autodiff.
        """
        d2E_dm2 = jacrev(self.expected_log_likelihood_dm, argnums=1)(y, m, v, cubature)
        return np.squeeze(d2E_dm2)

    def variational_expectation(self, y, m, v, cubature=None):
        """
        Compute expected log likelihood via cubature:
            E[log p(yₙ|fₙ)] = ∫ log p(yₙ|fₙ) 𝓝(fₙ|mₙ,vₙ) dfₙ
        along with its first and second derivatives w.r.t. mₙ.
        """
        E = self.expected_log_likelihood(y, m, v, cubature)
        dE_dm = self.expected_log_likelihood_dm(y, m, v, cubature)
        d2E_dm2 = self.expected_log_likelihood_dm2(y, m, v, cubature)
        # d2E_dm2 = -ensure_positive_precision(-d2E_dm2)
        # return E, dE_dm, np.diag(np.diag(d2E_dm2))  # TODO: check this is the same as above
        return E, dE_dm, d2E_dm2

    def statistical_linear_regression(self, mean, cov, cubature=None):
        """
        Perform statistical linear regression (SLR) using cubature.
        We aim to find a likelihood approximation p(yₙ|fₙ) ≈ 𝓝(yₙ|Afₙ+b,Ω+Var[yₙ|fₙ]).
        """
        if cubature is None:
            x, w = gauss_hermite(mean.shape[0], 20)  # Gauss-Hermite sigma points and weights
        else:
            x, w = cubature(mean.shape[0])
        m0, m1, v0, v1 = mean[0, 0], mean[1, 0], cov[0, 0], cov[1, 1]  # m1, v1 currently unused
        # fsigᵢ=xᵢ√(vₙ) + mₙ: scale locations according to cavity dist.
        sigma_points = cholesky(cov) @ x + mean
        var = self.link_fn(sigma_points[1]) ** 2
        # Compute zₙ via cubature:
        # zₙ = ∫ E[yₙ|fₙ] 𝓝(fₙ|mₙ,vₙ) dfₙ
        #    ≈ ∑ᵢ wᵢ E[yₙ|fsigᵢ]
        mu = m0.reshape(1, 1)  # closed form: E[y|f] = f1, so the mean is just m0
        # Compute variance S via cubature:
        # S = ∫ [(E[yₙ|fₙ]-zₙ) (E[yₙ|fₙ]-zₙ)' + Cov[yₙ|fₙ]] 𝓝(fₙ|mₙ,vₙ) dfₙ
        #   ≈ ∑ᵢ wᵢ [(E[yₙ|fsigᵢ]-zₙ) (E[yₙ|fsigᵢ]-zₙ)' + Cov[yₙ|fₙ]]
        S = v0 + np.sum(
            w * var
        )
        S = S.reshape(1, 1)
        # Compute cross covariance C via cubature:
        # C = ∫ (fₙ-mₙ) (E[yₙ|fₙ]-zₙ)' 𝓝(fₙ|mₙ,vₙ) dfₙ
        #   ≈ ∑ᵢ wᵢ (fsigᵢ -mₙ) (E[yₙ|fsigᵢ]-zₙ)'
        C = np.sum(
            w * (sigma_points - mean) * (sigma_points[0] - m0), axis=-1
        ).reshape(2, 1)
        # Compute derivative of z via cubature:
        # d_mu = ∫ E[yₙ|fₙ] vₙ⁻¹ (fₙ-mₙ) 𝓝(fₙ|mₙ,vₙ) dfₙ
        #      ≈ ∑ᵢ wᵢ E[yₙ|fsigᵢ] vₙ⁻¹ (fsigᵢ-mₙ)
        d_mu = np.block([[1., 0.]])  # closed form: dz/dmean = [1, 0] since z = m0
        omega = S - transpose(C) @ solve(cov, C)
        return mu, d_mu, omega

    def analytical_linearisation(self, m, sigma=None):
        """
        Compute the Jacobian of the state space observation model w.r.t. the
        function fₙ and the noise term σₙ.
+ """ + return np.block([[np.array(1.0), self.dlink_fn(m[1]) * sigma]]), self.link_fn(np.array([m[1]])) + + +class AudioAmplitudeDemodulation(Likelihood): + """ + The Audio Amplitude Demodulation likelihood + """ + def __init__(self, variance=0.1): + """ + param hyp: observation noise + """ + self.transformed_variance = objax.TrainVar(np.array(softplus_inv(variance))) + super().__init__() + self.name = 'Audio Amplitude Demodulation' + # self.link_fn = lambda f: softplus(f) + self.link_fn = softplus + # self.dlink_fn = lambda f: sigmoid(f) # derivative of the link function + self.dlink_fn = sigmoid # derivative of the link function + + @property + def variance(self): + return softplus(self.transformed_variance.value) + + def evaluate_likelihood(self, y, f): + """ + Evaluate the likelihood + """ + mu, var = self.conditional_moments(f) + return (2 * np.pi * var) ** -0.5 * np.exp(-0.5 * (y - mu) ** 2 / var) + + def evaluate_log_likelihood(self, y, f): + """ + Evaluate the log-likelihood + """ + mu, var = self.conditional_moments(f) + return -0.5 * np.log(2 * np.pi * var) - 0.5 * (y - mu) ** 2 / var + + def conditional_moments(self, f): + """ + """ + num_components = int(f.shape[0] / 2) + subbands, modulators = f[:num_components], self.link_fn(f[num_components:]) + return np.sum(subbands * modulators).reshape(-1, 1), np.array([[self.variance]]) + # return np.atleast_2d(modulators.T @ subbands), np.atleast_2d(obs_noise_var) + + def moment_match(self, y, cav_mean, cav_cov, power=1.0, cubature=None): + """ + """ + num_components = int(cav_mean.shape[0] / 2) + if cubature is None: + x, w = gauss_hermite(num_components, 20) # Gauss-Hermite sigma points and weights + else: + x, w = cubature(num_components) + + # subband_mean, modulator_mean = cav_mean[:num_components], self.link_fn(cav_mean[num_components:]) + subband_mean, modulator_mean = cav_mean[:num_components], cav_mean[num_components:] # TODO: CHECK + subband_cov, modulator_cov = cav_cov[:num_components, 
:num_components], cav_cov[num_components:, num_components:] + sigma_points = cholesky(modulator_cov) @ x + modulator_mean + const = power ** -0.5 * (2 * np.pi * self.variance) ** (0.5 - 0.5 * power) + mu = (self.link_fn(sigma_points).T @ subband_mean)[:, 0] + var = self.variance / power + (self.link_fn(sigma_points).T ** 2 @ np.diag(subband_cov)[..., None])[:, 0] + normpdf = const * (2 * np.pi * var) ** -0.5 * np.exp(-0.5 * (y - mu) ** 2 / var) + Z = np.sum(w * normpdf) + Zinv = 1. / (Z + 1e-8) + lZ = np.log(Z + 1e-8) + + dZ1 = np.sum(w * self.link_fn(sigma_points) * (y - mu) / var * normpdf, axis=-1) + dZ2 = np.sum(w * (sigma_points - modulator_mean) * np.diag(modulator_cov)[..., None] ** -1 * normpdf, axis=-1) + dlZ = Zinv * np.block([dZ1, dZ2]) + + d2Z1 = np.sum(w * self.link_fn(sigma_points) ** 2 * ( + ((y - mu) / var) ** 2 + - var ** -1 + ) * normpdf, axis=-1) + d2Z2 = np.sum(w * ( + ((sigma_points - modulator_mean) * np.diag(modulator_cov)[..., None] ** -1) ** 2 + - np.diag(modulator_cov)[..., None] ** -1 + ) * normpdf, axis=-1) + d2lZ = np.diag(-dlZ ** 2 + Zinv * np.block([d2Z1, d2Z2])) + + # id2lZ = inv_any(d2lZ + 1e-10 * np.eye(d2lZ.shape[0])) + # site_mean = cav_mean - id2lZ @ dlZ[..., None] # approx. likelihood (site) mean (see Rasmussen & Williams p75) + # site_cov = -power * (cav_cov + id2lZ) # approx. 
likelihood (site) variance + return lZ, dlZ[..., None], d2lZ + + def log_density(self, y, mean, cov, cubature=None): + """ + """ + num_components = int(mean.shape[0] / 2) + if cubature is None: + x, w = gauss_hermite(num_components, 20) # Gauss-Hermite sigma points and weights + else: + x, w = cubature(num_components) + + # subband_mean, modulator_mean = mean[:num_components], self.link_fn(mean[num_components:]) + subband_mean, modulator_mean = mean[:num_components], mean[num_components:] # TODO: CHECK + subband_cov, modulator_cov = cov[:num_components, :num_components], cov[num_components:, + num_components:] + sigma_points = cholesky(modulator_cov) @ x + modulator_mean + mu = (self.link_fn(sigma_points).T @ subband_mean)[:, 0] + var = self.variance + (self.link_fn(sigma_points).T ** 2 @ np.diag(subband_cov)[..., None])[:, 0] + normpdf = (2 * np.pi * var) ** -0.5 * np.exp(-0.5 * (y - mu) ** 2 / var) + Z = np.sum(w * normpdf) + lZ = np.log(Z + 1e-8) + return lZ + + def statistical_linear_regression(self, mean, cov, cubature=None): + """ + This gives the same result as above - delete + """ + num_components = int(mean.shape[0] / 2) + if cubature is None: + x, w = gauss_hermite(num_components, 20) # Gauss-Hermite sigma points and weights + else: + x, w = cubature(num_components) + + # subband_mean, modulator_mean = mean[:num_components], self.link_fn(mean[num_components:]) + subband_mean, modulator_mean = mean[:num_components], mean[num_components:] # TODO: CHECK + subband_cov, modulator_cov = cov[:num_components, :num_components], cov[num_components:, + num_components:] + sigma_points = cholesky(modulator_cov) @ x + modulator_mean + lik_expectation, lik_covariance = (self.link_fn(sigma_points).T @ subband_mean).T, self.variance + # Compute zโ‚™ via cubature: + # muโ‚™ = โˆซ E[yโ‚™|fโ‚™] ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + # โ‰ˆ โˆ‘แตข wแตข E[yโ‚™|fsigแตข] + mu = np.sum( + w * lik_expectation, axis=-1 + )[:, None] + # Compute variance S via cubature: + # S = โˆซ 
        # [(E[yₙ|fₙ]-zₙ) (E[yₙ|fₙ]-zₙ)' + Cov[yₙ|fₙ]] 𝓝(fₙ|mₙ,vₙ) dfₙ
        #   ≈ ∑ᵢ wᵢ [(E[yₙ|fsigᵢ]-zₙ) (E[yₙ|fsigᵢ]-zₙ)' + Cov[yₙ|fₙ]]
        S = np.sum(
            w * ((lik_expectation - mu) * (lik_expectation - mu) + lik_covariance), axis=-1
        )[:, None]
        # Compute cross covariance C via cubature:
        # C = ∫ (fₙ-mₙ) (E[yₙ|fₙ]-zₙ)' 𝓝(fₙ|mₙ,vₙ) dfₙ
        #   ≈ ∑ᵢ wᵢ (fsigᵢ -mₙ) (E[yₙ|fsigᵢ]-zₙ)'
        C = np.sum(
            w * np.block([[self.link_fn(sigma_points) * np.diag(subband_cov)[..., None]],
                          [sigma_points - modulator_mean]]) * (lik_expectation - mu), axis=-1
        )[:, None]
        # Compute derivative of mu via cubature:
        d_mu = np.sum(
            w * np.block([[self.link_fn(sigma_points)],
                          [np.diag(modulator_cov)[..., None] ** -1 * (sigma_points - modulator_mean) * lik_expectation]]), axis=-1
        )[None, :]
        omega = S - transpose(C) @ solve(cov, C)
        return mu, d_mu, omega

    def variational_expectation(self, y, post_mean, post_cov, cubature=None):
        """
        Compute E[log p(y|f)] and its derivatives w.r.t. the posterior mean and covariance,
        using cubature over the modulator dimensions.
        """
        num_components = int(post_mean.shape[0] / 2)
        if cubature is None:
            x, w = gauss_hermite(num_components, 20)  # Gauss-Hermite sigma points and weights
        else:
            x, w = cubature(num_components)

        # subband_mean, modulator_mean = post_mean[:num_components], self.link_fn(post_mean[num_components:])
        subband_mean, modulator_mean = post_mean[:num_components], post_mean[num_components:]  # TODO: CHECK
        subband_cov, modulator_cov = post_cov[:num_components, :num_components], post_cov[num_components:,
                                                                                          num_components:]
        sigma_points = cholesky(modulator_cov) @ x + modulator_mean

        modulator_var = np.diag(subband_cov)[..., None]
        mu = (self.link_fn(sigma_points).T @ subband_mean)[:, 0]
        lognormpdf = -0.5 * np.log(2 * np.pi * self.variance) - 0.5 * (y - mu) ** 2 / self.variance
        const = -0.5 / self.variance * (self.link_fn(sigma_points).T ** 2 @ modulator_var)[:, 0]
        exp_log_lik = np.sum(w * (lognormpdf + const))

        # first derivatives w.r.t. subband and modulator posterior means
        dE1 = np.sum(w * self.link_fn(sigma_points) * (y - mu) / self.variance, axis=-1)
        dE2 = np.sum(w * (sigma_points - modulator_mean) * modulator_var ** -1
                     * (lognormpdf + const), axis=-1)
        dE_dm = np.block([dE1, dE2])[..., None]

        # derivatives w.r.t. the posterior variance (diagonal)
        d2E1 = np.sum(w * - 0.5 * self.link_fn(sigma_points) ** 2 / self.variance, axis=-1)
        d2E2 = np.sum(w * 0.5 * (
            ((sigma_points - modulator_mean) * modulator_var ** -1) ** 2
            - modulator_var ** -1
        ) * (lognormpdf + const), axis=-1)
        dE_dv = np.diag(np.block([d2E1, d2E2]))
        return exp_log_lik, dE_dm, dE_dv

    def analytical_linearisation(self, m, sigma=None):
        """
        Compute the Jacobian of the state space observation model w.r.t. the
        function fₙ and the noise term σₙ.
        """
        num_components = int(m.shape[0] / 2)
        Jf = np.block([[self.link_fn(m[num_components:])], [m[:num_components] * self.dlink_fn(m[num_components:])]]).T
        Jsigma = np.array([[self.variance ** 0.5]])
        return Jf, Jsigma
diff --git a/newt/models.py b/newt/models.py
new file mode 100644
index 0000000..234f2fe
--- /dev/null
+++ b/newt/models.py
@@ -0,0 +1,831 @@
import objax
import jax.numpy as np
from .kernels import Independent
from jax import vmap
from jax.lax import scan
from jax.ops import index, index_update
from jax.scipy.linalg import cho_factor, cho_solve
from jax.random import multivariate_normal, PRNGKey
from .utils import (
    inv,
    diag,
    solve,
    transpose,
    input_admin,
    compute_conditional_statistics,
    build_joint,
    set_z_stats,
    temporal_conditional,
    sum_natural_params_by_group,
    gaussian_expected_log_lik,
    compute_cavity
)
from .ops import (
    gaussian_conditional,
    sparse_gaussian_conditional,
    sparse_conditional_post_to_data,
    kalman_filter,
    kalman_filter_pairs,
    rauch_tung_striebel_smoother
)
import math
from jax.config import config
config.update("jax_enable_x64", True)

LOG2PI = math.log(2 * math.pi)


class Model(objax.Module):
    """
    The parent model class: initialises all the common model features and implements shared methods
    """
    def __init__(self,
                 kernel,
                 likelihood,
                 X,
                 Y,
                 func_dim=1):
        # ensure 2D input and observation arrays
        if X.ndim < 2:
            X = X[:, None]
        if Y.ndim < 2:
            Y = Y[:, None]
        self.X = np.array(X)
        self.Y = np.array(Y)
        self.kernel = kernel
        self.likelihood = likelihood
        self.num_data = self.X.shape[0]  # number of data
        self.func_dim = func_dim  # number of latent dimensions
        self.obs_dim = Y.shape[1]  # dimensionality of the observations, Y
        self.mask = np.isnan(self.Y).reshape(Y.shape[0], Y.shape[1])  # marks missing observations
        if isinstance(self.kernel, Independent):
            pseudo_lik_size = self.func_dim  # the multi-latent case
        else:
            pseudo_lik_size = self.obs_dim
        # Gaussian pseudo (approximate) likelihood, stored both in natural-parameter
        # form (nat1, nat2) and moment form (pseudo_y, pseudo_var)
        self.pseudo_likelihood_nat1 = objax.StateVar(np.zeros([self.num_data, pseudo_lik_size, 1]))
        self.pseudo_likelihood_nat2 = objax.StateVar(1e-2 * np.tile(np.eye(pseudo_lik_size), [self.num_data, 1, 1]))
        self.pseudo_y = objax.StateVar(np.zeros([self.num_data, pseudo_lik_size, 1]))
        self.pseudo_var = objax.StateVar(1e2 * np.tile(np.eye(pseudo_lik_size), [self.num_data, 1, 1]))
        self.posterior_mean = objax.StateVar(np.zeros([self.num_data, self.func_dim, 1]))
        self.posterior_variance = objax.StateVar(np.tile(np.eye(self.func_dim), [self.num_data, 1, 1]))
        self.ind = np.arange(self.num_data)
        self.num_neighbours = np.ones(self.num_data)

    def __call__(self, X=None):
        # with no argument, refresh the stored posterior; otherwise predict at X
        if X is None:
            self.update_posterior()
        else:
            return self.predict(X)

    def set_pseudo_likelihood(self):
        """ Convert the pseudo-likelihood natural parameters to moment form. """
        self.pseudo_var.value = vmap(inv)(self.pseudo_likelihood_nat2.value)
        self.pseudo_y.value = self.pseudo_var.value @ self.pseudo_likelihood_nat1.value

    def prior_sample(self, num_samps=1):
        raise NotImplementedError

    def update_posterior(self):
        raise NotImplementedError

    def compute_log_lik(self, pseudo_y=None, pseudo_var=None):
        """ Compute the log likelihood of the pseudo model, i.e.
        the log normaliser of the approximate posterior """
        raise NotImplementedError

    def predict(self, X, R=None):
        raise NotImplementedError

    def predict_y(self, X, R=None):
        """
        predict y at new test locations X
        TODO: check non-Gaussian likelihoods
        """
        mean_f, var_f = self.predict(X, R)
        mean_f, var_f = mean_f.reshape(mean_f.shape[0], -1, 1), var_f.reshape(var_f.shape[0], -1, 1)
        mean_y, var_y = vmap(self.likelihood.predict)(mean_f, var_f)
        return np.squeeze(mean_y), np.squeeze(var_y)

    def negative_log_predictive_density(self, X, Y, R=None):
        """ Mean negative log predictive density of observations Y at inputs X. """
        predict_mean, predict_var = self.predict(X, R)
        if predict_mean.ndim > 1:  # multi-latent case
            pred_mean, pred_var, Y = predict_mean[..., None], diag(predict_var), Y.reshape(-1, 1)
        else:
            pred_mean, pred_var, Y = predict_mean.reshape(-1, 1, 1), predict_var.reshape(-1, 1, 1), Y.reshape(-1, 1)
        log_density = vmap(self.likelihood.log_density)(Y, pred_mean, pred_var)
        return -np.nanmean(log_density)  # nanmean skips missing observations

    def group_natural_params(self, nat1, nat2, batch_ind=None):
        # when updating on a mini-batch, scatter the batch values into the full arrays
        if (batch_ind is not None) and (batch_ind.shape[0] != self.num_data):
            nat1 = index_update(self.pseudo_likelihood_nat1.value, index[batch_ind], nat1)
            nat2 = index_update(self.pseudo_likelihood_nat2.value, index[batch_ind], nat2)
        return nat1, nat2

    def conditional_posterior_to_data(self, batch_ind=None, post_mean=None, post_cov=None):
        """ Base-class identity mapping: return the stored posterior at the requested indices. """
        if batch_ind is None:
            batch_ind = np.arange(self.num_data)
        ind = self.ind[batch_ind]
        if post_mean is None:
            post_mean = self.posterior_mean.value[ind]
        if post_cov is None:
            post_cov = self.posterior_variance.value[ind]
        return post_mean, post_cov

    def conditional_data_to_posterior(self, mean_f, cov_f):
        # base-class identity mapping (overridden by sparse models)
        return mean_f, cov_f

    def expected_density_pseudo(self):
        """ Sum over data of E_q[log N(pseudo_y | f, pseudo_var)]. """
        expected_density = vmap(gaussian_expected_log_lik)(  # parallel operation
            self.pseudo_y.value,
            self.posterior_mean.value,
            self.posterior_variance.value,
            self.pseudo_var.value,
            self.mask
        )
        return np.sum(expected_density)

    def compute_kl(self):
        """
        KL divergence between the approximate posterior q(u) and the prior p(u)
        """
        # log int p(u) prod_n N(pseudo_y_n | u, pseudo_var_n) du
        log_lik_pseudo = self.compute_log_lik()
        # E_q[log N(pseudo_y_n | u, pseudo_var_n)]
        expected_density_pseudo = self.expected_density_pseudo()
        kl = expected_density_pseudo - log_lik_pseudo  # KL[approx_post || prior]
        return kl

    def compute_full_pseudo_lik(self):
        # base case: the stored pseudo-likelihood moments are already in data space
        return self.pseudo_y.value, self.pseudo_var.value

    def compute_full_pseudo_nat(self, batch_ind):
        # base case: the stored natural parameters are already in data space
        return self.pseudo_likelihood_nat1.value[batch_ind], self.pseudo_likelihood_nat2.value[batch_ind]

    def cavity_distribution(self, batch_ind=None, power=None):
        """ Compute the power EP cavity for the given data points """
        if batch_ind is None:
            batch_ind = np.arange(self.num_data)

        nat1lik_full, nat2lik_full = self.compute_full_pseudo_nat(batch_ind)

        # then compute the cavity
        cavity_mean, cavity_cov = vmap(compute_cavity, [0, 0, 0, 0, None])(
            self.posterior_mean.value[batch_ind],
            self.posterior_variance.value[batch_ind],
            nat1lik_full,
            nat2lik_full,
            power
        )
        return cavity_mean, cavity_cov


class GP(Model):
    """
    A standard (kernel-based) GP model with prior of the form
        f(t) ~ GP(0,k(t,t'))
    TODO: implement multi-latents
    """
    def __init__(self,
                 kernel,
                 likelihood,
                 X,
                 Y):
        super().__init__(kernel=kernel,
                         likelihood=likelihood,
                         X=X,
                         Y=Y)
        self.obs_ind = np.array(np.squeeze(np.where(~self.mask)[0]))  # index into observed values

    def update_posterior(self):
        """
        Compute the approximate posterior distribution using standard Gaussian identities
        """
        mean, covariance = gaussian_conditional(self.kernel,
                                                self.pseudo_y.value,
                                                self.pseudo_var.value,
                                                self.X)
        self.posterior_mean.value = mean.reshape(self.num_data, 1, 1)
        self.posterior_variance.value = np.diag(covariance).reshape(self.num_data, 1, 1)

    def compute_log_lik(self, pseudo_y=None, pseudo_var=None):
        """
        Compute the log marginal likelihood of the pseudo model, i.e. the log normaliser of the approximate posterior
        """
        dim = 1  # TODO: implement multivariate case
        # TODO: won't match MarkovGP for batching with missings or for multidim input

        X = self.X[self.obs_ind]  # only compute log lik for observed values  # TODO: check use of obs_ind (remove?)
        if pseudo_y is None:
            pseudo_y = self.pseudo_y.value
            pseudo_var = self.pseudo_var.value
        pseudo_y = pseudo_y[self.obs_ind]
        pseudo_var = pseudo_var[self.obs_ind]

        Knn = self.kernel(X, X)
        Ky = Knn + np.diag(np.squeeze(pseudo_var))  # TODO: this will break for multi-latents

        # ---- compute the marginal likelihood, i.e. the normaliser, of the pseudo model ----
        pseudo_y = diag(pseudo_y)
        Ly, low = cho_factor(Ky)
        log_lik_pseudo = (
            - 0.5 * np.sum(pseudo_y.T @ cho_solve((Ly, low), pseudo_y))
            - np.sum(np.log(np.diag(Ly)))
            - 0.5 * pseudo_y.shape[0] * dim * LOG2PI
        )

        return log_lik_pseudo

    def predict(self, X, R=None):
        """
        predict f at new test locations X
        """
        if len(X.shape) < 2:
            X = X[:, None]
        mean, covariance = gaussian_conditional(self.kernel,
                                                self.pseudo_y.value,
                                                self.pseudo_var.value,
                                                self.X,
                                                X)
        predict_mean = np.squeeze(mean)
        predict_variance = np.diag(covariance)
        return predict_mean, predict_variance

    def prior_sample(self, X=None, num_samps=1, key=0):
        """ Draw num_samps samples from the GP prior at inputs X (defaults to the training inputs). """
        if X is None:
            X = self.X
        N = X.shape[0]
        m = np.zeros(N)
        K = self.kernel(X, X) + 1e-12 * np.eye(N)  # jitter for numerical stability
        s = multivariate_normal(PRNGKey(key), m, K, shape=[num_samps])
        return s.T


class SparseGP(GP):
    """
    A standard (kernel-based) GP model with prior of the form
        f(t) ~ GP(0,k(t,t'))
    :param opt_z: flag whether to optimise the inducing inputs Z
    TODO: write test comparing to gpflow
    TODO: implement multi-latents
    """
    def __init__(self,
                 kernel,
                 likelihood,
                 X,
                 Y,
                 Z,
                 opt_z=False):
        super().__init__(kernel=kernel,
                         likelihood=likelihood,
                         X=X,
                         Y=Y)
        if Z.ndim < 2:
            Z = Z[:, None]
        # inducing inputs: trainable when opt_z is set, otherwise held fixed
        if opt_z:
            self.Z = objax.TrainVar(Z)
        else:
            self.Z = objax.StateVar(Z)
        self.num_inducing = Z.shape[0]
        # the posterior is now over the M inducing points rather than the N data points
        self.posterior_mean = objax.StateVar(np.zeros([self.num_inducing, self.func_dim, 1]))
        self.posterior_variance = objax.StateVar(np.tile(np.eye(self.func_dim), [self.num_inducing, 1, 1]))
        self.posterior_covariance = objax.StateVar(np.eye(self.num_inducing))

    def update_posterior(self):
        """
        Compute the approximate posterior distribution using standard Gaussian identities
        """
        mean, covariance = sparse_gaussian_conditional(self.kernel,
                                                       self.pseudo_likelihood_nat1.value,
                                                       self.pseudo_likelihood_nat2.value,
                                                       self.X,
                                                       self.Z.value)
        self.posterior_mean.value = mean.reshape(self.num_inducing, 1, 1)
        self.posterior_variance.value = np.diag(covariance).reshape(self.num_inducing, 1, 1)
        self.posterior_covariance.value = covariance.reshape(self.num_inducing, self.num_inducing)

    def compute_full_pseudo_lik(self):
        """ The pseudo-likelihoods are currently stored as N Gaussians in f - convert to M Gaussian in u """
        Kuf = self.kernel(self.Z.value, self.X[self.obs_ind])  # only compute log lik for observed values
        Kuu = self.kernel(self.Z.value, self.Z.value)
        Wuf = solve(Kuu, Kuf)  # conditional mapping, Kuu^-1 Kuf

        # TODO: more efficient way to do this?
        nat1lik_full = Wuf @ np.squeeze(self.pseudo_likelihood_nat1.value[self.obs_ind], axis=-1)
        nat2lik_full = Wuf @ np.diag(np.squeeze(self.pseudo_likelihood_nat2.value[self.obs_ind])) @ transpose(Wuf)
        pseudo_var_full = inv(nat2lik_full + 1e-12 * np.eye(Kuu.shape[0]))  # jitter for numerical stability
        pseudo_y_full = pseudo_var_full @ nat1lik_full
        return pseudo_y_full, pseudo_var_full

    def compute_kl(self):
        """
        KL divergence between the approximate posterior q(u) and the prior p(u)
        """
        pseudo_y_full, pseudo_var_full = self.compute_full_pseudo_lik()

        # ---- compute the log marginal likelihood, i.e.
        # the normaliser, of the pseudo model ----
        # log int p(u) prod_n N(pseudo_y_n | u, pseudo_var_n) du
        log_lik_pseudo = self.compute_log_lik(pseudo_y_full, pseudo_var_full)

        # E_q[log N(pseudo_y_n | u, pseudo_var_n)]
        expected_density_pseudo = gaussian_expected_log_lik(  # this term does not depend on the prior, use stored q(u)
            pseudo_y_full,
            np.squeeze(self.posterior_mean.value, axis=-1),
            self.posterior_covariance.value,
            pseudo_var_full
        )

        kl = expected_density_pseudo - log_lik_pseudo  # KL[approx_post || prior]
        return kl

    def compute_log_lik(self, pseudo_y=None, pseudo_var=None):
        """ log int p(u) prod_n N(pseudo_y_n | u, pseudo_var_n) du """
        dim = 1  # TODO: implement multivariate case
        Kuu = self.kernel(self.Z.value, self.Z.value)

        Ky = Kuu + pseudo_var
        Ly, low = cho_factor(Ky)
        log_lik_pseudo = (  # this term depends on the prior
            - 0.5 * np.sum(pseudo_y.T @ cho_solve((Ly, low), pseudo_y))
            - np.sum(np.log(np.diag(Ly)))
            - 0.5 * pseudo_y.shape[0] * dim * LOG2PI
        )
        return log_lik_pseudo

    def predict(self, X, R=None):
        """
        predict at new test locations X
        """
        if len(X.shape) < 2:
            X = X[:, None]
        self.update_posterior()  # refresh q(u) before conditioning on it
        mean, covariance = sparse_conditional_post_to_data(self.kernel,
                                                           self.posterior_mean.value,
                                                           self.posterior_covariance.value,
                                                           X,
                                                           self.Z.value)
        predict_mean = np.squeeze(mean)
        predict_variance = np.diag(covariance)
        return predict_mean, predict_variance

    def conditional_posterior_to_data(self, batch_ind=None, post_mean=None, post_cov=None):
        """
        compute
            q(f) = int p(f | u) q(u) du
        where
            q(u) = N(u | post_mean, post_cov)
        """
        if batch_ind is None:
            batch_ind = np.arange(self.num_data)
        if post_mean is None:
            post_mean = self.posterior_mean.value
        if post_cov is None:
            post_cov = self.posterior_covariance.value

        mean_f, cov_f = sparse_conditional_post_to_data(self.kernel,
                                                        post_mean,
                                                        post_cov,
                                                        self.X[batch_ind],
                                                        self.Z.value)

        Nbatch = batch_ind.shape[0]
        return mean_f.reshape(Nbatch, 1, 1), np.diag(cov_f).reshape(Nbatch, 1, 1)

    def prior_sample(self, X=None, num_samps=1, key=0):
        # TODO: implement using objax.random
        raise NotImplementedError


class MarkovGP(Model):
    """
    The stochastic differential equation (SDE) form of a Gaussian process (GP) model.
    Implements methods for inference and learning using state space methods, i.e. Kalman filtering and smoothing.
    Constructs a linear time-invariant (LTI) stochastic differential equation (SDE) of the following form:
        dx(t)/dt = F x(t) + L w(t)
              yₙ ~ p(yₙ | f(t_n)=H x(t_n))
    where w(t) is a white noise process and where the state x(t) is Gaussian distributed with initial
    state distribution x(t)~𝓝(0,Pinf).
    """
    def __init__(self,
                 kernel,
                 likelihood,
                 X,
                 Y,
                 R=None):
        # sort/validate inputs and compute the time-step sizes dt
        (X, Y, self.R, self.dt) = input_admin(X, Y, R)
        H = kernel.measurement_model()
        func_dim = H.shape[0]  # number of latent dimensions
        super().__init__(kernel=kernel,
                         likelihood=likelihood,
                         X=X,
                         Y=Y,
                         func_dim=func_dim)
        self.state_dim = self.kernel.stationary_covariance().shape[0]
        self.minf = np.zeros([self.state_dim, 1])  # stationary state mean
        self.spatio_temporal = np.any(~np.isnan(self.R))  # R carries spatial inputs when present

    @staticmethod
    def filter(*args, **kwargs):
        return kalman_filter(*args, **kwargs)

    @staticmethod
    def smoother(*args, **kwargs):
        return rauch_tung_striebel_smoother(*args, **kwargs)

    @staticmethod
    def temporal_conditional(*args, **kwargs):
        return temporal_conditional(*args, **kwargs)

    def compute_full_pseudo_nat(self, batch_ind):
        # map the pseudo-likelihood natural parameters into the state space via B
        if self.spatio_temporal:  # spatio-temporal case
            B, C = self.kernel.spatial_conditional(self.X[batch_ind], self.R[batch_ind])
            nat1lik_full = transpose(B) @ self.pseudo_likelihood_nat1.value[batch_ind]
            nat2lik_full = transpose(B) @ self.pseudo_likelihood_nat2.value[batch_ind] @ B
            return nat1lik_full, nat2lik_full
        else:  # temporal case
            return self.pseudo_likelihood_nat1.value[batch_ind], self.pseudo_likelihood_nat2.value[batch_ind]

    def compute_full_pseudo_lik(self):
        # TODO: running this 3 times per training loop is wasteful - store in memory?
        if self.spatio_temporal:  # spatio-temporal case
            B, C = self.kernel.spatial_conditional(self.X, self.R)
            # TODO: more efficient way to do this?
            nat1lik_full = transpose(B) @ self.pseudo_likelihood_nat1.value
            nat2lik_full = transpose(B) @ self.pseudo_likelihood_nat2.value @ B
            pseudo_var_full = vmap(inv)(nat2lik_full + 1e-12 * np.eye(nat2lik_full.shape[1]))  # <---------- bottleneck
            pseudo_y_full = pseudo_var_full @ nat1lik_full
            return pseudo_y_full, pseudo_var_full
        else:  # temporal case
            return self.pseudo_y.value, self.pseudo_var.value

    def update_posterior(self):
        """
        Compute the posterior via filtering and smoothing
        """
        pseudo_y, pseudo_var = self.compute_full_pseudo_lik()
        mask = self.mask
        if mask.shape[1] != pseudo_y.shape[1]:  # TODO: store in memory?
            mask = np.tile(self.mask, [1, pseudo_y.shape[1]])
        log_lik, (filter_mean, filter_cov) = self.filter(self.dt,
                                                         self.kernel,
                                                         pseudo_y,
                                                         pseudo_var,
                                                         mask)
        # shift dt so each step aligns with the smoother's backward pass
        dt = np.concatenate([self.dt[1:], np.array([0.0])], axis=0)
        smoother_mean, smoother_cov, _ = self.smoother(dt,
                                                       self.kernel,
                                                       filter_mean,
                                                       filter_cov)
        self.posterior_mean.value, self.posterior_variance.value = smoother_mean, smoother_cov

    def compute_kl(self):
        """
        KL[q()|p()]
        """
        pseudo_y, pseudo_var = self.compute_full_pseudo_lik()
        log_lik_pseudo = self.compute_log_lik(pseudo_y, pseudo_var)

        mask = self.mask
        if mask.shape[1] != pseudo_y.shape[1]:  # TODO: store in memory?
+ mask = np.tile(self.mask, [1, pseudo_y.shape[1]]) + + expected_density_pseudo = vmap(gaussian_expected_log_lik)( # parallel operation + pseudo_y, + self.posterior_mean.value, + self.posterior_variance.value, + pseudo_var, + mask + ) + + kl = np.sum(expected_density_pseudo) - log_lik_pseudo # KL[approx_post || prior] + return kl + + def compute_log_lik(self, pseudo_y=None, pseudo_var=None): + """ + int p(f) N(pseudo_y | f, pseudo_var) df + """ + if pseudo_y is None: + pseudo_y, pseudo_var = self.compute_full_pseudo_lik() + + mask = self.mask + if mask.shape[1] != pseudo_y.shape[1]: # TODO: store in memory? + mask = np.tile(self.mask, [1, pseudo_y.shape[1]]) + + log_lik_pseudo, (_, _) = self.filter( + self.dt, + self.kernel, + pseudo_y, + pseudo_var, + mask + ) + return log_lik_pseudo + + def conditional_posterior_to_data(self, batch_ind=None, post_mean=None, post_cov=None): + """ + compute + q(f) = int p(f | u) q(u) du = N(f | B post_mean, B post_cov B' + C) + where + q(u) = N(u | post_mean, post_cov) + p(f | u) = N(f | Bu, C) + """ + if batch_ind is None: + batch_ind = np.arange(self.num_data) + if post_mean is None: + post_mean = self.posterior_mean.value[batch_ind] + if post_cov is None: + post_cov = self.posterior_variance.value[batch_ind] + + if self.spatio_temporal: + B, C = self.kernel.spatial_conditional(self.X[batch_ind], self.R[batch_ind]) + mean_f = B @ post_mean + cov_f = B @ post_cov @ transpose(B) + C + return mean_f, cov_f + else: + return post_mean, post_cov + + def predict(self, X, R=None): + """ + predict at new test locations X + """ + if len(X.shape) < 2: + X = X[:, None] + if R is None: + R = X[:, 1:] + X = X[:, :1] # take only the temporal component + + pseudo_y, pseudo_var = self.compute_full_pseudo_lik() + _, (filter_mean, filter_cov) = self.filter(self.dt, + self.kernel, + pseudo_y, + pseudo_var) + dt = np.concatenate([self.dt[1:], np.array([0.0])], axis=0) + smoother_mean, smoother_cov, gain = self.smoother(dt, + self.kernel, + 
filter_mean, + filter_cov, + return_full=True) + + # add dummy states at either edge + inf = 1e10 * np.ones_like(self.X[0, :1]) + X_aug = np.block([[-inf], [self.X[:, :1]], [inf]]) + + # predict the state distribution at the test time steps: + state_mean, state_cov = self.temporal_conditional(X_aug, X, smoother_mean, smoother_cov, gain, self.kernel) + # extract function values from the state: + H = self.kernel.measurement_model() + if self.spatio_temporal: + # TODO: if R is fixed, only compute B, C once + B, C = self.kernel.spatial_conditional(X, R) + W = B @ H + test_mean = W @ state_mean + test_var = W @ state_cov @ transpose(W) + C + else: + test_mean, test_var = H @ state_mean, H @ state_cov @ transpose(H) + + if np.squeeze(test_var).ndim > 2: # deal with spatio-temporal case (discard spatial covariance) + test_var = diag(np.squeeze(test_var)) + return np.squeeze(test_mean), np.squeeze(test_var) + + def prior_sample(self, X=None, num_samps=1, key=0): + # TODO: implement using objax.random + raise NotImplementedError + + def filter_energy(self): + pseudo_y, pseudo_var = self.compute_full_pseudo_lik() + _, (filter_mean, filter_cov) = self.filter(self.dt, + self.kernel, + pseudo_y, + pseudo_var, + return_predict=True) + H = self.kernel.measurement_model() + mean = H @ filter_mean + var = H @ filter_cov @ transpose(H) + filter_energy = -np.sum(vmap(self.likelihood.log_density)(self.Y, mean, var)) + return filter_energy + + +class SparseMarkovGP(MarkovGP): + """ + A sparse Markovian GP. 
+ TODO: implement version with non-tied sites + """ + def __init__(self, + kernel, + likelihood, + X, + Y, + R=None, + Z=None): + super().__init__(kernel=kernel, + likelihood=likelihood, + X=X, + Y=Y, + R=R) + if Z is None: + Z = self.X + else: + if Z.ndim < 2: + Z = Z[:, None] + Z = np.sort(Z, axis=0) + inf = np.array([[1e10]]) + self.Z = objax.StateVar(np.concatenate([-inf, Z, inf], axis=0)) + self.dz = np.array(np.diff(self.Z.value[:, 0])) + self.num_transitions = self.dz.shape[0] + zeros = np.zeros([self.num_transitions, 2 * self.state_dim, 1]) + eyes = np.tile(np.eye(2 * self.state_dim), [self.num_transitions, 1, 1]) + + # nat2 = 1e-8 * eyes + + # initialise to match MarkovGP / GP on first step (when Z=X): + nat2 = index_update(1e-8 * eyes, index[:-1, self.state_dim, self.state_dim], 1e-2) + + # initialise to match old implementation: + # nat2 = (1 / 99) * eyes + + self.pseudo_likelihood_nat1 = objax.StateVar(zeros) + self.pseudo_likelihood_nat2 = objax.StateVar(nat2) + self.pseudo_y = objax.StateVar(zeros) + self.pseudo_var = objax.StateVar(vmap(inv)(nat2)) + self.posterior_mean = objax.StateVar(zeros) + self.posterior_variance = objax.StateVar(eyes) + self.mask = None + self.conditional_mean = None + # TODO: if training Z this needs to be done at every training step (as well as sorting and computing dz) + self.ind, self.num_neighbours = set_z_stats(self.X, self.Z.value) + + @staticmethod + def filter(*args, **kwargs): + return kalman_filter_pairs(*args, **kwargs) + + @staticmethod + def smoother(*args, **kwargs): + return rauch_tung_striebel_smoother(*args, **kwargs) + + def compute_full_pseudo_lik(self): + return self.pseudo_y.value, self.pseudo_var.value + + def update_posterior(self): + """ + Compute the posterior via filtering and smoothing + """ + log_lik, (filter_mean, filter_cov) = self.filter(self.dz, + self.kernel, + self.pseudo_y.value, + self.pseudo_var.value) + dz = self.dz[1:] + smoother_mean, smoother_cov, gain = self.smoother(dz, + 
self.kernel, + filter_mean, + filter_cov, + return_full=True) + + minf, Pinf = self.minf[None, ...], self.kernel.stationary_covariance()[None, ...] + mean_aug = np.concatenate([minf, smoother_mean, minf]) + cov_aug = np.concatenate([Pinf, smoother_cov, Pinf]) + gain = np.concatenate([np.zeros_like(gain[:1]), gain]) + # construct the joint distribution between neighbouring pairs of states + post_mean, post_cov = vmap(build_joint, [0, None, None, None])( + np.arange(self.num_transitions), mean_aug, cov_aug, gain + ) + + self.posterior_mean.value, self.posterior_variance.value = post_mean, post_cov + + def compute_log_lik(self, pseudo_y=None, pseudo_var=None): + """ + Compute the log marginal likelihood of the pseudo model, i.e. the log normaliser of the approximate posterior + """ + log_lik, (_, _) = self.filter(self.dz, + self.kernel, + self.pseudo_y.value, + self.pseudo_var.value) + return log_lik + + def compute_kl(self): + """ + KL divergence between the approximate posterior q(u) and the prior p(u) + """ + # log int p(u) prod_n N(pseudo_y_n | u, pseudo_var_n) du + log_lik_pseudo = self.compute_log_lik() + # E_q[log N(pseudo_y_n | u, pseudo_var_n)] + expected_density_pseudo = self.expected_density_pseudo() + kl = expected_density_pseudo - log_lik_pseudo # KL[approx_post || prior] + return kl + + def predict(self, X, R=None): + """ + predict at new test locations X + """ + if len(X.shape) < 2: + X = X[:, None] + if R is None: + R = X[:, 1:] + X = X[:, :1] # take only the temporal component + + _, (filter_mean, filter_cov) = self.filter(self.dz, + self.kernel, + self.pseudo_y.value, + self.pseudo_var.value) + dz = self.dz[1:] + smoother_mean, smoother_cov, gain = self.smoother(dz, + self.kernel, + filter_mean, + filter_cov, + return_full=True) + + # predict the state distribution at the test time steps + state_mean, state_cov = self.temporal_conditional(self.Z.value, X, smoother_mean, smoother_cov, + gain, self.kernel) + # extract function values from the state: + 
H = self.kernel.measurement_model() + if self.spatio_temporal: + # TODO: if R is fixed, only compute B, C once + B, C = self.kernel.spatial_conditional(X, R) + W = B @ H + test_mean = W @ state_mean + test_var = W @ state_cov @ transpose(W) + C + else: + test_mean, test_var = H @ state_mean, H @ state_cov @ transpose(H) + + if np.squeeze(test_var).ndim > 2: # deal with spatio-temporal case (discard spatial covariance) + test_var = diag(np.squeeze(test_var)) + return np.squeeze(test_mean), np.squeeze(test_var) + + def conditional_posterior_to_data(self, batch_ind=None, post_mean=None, post_cov=None): + """ + compute + q(f) = int p(f | u) q(u) du + where + q(u) = N(u | post_mean, post_cov) + """ + if batch_ind is None: + batch_ind = np.arange(self.num_data) + if post_mean is None: + post_mean = self.posterior_mean.value + if post_cov is None: + post_cov = self.posterior_variance.value + ind = self.ind[batch_ind] + post_mean, post_cov = post_mean[ind], post_cov[ind] + + P, T = vmap(compute_conditional_statistics, [0, None, None, 0])( + self.X[batch_ind, :1], self.Z.value, self.kernel, ind + ) + + H = self.kernel.measurement_model() + if self.spatio_temporal: + B, C = self.kernel.spatial_conditional(self.X[batch_ind], self.R[batch_ind]) + BH = B @ H + self.conditional_mean = BH @ P # W + conditional_cov = BH @ T @ transpose(BH) + C # nu + else: + self.conditional_mean = H @ P # W + conditional_cov = H @ T @ transpose(H) # nu + + mean_f = self.conditional_mean @ post_mean + cov_f = self.conditional_mean @ post_cov @ transpose(self.conditional_mean) + conditional_cov + + return mean_f, cov_f + + def conditional_data_to_posterior(self, mean_f, cov_f): + """ + conditional_posterior_to_data() must be run first so that self.conditional_mean is set + """ + mean_q = transpose(self.conditional_mean) @ mean_f + cov_q = transpose(self.conditional_mean) @ cov_f @ self.conditional_mean + return mean_q, cov_q + + def group_natural_params(self, nat1_n, nat2_n, batch_ind=None): + + if 
batch_ind is None: + ind = self.ind + else: + ind = self.ind[batch_ind] + + old_nat1 = self.pseudo_likelihood_nat1.value + old_nat2 = self.pseudo_likelihood_nat2.value + + (new_nat1, new_nat2, counter), _ = scan(f=sum_natural_params_by_group, + init=(np.zeros_like(old_nat1), + np.zeros_like(old_nat2), + np.zeros(old_nat1.shape[0])), + xs=(ind, nat1_n, nat2_n)) + + num_neighbours = np.maximum(self.num_neighbours, 1).reshape(-1, 1, 1) + counter = counter.reshape(-1, 1, 1) + nat1 = new_nat1 + (1. - counter / num_neighbours) * old_nat1 + nat2 = new_nat2 + (1. - counter / num_neighbours) * old_nat2 + + nat2 += 1e-8 * np.eye(nat2.shape[1]) # prevent zeros + + return nat1, nat2 + + def cavity_distribution(self, batch_ind=None, power=None): + """ Compute the power EP cavity for the given data points """ + fraction = power / np.maximum(self.num_neighbours, 1) + cavity_mean, cavity_cov = vmap(compute_cavity)( + self.posterior_mean.value, + self.posterior_variance.value, + self.pseudo_likelihood_nat1.value, + self.pseudo_likelihood_nat2.value, + fraction + ) + return cavity_mean, cavity_cov + + def prior_sample(self, X=None, num_samps=1, key=0): + # TODO: implement using objax.random + raise NotImplementedError diff --git a/newt/notebooks/classification.py b/newt/notebooks/classification.py new file mode 100644 index 0000000..00a3ef5 --- /dev/null +++ b/newt/notebooks/classification.py @@ -0,0 +1,95 @@ +import newt +import objax +import numpy as np +import matplotlib.pyplot as plt +import time + +print('generating some data ...') +np.random.seed(99) +N = 500 # number of training points +M = 20 +# x = 100 * np.random.rand(N) +x0 = 40 * np.random.rand(N//2) +x1 = 40 * np.random.rand(N//2) + 60 +x = np.concatenate([x0, np.array([50]), x1], axis=0) +# x = np.linspace(np.min(x), np.max(x), N) +f = lambda x_: 6 * np.sin(np.pi * x_ / 10.0) / (np.pi * x_ / 10.0 + 1) +y_ = f(x) + np.math.sqrt(0.05)*np.random.randn(x.shape[0]) +y = np.sign(y_) +y[y == -1] = 0 +x_test = 
np.linspace(np.min(x)-5.0, np.max(x)+5.0, num=500) +y_test = np.sign(f(x_test) + np.math.sqrt(0.05)*np.random.randn(x_test.shape[0])) +y_test[y_test == -1] = 0 +x_plot = np.linspace(np.min(x)-10.0, np.max(x)+10.0, num=500) +z = np.linspace(min(x), max(x), num=M) + +x = x[:, None] +x_plot = x_plot[:, None] + +var_f = 1. # GP variance +len_f = 5.0 # GP lengthscale + +kern = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) +lik = newt.likelihoods.Bernoulli(link='logit') +# model = newt.models.GP(kernel=kern, likelihood=lik, X=x, Y=y) +model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=x, Y=y) +# model = newt.models.InfiniteHorizonGP(kernel=kern, likelihood=lik, X=x, Y=y) +# model = newt.models.SparseInfiniteHorizonGP(kernel=kern, likelihood=lik, X=x, Y=y, Z=z) + +# inf = newt.inference.VariationalInference() +# inf = newt.inference.ExpectationPropagation(power=0.5) +inf = newt.inference.PosteriorLinearisation() +# inf = newt.inference.Laplace() +# inf = newt.inference.LaplaceQuasiNewton(num_data=N, dim=model.func_dim) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.1 +lr_newton = 1 +iters = 20 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = energy(model) # compute energy and its gradients w.r.t. 
hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +# calculate posterior predictive distribution via filtering and smoothing at train & test locations: +print('calculating the posterior predictive distribution ...') +t0 = time.time() +posterior_mean, posterior_var = model.predict(X=x_plot) +nlpd = model.negative_log_predictive_density(X=x_test, Y=y_test) +t1 = time.time() +print('prediction time: %2.2f secs' % (t1-t0)) +print('nlpd: %2.3f' % nlpd) +lb = posterior_mean - 1.96 * posterior_var ** 0.5 +ub = posterior_mean + 1.96 * posterior_var ** 0.5 +link_fn = lik.link_fn + +print('plotting ...') +plt.figure(1, figsize=(12, 5)) +plt.clf() +plt.plot(x, y, 'b+', label='training observations') +plt.plot(x_test, y_test, 'r+', alpha=0.4, label='test observations') +plt.plot(x_plot, link_fn(posterior_mean), 'm', label='posterior mean') +plt.fill_between(x_plot[:, 0], link_fn(lb), link_fn(ub), color='m', alpha=0.05, label='95% confidence') +if hasattr(model, 'Z'): + plt.plot(model.Z.value[:, 0], +0.03 * np.ones_like(model.Z.value[:, 0]), 'm^', markersize=5) +plt.xlim(x_plot[0], x_plot[-1]) +plt.legend(loc=3) +plt.title('GP classification.') +plt.xlabel('$X$') +plt.show() diff --git a/newt/notebooks/heteroscedastic.py b/newt/notebooks/heteroscedastic.py new file mode 100644 index 0000000..e0b846c --- /dev/null +++ b/newt/notebooks/heteroscedastic.py @@ -0,0 +1,125 @@ +import newt +import objax +import numpy as np +import matplotlib.pyplot as plt +import time +from sklearn.preprocessing import StandardScaler + +print('loading data ...') +D = np.loadtxt('../data/mcycle.csv', delimiter=',') +X = D[:, 1:2] +Y = D[:, 2:] + +# Standardize +X_scaler = StandardScaler().fit(X) +y_scaler = StandardScaler().fit(Y) +Xall = X_scaler.transform(X) 
+Yall = y_scaler.transform(Y) +x_plot = np.linspace(np.min(Xall)-0.2, np.max(Xall)+0.2, 200) + +# Load cross-validation indices +cvind = np.loadtxt('../experiments/motorcycle/cvind.csv').astype(int) + +# 10-fold cross-validation setup +nt = np.floor(cvind.shape[0]/10).astype(int) +cvind = np.reshape(cvind[:10*nt], (10, nt)) + +np.random.seed(123) +fold = 0 + +# Get training and test indices +test = cvind[fold, :] +train = np.setdiff1d(cvind, test) + +# Set training and test data +X = Xall[train, :] +Y = Yall[train, :] +XT = Xall[test, :] +YT = Yall[test, :] +N = X.shape[0] +M = 20 +batch_size = N # 100 +Z = np.linspace(np.min(Xall), np.max(Xall), M) + +var_f1 = 1. # GP variance +len_f1 = 1. # GP lengthscale +var_f2 = 1. # GP variance +len_f2 = 1. # GP lengthscale + +kern1 = newt.kernels.Matern32(variance=var_f1, lengthscale=len_f1) +kern2 = newt.kernels.Matern32(variance=var_f2, lengthscale=len_f2) +kern = newt.kernels.Independent([kern1, kern2]) +lik = newt.likelihoods.HeteroscedasticNoise() +model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=X, Y=Y) +# model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=X, Y=Y, Z=Z) + +# inf = newt.inference.VariationalInference() +inf = newt.inference.ExpectationPropagation() +# inf = newt.inference.Laplace() +# inf = newt.inference.PosteriorLinearisation() +# inf = newt.inference.Taylor() +# inf = newt.inference.VariationalQuasiNewton(num_data=N, dim=model.func_dim) +# inf = newt.inference.ExpectationPropagationQuasiNewton(power=0.5, num_data=N, dim=model.func_dim) +# inf = newt.inference.VariationalInferencePSD() +# inf = newt.inference.ExpectationPropagationPSD() +# inf = newt.inference.VariationalGaussNewton() + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.01 +lr_newton = 0.01 +iters = 200 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, 
E = energy(model) # compute energy and its gradients w.r.t. hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +t0 = time.time() +posterior_mean, posterior_var = model.predict(X=x_plot) +nlpd = model.negative_log_predictive_density(X=XT, Y=YT) +t1 = time.time() +print('prediction time: %2.2f secs' % (t1-t0)) +print('NLPD: %1.2f' % nlpd) + + +x_pred = X_scaler.inverse_transform(x_plot) +link = model.likelihood.link_fn +lb = posterior_mean[:, 0] - np.sqrt(posterior_var[:, 0] + link(posterior_mean[:, 1]) ** 2) * 1.96 +ub = posterior_mean[:, 0] + np.sqrt(posterior_var[:, 0] + link(posterior_mean[:, 1]) ** 2) * 1.96 +post_mean = y_scaler.inverse_transform(posterior_mean[:, 0]) +lb = y_scaler.inverse_transform(lb) +ub = y_scaler.inverse_transform(ub) + +print('plotting ...') +plt.figure(1, figsize=(12, 5)) +plt.clf() +plt.plot(X_scaler.inverse_transform(X), y_scaler.inverse_transform(Y), 'k.', label='train') +plt.plot(X_scaler.inverse_transform(XT), y_scaler.inverse_transform(YT), 'r.', label='test') +plt.plot(x_pred, post_mean, 'c', label='posterior mean') +plt.fill_between(x_pred, lb, ub, color='c', alpha=0.05, label='95% confidence') +plt.xlim(x_pred[0], x_pred[-1]) +if hasattr(model, 'Z'): + plt.plot(X_scaler.inverse_transform(model.Z.value[:, 0]), + (np.min(lb)-5)*np.ones_like(model.Z.value[:, 0]), + 'c^', + markersize=4) +plt.legend() +plt.title('Heteroscedastic Noise Model via Kalman smoothing (motorcycle crash data)') +plt.xlabel('time (milliseconds)') +plt.ylabel('accelerometer reading') +plt.show() diff --git a/newt/notebooks/log_gaussian_cox_process.py b/newt/notebooks/log_gaussian_cox_process.py new file mode 100644 index 0000000..dc15e07 --- /dev/null +++ b/newt/notebooks/log_gaussian_cox_process.py @@ -0,0 +1,95 @@ 
+import newt +import objax +import numpy as np +import pandas as pd +import matplotlib.pyplot as plt +import time + +print('loading coal data ...') +disaster_timings = pd.read_csv('../data/coal.txt', header=None).values[:, 0] + +# Discretization +num_time_bins = 200 +# Discretize the data +x = np.linspace(min(disaster_timings), max(disaster_timings), num_time_bins).T +y = np.histogram(disaster_timings, np.concatenate([[-1e10], x[:-1] + np.diff(x)/2, [1e10]]))[0][:, None] +# Test points +x_test = x +x_plot = np.linspace(np.min(x_test)-5, np.max(x_test)+5, 200) +M = 15 +z = np.linspace(np.min(x), np.max(x), M) + +x = x[:, None] + +meanval = np.log(len(disaster_timings)/num_time_bins) # TODO: incorporate mean +binsize = (max(x) - min(x)) / num_time_bins + +var_f = 1.0 # GP variance +len_f = 4. # GP lengthscale + +kern = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) +lik = newt.likelihoods.Poisson(binsize=binsize) +# model = newt.models.GP(kernel=kern, likelihood=lik, X=x, Y=y) +model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=x, Y=y) +# model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=x, Y=y, Z=z) + +# inf = newt.inference.VariationalInference() +inf = newt.inference.ExpectationPropagation(power=0.01) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.1 +lr_newton = 1 +iters = 100 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + inf(model, lr=lr_newton) # perform inference and update variational params + dE, E = energy(model) # compute energy and its gradients w.r.t. 
hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +# calculate posterior predictive distribution via filtering and smoothing at train & test locations: +print('calculating the posterior predictive distribution ...') +t0 = time.time() +posterior_mean, posterior_var = model.predict(X=x_plot) +# posterior_mean_y, posterior_var_y = model.predict_y(X=x_plot) +t1 = time.time() +print('prediction time: %2.2f secs' % (t1-t0)) + +link_fn = lik.link_fn + +post_mean_lgcp = link_fn(posterior_mean + posterior_var / 2) +lb_lgcp = link_fn(posterior_mean - np.sqrt(posterior_var) * 1.645) +ub_lgcp = link_fn(posterior_mean + np.sqrt(posterior_var) * 1.645) + +# lb_y = posterior_mean_y - 1.96 * np.sqrt(posterior_var_y) +# ub_y = posterior_mean_y + 1.96 * np.sqrt(posterior_var_y) + +print('plotting ...') +plt.figure(1, figsize=(12, 5)) +plt.clf() +plt.plot(disaster_timings, 0*disaster_timings, 'k+', label='observations', clip_on=False) +plt.plot(x_plot, post_mean_lgcp, 'g', label='posterior mean') +# plt.plot(x_plot, posterior_mean_y, 'r', label='posterior mean (y)') +plt.fill_between(x_plot, lb_lgcp, ub_lgcp, color='g', alpha=0.05, label='95% confidence') +# plt.fill_between(x_plot, lb_y, ub_y, color='r', alpha=0.05, label='95% confidence (y)') +plt.xlim(x_plot[0], x_plot[-1]) +plt.ylim(0.0) +plt.legend() +plt.title('log-Gaussian Cox process via Kalman smoothing (coal mining disasters)') +plt.xlabel('year') +plt.ylabel('accident intensity') +plt.show() diff --git a/newt/notebooks/regression.py b/newt/notebooks/regression.py new file mode 100644 index 0000000..ea3bbd8 --- /dev/null +++ b/newt/notebooks/regression.py @@ -0,0 +1,111 @@ +import newt +import objax +import numpy as np +import matplotlib.pyplot as plt +import time + + +def 
wiggly_time_series(x_): + noise_var = 0.15 # true observation noise + return (np.cos(0.04*x_+0.33*np.pi) * np.sin(0.2*x_) + + np.math.sqrt(noise_var) * np.random.normal(0, 1, x_.shape) + + 0.0 * x_) # 0.02 * x_) + + +print('generating some data ...') +np.random.seed(12345) +N = 100 +# x0 = np.random.permutation(np.linspace(-25.0, 30.0, num=N//2) + 1*np.random.randn(N//2)) # unevenly spaced +# x1 = np.random.permutation(np.linspace(60.0, 150.0, num=N//2) + 1*np.random.randn(N//2)) # unevenly spaced +# x = np.concatenate([x0, x1], axis=0) +x = np.linspace(-17, 147, num=N) +x = np.sort(x, axis=0) +y = wiggly_time_series(x) +x_test = np.linspace(np.min(x)-15.0, np.max(x)+15.0, num=500) +# x_test = np.linspace(-32.5, 157.5, num=250) +y_test = wiggly_time_series(x_test) +x_plot = np.linspace(np.min(x)-20.0, np.max(x)+20.0, 200) +M = 20 +batch_size = N # TODO: why does using smaller batch_size result in longer compile time? +z = np.linspace(-30, 155, num=M) +# z = x +# z = np.linspace(-10, 140, num=M) + + +z = z[:, None] +x = x[:, None] +x_plot = x_plot[:, None] + +var_f = 1.0 # GP variance +len_f = 5.0 # GP lengthscale +var_y = 0.5 # observation noise + +kern = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) +lik = newt.likelihoods.Gaussian(variance=var_y) +# model = newt.models.GP(kernel=kern, likelihood=lik, X=x, Y=y) +# model = newt.models.SparseGP(kernel=kern, likelihood=lik, X=x, Y=y, Z=z, opt_z=True) +model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=x, Y=y) +# model = newt.models.InfiniteHorizonGP(kernel=kern, likelihood=lik, X=x, Y=y) +# model = newt.models.SparseMarkovGP(kernel=kern, likelihood=lik, X=x, Y=y, Z=z) +# model = newt.models.SparseInfiniteHorizonGP(kernel=kern, likelihood=lik, X=x, Y=y, Z=z) + +inf = newt.inference.VariationalInference() +# inf = newt.inference.Laplace() +# inf = newt.inference.PosteriorLinearisation() +# inf = newt.inference.Taylor() +# inf = newt.inference.ExpectationPropagation(power=0.5) +# inf = 
newt.inference.LaplaceQuasiNewton(num_data=N, dim=model.func_dim) +# inf = newt.inference.VariationalQuasiNewton(num_data=N, dim=model.func_dim) + +trainable_vars = model.vars() + inf.vars() +energy = objax.GradValues(inf.energy, trainable_vars) + +lr_adam = 0.1 +lr_newton = 1 +iters = 20 +opt = objax.optimizer.Adam(trainable_vars) + + +def train_op(): + batch = np.random.permutation(N)[:batch_size] + inf(model, lr=lr_newton, batch_ind=batch) # perform inference and update variational params + dE, E = energy(model, batch_ind=batch) # compute energy and its gradients w.r.t. hypers + return dE, E + + +train_op = objax.Jit(train_op, trainable_vars) + +t0 = time.time() +for i in range(1, iters + 1): + grad, loss = train_op() + opt(lr_adam, grad) + print('iter %2d, energy: %1.4f' % (i, loss[0])) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) + +t0 = time.time() +posterior_mean, posterior_var = model.predict_y(X=x_plot) +nlpd = model.negative_log_predictive_density(X=x_test, Y=y_test) +t1 = time.time() +print('prediction time: %2.2f secs' % (t1-t0)) +print('nlpd: %2.3f' % nlpd) +lb = posterior_mean - 1.96 * posterior_var ** 0.5 +ub = posterior_mean + 1.96 * posterior_var ** 0.5 + +print('plotting ...') +plt.figure(1, figsize=(12, 5)) +plt.clf() +plt.plot(x, y, 'k.', label='training observations') +plt.plot(x_test, y_test, 'r.', alpha=0.4, label='test observations') +plt.plot(x_plot, posterior_mean, 'b', label='posterior mean') +plt.fill_between(x_plot[:, 0], lb, ub, color='b', alpha=0.05, label='95% confidence') +# plt.plot(x_plot, posterior_samp, 'b', alpha=0.15) +plt.xlim([x_plot[0], x_plot[-1]]) +if hasattr(model, 'Z'): + plt.plot(model.Z.value[:, 0], -2 * np.ones_like(model.Z.value[:, 0]), 'b^', markersize=5) +# plt.xlim([x_test[0], x_test[-1]]) +# plt.ylim([-2, 5]) +plt.legend() +plt.title('GP regression') +plt.xlabel('$X$') +plt.show() diff --git a/newt/ops.py b/newt/ops.py new file mode 100644 index 0000000..8bc0753 --- /dev/null +++ 
b/newt/ops.py @@ -0,0 +1,323 @@ +import jax.numpy as np +from jax import vmap +from jax.scipy.linalg import cho_factor, cho_solve +from .utils import diag, mvn_logpdf, solve, transpose, inv +from jax.lax import scan +import math + +INV2PI = (2 * math.pi) ** -1 + + +def gaussian_conditional(kernel, y, noise_cov, X, X_star=None): + """ + Compute the GP posterior / predictive distribution using standard Gaussian identities + :param kernel: an instantiation of the kernel class + :param y: observations [N, 1] + :param noise_cov: observation noise covariance [N, 1] + :param X: training inputs [N, D] + :param X_star: test inputs [N*, D] + :return: + mean: posterior mean [N, 1] + covariance: posterior covariance [N, N] + """ + Kff = kernel(X, X) + if X_star is None: # inference / learning + Kfs = Kff + Kss = Kff + else: # prediction + Kfs = kernel(X, X_star) + Kss = kernel(X_star, X_star) + + Ky = Kff + np.diag(np.squeeze(noise_cov)) # TODO: will break for multi-latents + # ---- compute approximate posterior using standard Gaussian conditional formula ---- + Ly, low = cho_factor(Ky) + Kfs_iKy = cho_solve((Ly, low), Kfs).T + mean = Kfs_iKy @ diag(y) + covariance = Kss - Kfs_iKy @ Kfs + return mean, covariance + + +def sparse_gaussian_conditional(kernel, nat1lik, nat2lik, X, Z): + """ + Compute q(u) + :param kernel: an instantiation of the kernel class + :param nat1lik: likelihood first natural parameter [N, 1] + :param nat2lik: likelihood noise precision [N, 1] + :param X: training inputs [N, D] + :param Z: inducing inputs [N*, D] + :return: + mean: posterior mean [N, 1] + covariance: posterior covariance [N, N] + """ + Kuf = kernel(Z, X) + Kuu = kernel(Z, Z) + nat2prior = inv(Kuu) + Wuf = solve(Kuu, Kuf) # conditional mapping, Kuu^-1 Kuf + + nat1lik_fullrank = Wuf @ np.squeeze(nat1lik, axis=-1) # TODO: will break for multi-latents + nat2lik_fullrank = Wuf @ np.diag(np.squeeze(nat2lik)) @ transpose(Wuf) + + nat1post = nat1lik_fullrank # prior nat1 is zero + nat2post = 
nat2prior + nat2lik_fullrank + + covariance = inv(nat2post) + mean = covariance @ nat1post + return mean, covariance + + +def sparse_conditional_post_to_data(kernel, post_mean, post_cov, X, Z): + """ + Compute int p(f|u) q(u) du + :param kernel: an instantiation of the kernel class + :param post_mean: posterior mean [M, 1] + :param post_cov: posterior covariance [M, M] + :param X: training inputs [N, D] + :param Z: inducing inputs [N*, D] + :return: + mean: posterior mean [N, 1] + covariance: posterior covariance [N, N] + """ + Kff = kernel(X, X) + Kuf = kernel(Z, X) + Kuu = kernel(Z, Z) + Wuf = solve(Kuu, Kuf) # conditional mapping, Kuu^-1 Kuf + Qff = transpose(Kuf) @ Wuf # Kfu Kuu^-1 Kuf + + conditional_cov = Kff - Qff + + mean_f = transpose(Wuf) @ np.squeeze(post_mean, axis=-1) + cov_f = conditional_cov + transpose(Wuf) @ post_cov @ Wuf + + return mean_f, cov_f + + +def process_noise_covariance(A, Pinf): + Q = Pinf - A @ Pinf @ transpose(A) + return Q + + +def _sequential_kf(As, Qs, H, ys, noise_covs, m0, P0, masks, return_predict=False): + n_obs = ys.shape[0] + + def repeat_elems(matrices): + if np.ndim(matrices) == 3: + if matrices.shape[0] == n_obs: + return matrices + else: + return np.repeat(matrices, n_obs, axis=0) + elif np.ndim(matrices) == 2: + return np.repeat(np.expand_dims(matrices, 0), n_obs, axis=0) + else: + raise ValueError(f"Expected ndim of {matrices} is 2 or 3, f{np.ndim(matrices)} was passed") + + As, Qs, noise_covs = list(map(repeat_elems, (As, Qs, noise_covs))) + + def body(carry, inputs): + y, A, Q, obs_cov, mask = inputs + m, P, ell = carry + m_ = A @ m + P_ = A @ P @ A.T + Q + + obs_mean = H @ m_ + HP = H @ P_ + S = HP @ H.T + obs_cov + + ell_n = mvn_logpdf(y, obs_mean, S, mask) + ell = ell + ell_n + + K = solve(S, HP).T + m = m_ + K @ (y - obs_mean) + P = P_ - K @ HP + if return_predict: + return (m, P, ell), (m_, P_) + else: + return (m, P, ell), (m, P) + + (_, _, loglik), (fms, fPs) = scan(f=body, + init=(m0, P0, 0.), + xs=(ys, As, 
Qs, noise_covs, masks)) + return loglik, fms, fPs + + +def kalman_filter(dt, kernel, y, noise_cov, mask=None, use_sequential=True, return_predict=False): + """ + Run the Kalman filter to get p(fโ‚™|yโ‚,...,yโ‚™). + Assumes a heteroscedastic Gaussian observation model, i.e. var is vector valued + :param dt: step sizes [N, 1] + :param kernel: an instantiation of the kernel class, used to determine the state space model + :param y: observations [N, D, 1] + :param noise_cov: observation noise covariances [N, D, D] + :param mask: boolean mask for the observations (to indicate missing data locations) [N, D, 1] + :param use_sequential: flag to switch between parallel and sequential implementation of Kalman filter + :param return_predict: flag whether to return predicted state, rather than updated state + :return: + ell: the log-marginal likelihood log p(y), for hyperparameter optimisation (learning) [scalar] + means: intermediate filtering means [N, state_dim, 1] + covs: intermediate filtering covariances [N, state_dim, state_dim] + """ + if mask is None: + mask = np.zeros_like(y, dtype=bool) + Pinf = kernel.stationary_covariance() + minf = np.zeros([Pinf.shape[0], 1]) + + As = vmap(kernel.state_transition)(dt) + Qs = vmap(process_noise_covariance, [0, None])(As, Pinf) + H = kernel.measurement_model() + + if use_sequential: + ell, means, covs = _sequential_kf(As, Qs, H, y, noise_cov, minf, Pinf, mask, return_predict=return_predict) + else: + raise NotImplementedError("Parallel KF not implemented yet") + return ell, (means, covs) + + +def _sequential_rts(fms, fPs, As, Qs, H, return_full): + n_obs = fms.shape[0] + + def repeat_elems(matrices): + if np.ndim(matrices) == 3: + if matrices.shape[0] == n_obs: + return matrices + else: + return np.repeat(matrices, n_obs, axis=0) + elif np.ndim(matrices) == 2: + return np.repeat(np.expand_dims(matrices, 0), n_obs, axis=0) + else: + raise ValueError(f"Expected ndim of {matrices} is 2 or 3, f{np.ndim(matrices)} was passed") + + As, 
Qs = list(map(repeat_elems, (As, Qs))) + + def body(carry, inputs): + fm, fP, A, Q = inputs + sm, sP = carry + + pm = A @ fm + AfP = A @ fP + pP = AfP @ A.T + Q + + C = solve(pP, AfP).T + + sm = fm + C @ (sm - pm) + sP = fP + C @ (sP - pP) @ C.T + if return_full: + return (sm, sP), (sm, sP, C) + else: + return (sm, sP), (H @ sm, H @ sP @ H.T, C) + + _, (sms, sPs, gains) = scan(f=body, + init=(fms[-1], fPs[-1]), + xs=(fms, fPs, As, Qs), + reverse=True) + return sms, sPs, gains + + +def rauch_tung_striebel_smoother(dt, kernel, filter_mean, filter_cov, return_full=False, use_sequential=True): + """ + Run the RTS smoother to get p(fโ‚™|yโ‚,...,y_N), + :param dt: step sizes [N, 1] + :param kernel: an instantiation of the kernel class, used to determine the state space model + :param filter_mean: the intermediate distribution means computed during filtering [N, state_dim, 1] + :param filter_cov: the intermediate distribution covariances computed during filtering [N, state_dim, state_dim] + :param return_full: a flag determining whether to return the full state distribution or just the function(s) + :param use_sequential: flag to switch between parallel and sequential implementation of smoother + :return: + smoothed_mean: the posterior marginal means [N, obs_dim] + smoothed_var: the posterior marginal variances [N, obs_dim] + """ + Pinf = kernel.stationary_covariance() + + As = vmap(kernel.state_transition)(dt) + Qs = vmap(process_noise_covariance, [0, None])(As, Pinf) + H = kernel.measurement_model() + + if use_sequential: + means, covs, gains = _sequential_rts(filter_mean, filter_cov, As, Qs, H, return_full) + else: + raise NotImplementedError("Parallel RTS not implemented yet") + return means, covs, gains + + +def _sequential_kf_pairs(As, Qs, ys, noise_covs, m0, P0): + n_obs = ys.shape[0] + + def repeat_elems(matrices): + if np.ndim(matrices) == 3: + if matrices.shape[0] == n_obs: + return matrices + else: + return np.repeat(matrices, n_obs, axis=0) + elif 
np.ndim(matrices) == 2: + return np.repeat(np.expand_dims(matrices, 0), n_obs, axis=0) + else: + raise ValueError(f"Expected ndim of {matrices} is 2 or 3, f{np.ndim(matrices)} was passed") + + As, Qs, noise_covs = list(map(repeat_elems, (As, Qs, noise_covs))) + + state_dim = As[0].shape[0] + + def body(carry, inputs): + y, A, Q, obs_cov = inputs + m_left, P_left, ell = carry + + # predict + m_right = A @ m_left + P_right = A @ P_left @ A.T + Q + + # construct the joint distribution p(uโ‚™โ‚‹โ‚,uโ‚™) = p(uโ‚™โ‚‹โ‚)p(uโ‚™|uโ‚™โ‚‹โ‚) + PA_ = P_left @ A.T + m_joint = np.block([[m_left], + [m_right]]) + P_joint = np.block([[P_left, PA_], + [PA_.T, P_right]]) + + S = P_joint + obs_cov + + ell_n = mvn_logpdf(y, m_joint, S) + ell = ell + ell_n + + K = solve(S, P_joint).T + + # perform update + m = m_joint + K @ (y - m_joint) + P = P_joint - P_joint @ K.T + + # marginalise and store the now fully updated left state, uโ‚™โ‚‹โ‚ + m_left = m[:state_dim] + P_left = P[:state_dim, :state_dim] + # marginalise and propagate the right state, uโ‚™ + m_right = m[state_dim:] + P_right = P[state_dim:, state_dim:] + + return (m_right, P_right, ell), (m_left, P_left) + + (_, _, loglik), (fms, fPs) = scan(f=body, + init=(m0, P0, 0.), + xs=(ys, As, Qs, noise_covs)) + return loglik, fms[1:], fPs[1:] # discard intial dummy state + + +def kalman_filter_pairs(dt, kernel, y, noise_cov, use_sequential=True): + """ + A Kalman filter over pairs of states, in which y is [2state_dim, 1] and noise_cov is [2state_dim, 2state_dim] + :param dt: step sizes [N, 1] + :param kernel: an instantiation of the kernel class, used to determine the state space model + :param y: observations [N, 2state_dim, 1] + :param noise_cov: observation noise covariances [N, 2state_dim, 2state_dim] + :param use_sequential: flag to switch between parallel and sequential implementation of Kalman filter + :return: + ell: the log-marginal likelihood log p(y), for hyperparameter optimisation (learning) [scalar] + means: marginal 
state filtering means [N, state_dim, 1] + covs: marginal state filtering covariances [N, state_dim, state_dim] + """ + + Pinf = kernel.stationary_covariance() + minf = np.zeros([Pinf.shape[0], 1]) + + As = vmap(kernel.state_transition)(dt) + Qs = vmap(process_noise_covariance, [0, None])(As, Pinf) + + if use_sequential: + ell, means, covs = _sequential_kf_pairs(As, Qs, y, noise_cov, minf, Pinf) + else: + raise NotImplementedError("Parallel KF not implemented yet") + return ell, (means, covs) diff --git a/newt/tests/spatiotemporal_test.py b/newt/tests/spatiotemporal_test.py new file mode 100644 index 0000000..85563aa --- /dev/null +++ b/newt/tests/spatiotemporal_test.py @@ -0,0 +1,89 @@ +import newt +import objax +import numpy as np +import time + +np.random.seed(3) + + +def create_grid(x1, x2, y1, y2, n1=10, n2=10): + y = np.linspace(y1, y2, n2) + x = np.linspace(x1, x2, n1) + + grid = [] + for i in x: + for j in y: + grid.append([i, j]) + + return np.array(grid) + + +Nt_train = 5 +Ns = 5 +X = create_grid(0, 1, 0, 1, Nt_train, Ns) +t = np.linspace(0, 1, Nt_train, dtype=float) +R = np.tile(np.linspace(0, 1, Ns, dtype=float)[None, ...], [Nt_train, 1]) + +N = X.shape[0] +y = np.sin(10*X[:, 0]) + np.sin(10*X[:, 1]) + 0.01*np.random.randn(N) + +# Y = y[:, None] +Y = y.reshape(Nt_train, Ns) + +# print(R.shape) +# print(Y.shape) +# print(R[0].shape) +# print(X) +# print(R) +# print(R[0]) + +kernel_ls = [0.1, 0.2] +kernel_var = [2.2, 0.4] +likelihood_noise = 0.1 + +lik = newt.likelihoods.Gaussian(variance=likelihood_noise) +kern_time = newt.kernels.Matern32(variance=kernel_var[0], lengthscale=kernel_ls[0]) +kern_space = newt.kernels.Matern32(variance=kernel_var[1], lengthscale=kernel_ls[1]) +kern = newt.kernels.SpatioTemporalKernel(temporal_kernel=kern_time, + spatial_kernel=kern_space, + z=R[0], + sparse=True, + opt_z=False, + conditional='Full') +inf = newt.inference.VariationalInference() + +markov = True + +if markov: + model = newt.models.MarkovGP(kernel=kern, 
likelihood=lik, X=t, R=R, Y=Y) + # model = newt.models.MarkovGP(kernel=kern, likelihood=lik, X=X, Y=y) +else: + model = newt.models.GP(kernel=kern, likelihood=lik, X=X, Y=y) + +compute_energy_and_update = objax.GradValues(inf, model.vars()) + +lr_adam = 0. +lr_newton = 1. +epochs = 2 +opt = objax.optimizer.Adam(model.vars()) + + +def train_op(): + model.update_posterior() + grads, loss_ = compute_energy_and_update(model, lr=lr_newton) + # print(grads) + for g, var_name in zip(grads, model.vars().keys()): # TODO: this gives wrong label to likelihood variance + print(g, ' w.r.t. ', var_name) + # print(model.kernel.temporal_kernel.variance) + opt(lr_adam, grads) + return loss_[0] + + +# train_op = objax.Jit(train_op, model.vars()) + +t0 = time.time() +for i in range(1, epochs+1): + loss = train_op() + print('epoch %2d: loss: %1.4f' % (i, loss)) +t1 = time.time() +print('optimisation time: %2.2f secs' % (t1-t0)) diff --git a/newt/tests/test_gp_vs_markovgp_class.py b/newt/tests/test_gp_vs_markovgp_class.py new file mode 100644 index 0000000..7f850c3 --- /dev/null +++ b/newt/tests/test_gp_vs_markovgp_class.py @@ -0,0 +1,130 @@ +import newt +import objax +import numpy as np +from jax.config import config +config.update("jax_enable_x64", True) +import pytest + +inf = newt.inference.VariationalInference() + + +def build_data(N): + # np.random.seed(12345) + x = 100 * np.random.rand(N) + x = np.sort(x) # since MarkovGP sorts the inputs, they must also be sorted for GP + f = lambda x_: 6 * np.sin(np.pi * x_ / 10.0) / (np.pi * x_ / 10.0 + 1) + y_ = f(x) + np.math.sqrt(0.05) * np.random.randn(x.shape[0]) + y = np.sign(y_) + y[y == -1] = 0 + x = x[:, None] + return x, y + + +def initialise_gp_model(var_f, len_f, x, y): + kernel = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) + likelihood = newt.likelihoods.Bernoulli() + model = newt.models.GP(kernel=kernel, likelihood=likelihood, X=x, Y=y) + return model + + +def initialise_markovgp_model(var_f, len_f, x, y): + kernel 
= newt.kernels.Matern52(variance=var_f, lengthscale=len_f) + likelihood = newt.likelihoods.Bernoulli() + model = newt.models.MarkovGP(kernel=kernel, likelihood=likelihood, X=x, Y=y) + return model + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_initial_loss(var_f, len_f, N): + """ + test whether VI with newt's GP and MarkovGP give the same initial ELBO and posterior + """ + + x, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, x, y) + markovgp_model = initialise_markovgp_model(var_f, len_f, x, y) + + gp_model.update_posterior() + loss_gp = inf(gp_model) + print(loss_gp) + + markovgp_model.update_posterior() + loss_markovgp = inf(markovgp_model) + print(loss_markovgp) + + # print(posterior_mean - f_mean[:, 0]) + + np.testing.assert_allclose(gp_model.posterior_mean.value, markovgp_model.posterior_mean.value, rtol=1e-4) + np.testing.assert_allclose(gp_model.posterior_variance.value, markovgp_model.posterior_variance.value, rtol=1e-4) + np.testing.assert_almost_equal(loss_gp, loss_markovgp, decimal=2) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_gradient_step(var_f, len_f, N): + """ + test whether VI with newt's GP and MarkovGP provide the same initial gradient step in the hyperparameters + """ + + x, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, x, y) + markovgp_model = initialise_markovgp_model(var_f, len_f, x, y) + + gv = objax.GradValues(inf, gp_model.vars()) + gv_markov = objax.GradValues(inf, markovgp_model.vars()) + + lr_adam = 0.1 + lr_newton = 1. 
+ opt = objax.optimizer.Adam(gp_model.vars()) + opt_markov = objax.optimizer.Adam(markovgp_model.vars()) + + gp_model.update_posterior() + gp_grads, gp_value = gv(gp_model, lr=lr_newton) + gp_loss_ = gp_value[0] + opt(lr_adam, gp_grads) + gp_hypers = np.array([gp_model.kernel.lengthscale, gp_model.kernel.variance]) + print(gp_hypers) + print(gp_grads) + + markovgp_model.update_posterior() + markovgp_grads, markovgp_value = gv_markov(markovgp_model, lr=lr_newton) + markovgp_loss_ = markovgp_value[0] + opt_markov(lr_adam, markovgp_grads) + markovgp_hypers = np.array([markovgp_model.kernel.lengthscale, markovgp_model.kernel.variance]) + print(markovgp_hypers) + print(markovgp_grads) + + np.testing.assert_allclose(gp_grads[0], markovgp_grads[0], rtol=1e-4) + np.testing.assert_allclose(gp_grads[1], markovgp_grads[1], rtol=1e-4) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_inference_step(var_f, len_f, N): + """ + test whether VI with newt's GP and MarkovGP give the same posterior after one natural gradient step + """ + + x, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, x, y) + markovgp_model = initialise_markovgp_model(var_f, len_f, x, y) + + lr_newton = 1. 
+ + gp_model.update_posterior() + gp_loss = inf(gp_model, lr=lr_newton) # update variational params + gp_model.update_posterior() + + markovgp_model.update_posterior() + markovgp_loss = inf(markovgp_model, lr=lr_newton) # update variational params + markovgp_model.update_posterior() + + np.testing.assert_allclose(gp_model.posterior_mean.value, markovgp_model.posterior_mean.value, rtol=1e-4) + np.testing.assert_allclose(gp_model.posterior_variance.value, markovgp_model.posterior_variance.value, rtol=1e-4) diff --git a/newt/tests/test_gp_vs_markovgp_reg.py b/newt/tests/test_gp_vs_markovgp_reg.py new file mode 100644 index 0000000..cd895a5 --- /dev/null +++ b/newt/tests/test_gp_vs_markovgp_reg.py @@ -0,0 +1,144 @@ +import newt +import objax +import numpy as np +from jax.config import config +config.update("jax_enable_x64", True) +import pytest + +inf = newt.inference.VariationalInference() + + +def wiggly_time_series(x_): + noise_var = 0.15 # true observation noise + return (np.cos(0.04*x_+0.33*np.pi) * np.sin(0.2*x_) + + np.math.sqrt(noise_var) * np.random.normal(0, 1, x_.shape)) + + +def build_data(N): + # np.random.seed(12345) + x = np.random.permutation(np.linspace(-25.0, 150.0, num=N) + 0.5*np.random.randn(N)) # unevenly spaced + x = np.sort(x) # since MarkovGP sorts the inputs, they must also be sorted for GP + y = wiggly_time_series(x) + # x_test = np.linspace(np.min(x)-15.0, np.max(x)+15.0, num=500) + # y_test = wiggly_time_series(x_test) + # x_plot = np.linspace(np.min(x)-20.0, np.max(x)+20.0, 200) + + x = x[:, None] + # y = y[:, None] + # x_plot = x_plot[:, None] + return x, y + + +def initialise_gp_model(var_f, len_f, var_y, x, y): + kernel = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) + likelihood = newt.likelihoods.Gaussian(variance=var_y) + model = newt.models.GP(kernel=kernel, likelihood=likelihood, X=x, Y=y) + return model + + +def initialise_markovgp_model(var_f, len_f, var_y, x, y): + kernel = newt.kernels.Matern52(variance=var_f, 
lengthscale=len_f) + likelihood = newt.likelihoods.Gaussian(variance=var_y) + model = newt.models.MarkovGP(kernel=kernel, likelihood=likelihood, X=x, Y=y) + return model + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_initial_loss(var_f, len_f, var_y, N): + """ + test whether VI with newt's GP and MarkovGP give the same initial ELBO and posterior + """ + + x, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) + markovgp_model = initialise_markovgp_model(var_f, len_f, var_y, x, y) + + gp_model.update_posterior() + loss_gp = inf(gp_model) + print(loss_gp) + + markovgp_model.update_posterior() + loss_markovgp = inf(markovgp_model) + print(loss_markovgp) + + # print(posterior_mean - f_mean[:, 0]) + + np.testing.assert_allclose(gp_model.posterior_mean.value, markovgp_model.posterior_mean.value, rtol=1e-4) + np.testing.assert_allclose(gp_model.posterior_variance.value, markovgp_model.posterior_variance.value, rtol=1e-4) + np.testing.assert_almost_equal(loss_gp, loss_markovgp, decimal=2) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_gradient_step(var_f, len_f, var_y, N): + """ + test whether VI with newt's GP and MarkovGP provide the same initial gradient step in the hyperparameters + """ + + x, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) + markovgp_model = initialise_markovgp_model(var_f, len_f, var_y, x, y) + + gv = objax.GradValues(inf, gp_model.vars()) + gv_markov = objax.GradValues(inf, markovgp_model.vars()) + + lr_adam = 0.1 + lr_newton = 1. 
+ opt = objax.optimizer.Adam(gp_model.vars()) + opt_markov = objax.optimizer.Adam(markovgp_model.vars()) + + gp_model.update_posterior() + gp_grads, gp_value = gv(gp_model, lr=lr_newton) + gp_loss_ = gp_value[0] + opt(lr_adam, gp_grads) + gp_hypers = np.array([gp_model.kernel.lengthscale, gp_model.kernel.variance, gp_model.likelihood.variance]) + print(gp_hypers) + print(gp_grads) + + markovgp_model.update_posterior() + markovgp_grads, markovgp_value = gv_markov(markovgp_model, lr=lr_newton) + markovgp_loss_ = markovgp_value[0] + opt_markov(lr_adam, markovgp_grads) + markovgp_hypers = np.array([markovgp_model.kernel.lengthscale, markovgp_model.kernel.variance, + markovgp_model.likelihood.variance]) + print(markovgp_hypers) + print(markovgp_grads) + + np.testing.assert_allclose(gp_grads[0], markovgp_grads[0], rtol=1e-4) + np.testing.assert_allclose(gp_grads[1], markovgp_grads[1], rtol=1e-4) + np.testing.assert_allclose(gp_grads[2], markovgp_grads[2], rtol=1e-4) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_inference_step(var_f, len_f, var_y, N): + """ + test whether VI with newt's GP and MarkovGP give the same posterior after one natural gradient step + """ + + x, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) + markovgp_model = initialise_markovgp_model(var_f, len_f, var_y, x, y) + + lr_newton = 1. 
+ + gp_model.update_posterior() + gp_loss = inf(gp_model, lr=lr_newton) # update variational params + gp_model.update_posterior() + + markovgp_model.update_posterior() + markovgp_loss = inf(markovgp_model, lr=lr_newton) # update variational params + markovgp_model.update_posterior() + + np.testing.assert_allclose(gp_model.posterior_mean.value, markovgp_model.posterior_mean.value, rtol=1e-4) + np.testing.assert_allclose(gp_model.posterior_variance.value, markovgp_model.posterior_variance.value, rtol=1e-4) diff --git a/newt/tests/test_gp_vs_markovgp_spacetime.py b/newt/tests/test_gp_vs_markovgp_spacetime.py new file mode 100644 index 0000000..ad13e7b --- /dev/null +++ b/newt/tests/test_gp_vs_markovgp_spacetime.py @@ -0,0 +1,196 @@ +import newt +import objax +import numpy as np +from jax.config import config +config.update("jax_enable_x64", True) +import pytest + + +inf = newt.inference.VariationalInference() + + +def create_grid(x1, x2, y1, y2, n1=10, n2=10): + y_ = np.linspace(y1, y2, n2) + x_ = np.linspace(x1, x2, n1) + + grid = [] + for i in x_: + for j in y_: + grid.append([i, j]) + + return np.array(grid) + + +def build_data(N_): + Nt_train = N_ + Ns = N_ + X_ = create_grid(0, 1, 0, 1, Nt_train, Ns) + t_ = np.linspace(0, 1, Nt_train, dtype=float) + R_ = np.tile(np.linspace(0, 1, Ns, dtype=float)[None, ...], [Nt_train, 1]) + + y_ = np.sin(10 * X_[:, 0]) + np.sin(10 * X_[:, 1]) + 0.01 * np.random.randn(X_.shape[0]) + + # Y = y[:, None] + Y_ = y_.reshape(Nt_train, Ns) + return X_, Y_, t_, R_, y_ + + +def initialise_gp_model(var_f, len_f, var_y, x_, y_, z_): + kernel = newt.kernels.SpatialMatern52(variance=var_f, lengthscale=len_f, + z=z_, sparse=True, opt_z=False, conditional='Full') + likelihood = newt.likelihoods.Gaussian(variance=var_y) + + # the sort during utils.input_admin() sometimes results in different sorting of inputs + # so this step ensures everything is aligned + model_ = newt.models.MarkovGP(kernel=kernel, likelihood=likelihood, X=x_, Y=y_) + 
x_sorted = model_.X + r_sorted = model_.R + x_ = np.vstack([x_sorted.T, r_sorted.T]).T + y_ = model_.Y + + model = newt.models.GP(kernel=kernel, likelihood=likelihood, X=x_, Y=y_) + return model + + +def initialise_markovgp_model(var_f, len_f, var_y, x_, y_, z_): + kernel = newt.kernels.SpatialMatern52(variance=var_f, lengthscale=len_f, + z=z_, sparse=True, opt_z=False, conditional='Full') + likelihood = newt.likelihoods.Gaussian(variance=var_y) + model = newt.models.MarkovGP(kernel=kernel, likelihood=likelihood, X=x_, Y=y_) + return model + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [8, 16]) +def test_initial_loss(var_f, len_f, var_y, N): + """ + test whether VI with newt's GP and MarkovGP give the same initial ELBO and posterior + """ + + x, Y, t, R, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y, R[0]) + markovgp_model = initialise_markovgp_model(var_f, len_f, var_y, x, y, R[0]) + + gp_model.update_posterior() + loss_gp = inf(gp_model) + print(loss_gp) + + markovgp_model.update_posterior() + loss_markovgp = inf(markovgp_model) + print(loss_markovgp) + + # print(gp_model.posterior_variance.value - markovgp_model.posterior_variance.value) + + np.testing.assert_allclose(gp_model.posterior_mean.value, markovgp_model.posterior_mean.value, rtol=1e-4) + np.testing.assert_allclose(gp_model.posterior_variance.value, markovgp_model.posterior_variance.value, rtol=1e-4) + np.testing.assert_almost_equal(loss_gp, loss_markovgp, decimal=2) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [8, 16]) +def test_gradient_step(var_f, len_f, var_y, N): + """ + test whether VI with newt's GP and MarkovGP provide the same initial gradient step in the hyperparameters + """ + + x, Y, t, R, y = 
build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y, R[0]) + markovgp_model = initialise_markovgp_model(var_f, len_f, var_y, x, y, R[0]) + + gv = objax.GradValues(inf, gp_model.vars()) + gv_markov = objax.GradValues(inf, markovgp_model.vars()) + + lr_adam = 0.1 + lr_newton = 1. + opt = objax.optimizer.Adam(gp_model.vars()) + opt_markov = objax.optimizer.Adam(markovgp_model.vars()) + + gp_model.update_posterior() + gp_grads, gp_value = gv(gp_model, lr=lr_newton) + gp_loss_ = gp_value[0] + opt(lr_adam, gp_grads) + gp_hypers = np.array([gp_model.kernel.temporal_kernel.lengthscale, + gp_model.kernel.temporal_kernel.variance, + gp_model.kernel.spatial_kernel.lengthscale, + gp_model.likelihood.variance]) + print(gp_hypers) + print(gp_grads) + + markovgp_model.update_posterior() + markovgp_grads, markovgp_value = gv_markov(markovgp_model, lr=lr_newton) + markovgp_loss_ = markovgp_value[0] + opt_markov(lr_adam, markovgp_grads) + markovgp_hypers = np.array([markovgp_model.kernel.temporal_kernel.lengthscale, + markovgp_model.kernel.temporal_kernel.variance, + markovgp_model.kernel.spatial_kernel.lengthscale, + markovgp_model.likelihood.variance]) + + print(markovgp_hypers) + print(markovgp_grads) + + np.testing.assert_allclose(gp_grads[0], markovgp_grads[0], rtol=1e-4) + np.testing.assert_allclose(gp_grads[1], markovgp_grads[1], rtol=1e-4) + np.testing.assert_allclose(gp_grads[2], markovgp_grads[2], rtol=1e-4) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [8, 16]) +def test_inference_step(var_f, len_f, var_y, N): + """ + test whether VI with newt's GP and MarkovGP give the same posterior after one natural gradient step + """ + + x, Y, t, R, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y, R[0]) + markovgp_model = initialise_markovgp_model(var_f, len_f, var_y, x, y, R[0]) + + lr_newton = 1. 
+ + gp_model.update_posterior() + gp_loss = inf(gp_model, lr=lr_newton) # update variational params + gp_model.update_posterior() + + markovgp_model.update_posterior() + markovgp_loss = inf(markovgp_model, lr=lr_newton) # update variational params + markovgp_model.update_posterior() + + np.testing.assert_allclose(gp_model.posterior_mean.value, markovgp_model.posterior_mean.value, rtol=1e-4) + np.testing.assert_allclose(gp_model.posterior_variance.value, markovgp_model.posterior_variance.value, rtol=1e-4) + + +N = 5 +x, Y, t, R, y = build_data(N) + +var_f = 0.5 +len_f = 0.75 +var_y = 0.1 + +gp_model = initialise_gp_model(var_f, len_f, var_y, x, y, R[0]) +markovgp_model = initialise_markovgp_model(var_f, len_f, var_y, x, y, R[0]) + +lr_newton = 1. + +gp_model.update_posterior() +gp_loss = inf(gp_model, lr=lr_newton) # update variational params +print(gp_loss) +gp_model.update_posterior() + +markovgp_model.update_posterior() +markovgp_loss = inf(markovgp_model, lr=lr_newton) # update variational params +print(markovgp_loss) +markovgp_model.update_posterior() + +# print(gp_model.pseudo_y.value.T) +# print(markovgp_model.pseudo_y.value.T) + +np.testing.assert_allclose(gp_model.posterior_mean.value, markovgp_model.posterior_mean.value, rtol=1e-4) +np.testing.assert_allclose(gp_model.posterior_variance.value, markovgp_model.posterior_variance.value, rtol=1e-4) diff --git a/newt/tests/test_sparsemarkov.py b/newt/tests/test_sparsemarkov.py new file mode 100644 index 0000000..586aeb4 --- /dev/null +++ b/newt/tests/test_sparsemarkov.py @@ -0,0 +1,334 @@ +import newt +import objax +import numpy as np +from jax import vmap +from newt.utils import compute_measurement +import matplotlib.pyplot as plt +from jax.config import config +config.update("jax_enable_x64", True) +import pytest + + +inf = newt.inference.VariationalInference() + + +def wiggly_time_series(x_): + noise_var = 0.15 # true observation noise + return (np.cos(0.04*x_+0.33*np.pi) * np.sin(0.2*x_) + + 
np.math.sqrt(noise_var) * np.random.normal(0, 1, x_.shape)) + + +def build_data(N): + # np.random.seed(12345) + x = np.random.permutation(np.linspace(-25.0, 150.0, num=N) + 0.5*np.random.randn(N)) # unevenly spaced + x = np.sort(x) # since MarkovGP sorts the inputs, they must also be sorted for GP + y = wiggly_time_series(x) + # x_test = np.linspace(np.min(x)-15.0, np.max(x)+15.0, num=500) + # y_test = wiggly_time_series(x_test) + # x_plot = np.linspace(np.min(x)-20.0, np.max(x)+20.0, 200) + + x = x[:, None] + # y = y[:, None] + # x_plot = x_plot[:, None] + return x, y + + +def initialise_gp_model(var_f, len_f, var_y, x, y): + kernel = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) + likelihood = newt.likelihoods.Gaussian(variance=var_y) + model = newt.models.GP(kernel=kernel, likelihood=likelihood, X=x, Y=y) + return model + + +def initialise_sparsemarkovgp_model(var_f, len_f, var_y, x, y, z=None): + if z is None: + z = x + kernel = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) + likelihood = newt.likelihoods.Gaussian(variance=var_y) + model = newt.models.SparseMarkovGP(kernel=kernel, likelihood=likelihood, X=x, Y=y, Z=z) + return model + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [4.5, 7.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_initial_loss(var_f, len_f, var_y, N): + """ + test whether MarkovGP and SparseMarkovGP give the same initial ELBO and posterior (Z=X) + """ + + x, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) + sparsemarkovgp_model = initialise_sparsemarkovgp_model(var_f, len_f, var_y, x, y) + + gp_model.update_posterior() + loss_gp = inf(gp_model) + print(loss_gp) + + sparsemarkovgp_model.update_posterior() + loss_markovgp = inf(sparsemarkovgp_model) + print(loss_markovgp) + + # print(posterior_mean - f_mean[:, 0]) + + # measure_func = vmap( + # compute_measurement, (None, 0, 0, 0) + # ) + # 
post_mean, post_cov = measure_func(sparsemarkovgp_model.kernel, + # sparsemarkovgp_model.Z[:-1, :1], + # sparsemarkovgp_model.posterior_mean.value, + # sparsemarkovgp_model.posterior_variance.value) + post_mean = sparsemarkovgp_model.posterior_mean.value[1:, :1, :1] + post_cov = sparsemarkovgp_model.posterior_variance.value[1:, :1, :1] + + np.testing.assert_allclose(gp_model.posterior_mean.value, post_mean, rtol=1e-4) + np.testing.assert_allclose(gp_model.posterior_variance.value, post_cov, rtol=1e-4) + np.testing.assert_almost_equal(loss_gp, loss_markovgp, decimal=2) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [4.5, 7.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_gradient_step(var_f, len_f, var_y, N): + """ + test whether MarkovGP and SparseMarkovGP provide the same initial gradient step in the hyperparameters (Z=X) + """ + + x, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) + sparsemarkovgp_model = initialise_sparsemarkovgp_model(var_f, len_f, var_y, x, y) + + gv = objax.GradValues(inf, gp_model.vars()) + gv_markov = objax.GradValues(inf, sparsemarkovgp_model.vars()) + + lr_adam = 0.1 + lr_newton = 1. 
+ opt = objax.optimizer.Adam(gp_model.vars()) + opt_markov = objax.optimizer.Adam(sparsemarkovgp_model.vars()) + + gp_model.update_posterior() + gp_grads, gp_value = gv(gp_model, lr=lr_newton) + gp_loss_ = gp_value[0] + opt(lr_adam, gp_grads) + gp_hypers = np.array([gp_model.kernel.lengthscale, gp_model.kernel.variance, gp_model.likelihood.variance]) + print(gp_hypers) + print(gp_grads) + + sparsemarkovgp_model.update_posterior() + markovgp_grads, markovgp_value = gv_markov(sparsemarkovgp_model, lr=lr_newton) + markovgp_loss_ = markovgp_value[0] + opt_markov(lr_adam, markovgp_grads) + markovgp_hypers = np.array([sparsemarkovgp_model.kernel.lengthscale, sparsemarkovgp_model.kernel.variance, + sparsemarkovgp_model.likelihood.variance]) + print(markovgp_hypers) + print(markovgp_grads) + + np.testing.assert_allclose(gp_grads[0], markovgp_grads[0], atol=1e-3) + np.testing.assert_allclose(gp_grads[1], markovgp_grads[1], rtol=1e-4) + np.testing.assert_allclose(gp_grads[2], markovgp_grads[2], rtol=1e-4) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [4.5, 7.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_inference_step(var_f, len_f, var_y, N): + """ + test whether MarkovGP and SparseMarkovGP give the same posterior after one natural gradient step (Z=X) + """ + + x, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) + sparsemarkovgp_model = initialise_sparsemarkovgp_model(var_f, len_f, var_y, x, y) + + lr_newton = 1. 
+ + gp_model.update_posterior() + gp_loss = inf(gp_model, lr=lr_newton) # update variational params + gp_model.update_posterior() + + sparsemarkovgp_model.update_posterior() + markovgp_loss = inf(sparsemarkovgp_model, lr=lr_newton) # update variational params + sparsemarkovgp_model.update_posterior() + + # measure_func = vmap( + # compute_measurement, (None, 0, 0, 0) + # ) + # post_mean, post_cov = measure_func(sparsemarkovgp_model.kernel, + # sparsemarkovgp_model.Z[:-1, :1], + # sparsemarkovgp_model.posterior_mean.value, + # sparsemarkovgp_model.posterior_variance.value) + post_mean = sparsemarkovgp_model.posterior_mean.value[1:, :1, :1] + post_cov = sparsemarkovgp_model.posterior_variance.value[1:, :1, :1] + + np.testing.assert_allclose(gp_model.posterior_mean.value, post_mean, rtol=1e-4) + np.testing.assert_allclose(gp_model.posterior_variance.value, post_cov, rtol=1e-4) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [4.5, 7.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_initial_loss(var_f, len_f, var_y, N): + """ + test whether MarkovGP and SparseMarkovGP give the same initial ELBO and posterior (Z=X) + """ + + x, y = build_data(N) + + z = x + np.random.normal(0, .05, x.shape) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) + sparsemarkovgp_model = initialise_sparsemarkovgp_model(var_f, len_f, var_y, x, y, z) + + gp_model.update_posterior() + loss_gp = inf(gp_model) + print(loss_gp) + + sparsemarkovgp_model.update_posterior() + loss_markovgp = inf(sparsemarkovgp_model) + print(loss_markovgp) + + # print(posterior_mean - f_mean[:, 0]) + + post_mean = sparsemarkovgp_model.posterior_mean.value[1:, :1, :1] + post_cov = sparsemarkovgp_model.posterior_variance.value[1:, :1, :1] + + np.testing.assert_allclose(gp_model.posterior_mean.value, post_mean, rtol=1e-1) + # np.testing.assert_allclose(gp_model.posterior_variance.value, post_cov, rtol=1e-4) + 
np.testing.assert_almost_equal(loss_gp, loss_markovgp, decimal=-1) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [4.5, 7.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_gradient_step(var_f, len_f, var_y, N): + """ + test whether MarkovGP and SparseMarkovGP provide the same initial gradient step in the hyperparameters (Z=X) + """ + + x, y = build_data(N) + + z = x + np.random.normal(0, .05, x.shape) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) + sparsemarkovgp_model = initialise_sparsemarkovgp_model(var_f, len_f, var_y, x, y, z) + + gv = objax.GradValues(inf, gp_model.vars()) + gv_markov = objax.GradValues(inf, sparsemarkovgp_model.vars()) + + lr_adam = 0.1 + lr_newton = 1. + opt = objax.optimizer.Adam(gp_model.vars()) + opt_markov = objax.optimizer.Adam(sparsemarkovgp_model.vars()) + + gp_model.update_posterior() + gp_grads, gp_value = gv(gp_model, lr=lr_newton) + gp_loss_ = gp_value[0] + opt(lr_adam, gp_grads) + gp_hypers = np.array([gp_model.kernel.lengthscale, gp_model.kernel.variance, gp_model.likelihood.variance]) + print(gp_hypers) + print(gp_grads) + + sparsemarkovgp_model.update_posterior() + markovgp_grads, markovgp_value = gv_markov(sparsemarkovgp_model, lr=lr_newton) + markovgp_loss_ = markovgp_value[0] + opt_markov(lr_adam, markovgp_grads) + markovgp_hypers = np.array([sparsemarkovgp_model.kernel.lengthscale, sparsemarkovgp_model.kernel.variance, + sparsemarkovgp_model.likelihood.variance]) + print(markovgp_hypers) + print(markovgp_grads) + + np.testing.assert_allclose(gp_grads[0], markovgp_grads[0], atol=3e-1) + np.testing.assert_allclose(gp_grads[1], markovgp_grads[1], rtol=5e-2) + np.testing.assert_allclose(gp_grads[2], markovgp_grads[2], rtol=5e-2) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [4.5, 7.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def 
test_inference_step(var_f, len_f, var_y, N): + """ + test whether MarkovGP and SparseMarkovGP give almost the same posterior after one + step when Z is a perturbed version of X + """ + + x, y = build_data(N) + + z = x + np.random.normal(0, 0.5, x.shape) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) + sparsemarkovgp_model = initialise_sparsemarkovgp_model(var_f, len_f, var_y, x, y, z) + + lr_newton = 1. + + gp_model.update_posterior() + gp_loss = inf(gp_model, lr=lr_newton) # update variational params + gp_model.update_posterior() + + sparsemarkovgp_model.update_posterior() + markovgp_loss = inf(sparsemarkovgp_model, lr=lr_newton) # update variational params + sparsemarkovgp_model.update_posterior() + + post_mean, post_cov = sparsemarkovgp_model(np.sort(x)) + + # post_mean = sparsemarkovgp_model.posterior_mean.value[1:, :1, :1] + # post_cov = sparsemarkovgp_model.posterior_variance.value[1:, :1, :1] + + np.testing.assert_allclose(np.squeeze(gp_model.posterior_mean.value), post_mean, atol=1e-1) + np.testing.assert_allclose(np.squeeze(gp_model.posterior_variance.value), post_cov, atol=1e-1) + + +# N = 30 +# var_f = .5 +# len_f = 5. +# var_y = 0.1 +# +# np.random.seed(123) +# x, y = build_data(N) +# +# z = x + np.random.normal(0., 1, x.shape) +# # z[-1] += 5 +# # z[-2] -= 1 +# # z[9] -= 1 +# # z = np.concatenate([z, np.array([z[9]+1])], axis=0) +# # z -= np.abs(np.random.normal(0, .1, x.shape)) +# +# gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) +# sparsemarkovgp_model = initialise_sparsemarkovgp_model(var_f, len_f, var_y, x, y, z) +# +# lr_newton = 1. 
+# +# gp_model.update_posterior() +# gp_loss = inf(gp_model, lr=lr_newton) # update variational params +# gp_model.update_posterior() +# +# sparsemarkovgp_model.update_posterior() +# markovgp_loss = inf(sparsemarkovgp_model, lr=lr_newton) # update variational params +# sparsemarkovgp_model.update_posterior() +# +# post_mean, post_cov = sparsemarkovgp_model(np.sort(x)) +# post_mean_ = sparsemarkovgp_model.posterior_mean.value[:, 0, 0] +# +# plt.plot(x, y, 'k.') +# plt.plot(z, np.zeros_like(z), 'b.') +# plt.plot(x, post_mean, 'b-') +# plt.plot(x, post_mean + np.sqrt(post_cov), 'b-', alpha=0.4) +# plt.plot(x, post_mean - np.sqrt(post_cov), 'b-', alpha=0.4) +# plt.plot(x, np.squeeze(gp_model.posterior_mean.value), 'r--') +# plt.plot(x, np.squeeze(gp_model.posterior_mean.value) + np.sqrt(np.squeeze(gp_model.posterior_variance.value)), 'r--', alpha=0.4) +# plt.plot(x, np.squeeze(gp_model.posterior_mean.value) - np.sqrt(np.squeeze(gp_model.posterior_variance.value)), 'r--', alpha=0.4) +# plt.show() +# +# # post_mean = sparsemarkovgp_model.posterior_mean.value[1:, :1, :1] +# # post_cov = sparsemarkovgp_model.posterior_variance.value[1:, :1, :1] +# +# np.testing.assert_allclose(np.squeeze(gp_model.posterior_mean.value), post_mean, atol=1e-1) +# np.testing.assert_allclose(np.squeeze(gp_model.posterior_variance.value), post_cov, atol=1e-1) diff --git a/newt/tests/test_vs_exact_marg_lik.py b/newt/tests/test_vs_exact_marg_lik.py new file mode 100644 index 0000000..7e8fc2e --- /dev/null +++ b/newt/tests/test_vs_exact_marg_lik.py @@ -0,0 +1,70 @@ +import newt +import numpy as np +from newt.utils import solve +from jax.config import config +config.update("jax_enable_x64", True) +import pytest + + +inf = newt.inference.VariationalInference() + + +def wiggly_time_series(x_): + noise_var = 0.15 # true observation noise + return (np.cos(0.04*x_+0.33*np.pi) * np.sin(0.2*x_) + + np.math.sqrt(noise_var) * np.random.normal(0, 1, x_.shape)) + + +def build_data(N): + # np.random.seed(12345) 
+ x = np.random.permutation(np.linspace(-25.0, 150.0, num=N) + 0.5*np.random.randn(N)) # unevenly spaced + x = np.sort(x) # since MarkovGP sorts the inputs, they must also be sorted for GP + y = wiggly_time_series(x) + # x_test = np.linspace(np.min(x)-15.0, np.max(x)+15.0, num=500) + # y_test = wiggly_time_series(x_test) + # x_plot = np.linspace(np.min(x)-20.0, np.max(x)+20.0, 200) + + x = x[:, None] + # y = y[:, None] + # x_plot = x_plot[:, None] + return x, y + + +def initialise_gp_model(var_f, len_f, var_y, x, y): + kernel = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) + likelihood = newt.likelihoods.Gaussian(variance=var_y) + model = newt.models.GP(kernel=kernel, likelihood=likelihood, X=x, Y=y) + return model + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_marg_lik(var_f, len_f, var_y, N): + """ + test whether VI with newt's GP and MarkovGP give the same initial ELBO and posterior + """ + + x, y = build_data(N) + + gp_model = initialise_gp_model(var_f, len_f, var_y, x, y) + + gp_model.update_posterior() + loss_gp = inf(gp_model) + gp_model.update_posterior() + loss_gp = inf(gp_model) + print(loss_gp) + + K_X = gp_model.kernel(x, x) + K_Y = K_X + var_y * np.eye(K_X.shape[0]) + L_Y = np.linalg.cholesky(K_Y) + exact_marg_lik = ( + -0.5 * y.T @ solve(K_Y, y) + - np.sum(np.log(np.diag(L_Y))) + - 0.5 * y.shape[0] * np.log(2 * np.pi) + ) + + print(exact_marg_lik) + + np.testing.assert_almost_equal(loss_gp, -exact_marg_lik, decimal=4) diff --git a/newt/tests/test_vs_gpflow_class.py b/newt/tests/test_vs_gpflow_class.py new file mode 100644 index 0000000..e03b06c --- /dev/null +++ b/newt/tests/test_vs_gpflow_class.py @@ -0,0 +1,179 @@ +import newt +import objax +from newt.utils import inv +import numpy as np +from jax.config import config +config.update("jax_enable_x64", True) +import pytest +import tensorflow 
as tf +import gpflow + + +inf = newt.inference.VariationalInference() + + +def build_data(N): + # np.random.seed(12345) + x = 100 * np.random.rand(N) + f = lambda x_: 6 * np.sin(np.pi * x_ / 10.0) / (np.pi * x_ / 10.0 + 1) + y_ = f(x) + np.math.sqrt(0.05) * np.random.randn(x.shape[0]) + y = np.sign(y_) + y[y == -1] = 0 + x = x[:, None] + return x, y + + +def initialise_newt_model(var_f, len_f, x, y): + kernel = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) + likelihood = newt.likelihoods.Bernoulli() + model = newt.models.GP(kernel=kernel, likelihood=likelihood, X=x, Y=y) + return model + + +def initialise_gpflow_model(var_f, len_f, x, y): + N = x.shape[0] + k = gpflow.kernels.Matern52(lengthscales=[len_f], variance=var_f, name='matern') + + # find the m and S that correspond to the same natural parameters used by CVI + K_xx = np.array(k(x, x)) + K_xx_inv = inv(K_xx) + + S = inv(K_xx_inv + 1e-2 * np.eye(N)) + S_chol = np.linalg.cholesky(S) + S_chol_init = np.array([S_chol]) + # S_chol_flattened_init = np.array(S_chol[np.tril_indices(N, 0)]) + + lambda_init = np.zeros((N, 1)) + m_init = S @ lambda_init + + lik = gpflow.likelihoods.Bernoulli() + + # data = (x, y) + model = gpflow.models.SVGP( + inducing_variable=x, + whiten=False, + kernel=k, + mean_function=None, + likelihood=lik, + q_mu=m_init, + q_sqrt=S_chol_init + ) + gpflow.utilities.set_trainable(model.inducing_variable.Z, False) + gpflow.utilities.set_trainable(model.q_mu, False) + gpflow.utilities.set_trainable(model.q_sqrt, False) + return model + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [2.5, 5.]) +@pytest.mark.parametrize('N', [30, 60]) +def test_initial_loss(var_f, len_f, N): + """ + test whether newt's VI and gpflow's SVGP (Z=X) give the same initial ELBO and posterior + """ + + x, y = build_data(N) + + newt_model = initialise_newt_model(var_f, len_f, x, y) + gpflow_model = initialise_gpflow_model(var_f, len_f, x, y) + + newt_model.update_posterior() 
+ loss_newt = inf(newt_model) + # _, _, expected_density = newt_model.inference(newt_model) + print(loss_newt) + # print(expected_density) + + data = (x, y[:, None]) + f_mean, f_var = gpflow_model.predict_f(x) + var_exp = np.sum(gpflow_model.likelihood.variational_expectations(f_mean, f_var, y[:, None])) + loss_gpflow = -gpflow_model.elbo(data) + print(loss_gpflow.numpy()) + # print(var_exp) + + # print(posterior_mean - f_mean[:, 0]) + + np.testing.assert_allclose(np.squeeze(newt_model.posterior_mean.value), f_mean[:, 0], rtol=1e-4) + np.testing.assert_allclose(np.squeeze(newt_model.posterior_variance.value), f_var[:, 0], rtol=1e-4) + np.testing.assert_almost_equal(loss_newt, loss_gpflow.numpy(), decimal=2) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [2.5, 5.]) +@pytest.mark.parametrize('N', [30, 60]) +def test_gradient_step(var_f, len_f, N): + """ + test whether newt's VI and gpflow's SVGP (Z=X) provide the same initial gradient step in the hyperparameters + """ + + x, y = build_data(N) + + newt_model = initialise_newt_model(var_f, len_f, x, y) + gpflow_model = initialise_gpflow_model(var_f, len_f, x, y) + + gv = objax.GradValues(inf, newt_model.vars()) + + lr_adam = 0.1 + lr_newton = 1. 
+ opt = objax.optimizer.Adam(newt_model.vars()) + + newt_model.update_posterior() + newt_grads, value = gv(newt_model, lr=lr_newton) + loss_ = value[0] + opt(lr_adam, newt_grads) + newt_hypers = np.array([newt_model.kernel.lengthscale, newt_model.kernel.variance]) + print(newt_hypers) + print(newt_grads) + + adam_opt = tf.optimizers.Adam(lr_adam) + data = (x, y[:, None]) + with tf.GradientTape() as tape: + loss = -gpflow_model.elbo(data) + _vars = gpflow_model.trainable_variables + gpflow_grads = tape.gradient(loss, _vars) + + loss_fn = gpflow_model.training_loss_closure(data) + adam_vars = gpflow_model.trainable_variables + adam_opt.minimize(loss_fn, adam_vars) + gpflow_hypers = np.array([gpflow_model.kernel.lengthscales.numpy()[0], gpflow_model.kernel.variance.numpy()]) + print(gpflow_hypers) + print(gpflow_grads) + + np.testing.assert_allclose(newt_grads[0], gpflow_grads[0], rtol=1e-2) + np.testing.assert_allclose(newt_grads[1], gpflow_grads[1], rtol=1e-2) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [2.5, 5.]) +@pytest.mark.parametrize('N', [30, 60]) +def test_inference_step(var_f, len_f, N): + """ + test whether newt's VI and gpflow's SVGP (Z=X) give the same posterior after one natural gradient step + """ + + x, y = build_data(N) + + newt_model = initialise_newt_model(var_f, len_f, x, y) + gpflow_model = initialise_gpflow_model(var_f, len_f, x, y) + + lr_newton = 1. 
+ + newt_model.update_posterior() + newt_loss = inf(newt_model, lr=lr_newton) # update variational params + newt_model.update_posterior() + + data = (x, y[:, None]) + with tf.GradientTape() as tape: + loss = -gpflow_model.elbo(data) + + variational_vars = [(gpflow_model.q_mu, gpflow_model.q_sqrt)] + natgrad_opt = gpflow.optimizers.NaturalGradient(gamma=lr_newton) + loss_fn = gpflow_model.training_loss_closure(data) + natgrad_opt.minimize(loss_fn, variational_vars) + + f_mean, f_var = gpflow_model.predict_f(x) + + # print(post_mean_) + # print(f_mean[:, 0]) + + np.testing.assert_allclose(np.squeeze(newt_model.posterior_mean.value), f_mean[:, 0], rtol=5e-3) + np.testing.assert_allclose(np.squeeze(newt_model.posterior_variance.value), f_var[:, 0], rtol=5e-3) diff --git a/newt/tests/test_vs_gpflow_reg.py b/newt/tests/test_vs_gpflow_reg.py new file mode 100644 index 0000000..6358d78 --- /dev/null +++ b/newt/tests/test_vs_gpflow_reg.py @@ -0,0 +1,193 @@ +import newt +import objax +from newt.utils import inv +import numpy as np +from jax.config import config +config.update("jax_enable_x64", True) +import pytest +import tensorflow as tf +import gpflow + + +inf = newt.inference.VariationalInference() + + +def wiggly_time_series(x_): + noise_var = 0.15 # true observation noise + return (np.cos(0.04*x_+0.33*np.pi) * np.sin(0.2*x_) + + np.math.sqrt(noise_var) * np.random.normal(0, 1, x_.shape)) + + +def build_data(N): + # np.random.seed(12345) + x = np.random.permutation(np.linspace(-25.0, 150.0, num=N) + 0.5*np.random.randn(N)) # unevenly spaced + y = wiggly_time_series(x) + # x_test = np.linspace(np.min(x)-15.0, np.max(x)+15.0, num=500) + # y_test = wiggly_time_series(x_test) + # x_plot = np.linspace(np.min(x)-20.0, np.max(x)+20.0, 200) + + x = x[:, None] + # y = y[:, None] + # x_plot = x_plot[:, None] + return x, y + + +def initialise_newt_model(var_f, len_f, var_y, x, y): + kernel = newt.kernels.Matern52(variance=var_f, lengthscale=len_f) + likelihood = 
newt.likelihoods.Gaussian(variance=var_y) + model = newt.models.GP(kernel=kernel, likelihood=likelihood, X=x, Y=y) + return model + + +def initialise_gpflow_model(var_f, len_f, var_y, x, y): + N = x.shape[0] + k = gpflow.kernels.Matern52(lengthscales=[len_f], variance=var_f, name='matern') + + # find the m and S that correspond to the same natural parameters used by CVI + K_xx = np.array(k(x, x)) + K_xx_inv = inv(K_xx) + + S = inv(K_xx_inv + 1e-2 * np.eye(N)) + S_chol = np.linalg.cholesky(S) + S_chol_init = np.array([S_chol]) + # S_chol_flattened_init = np.array(S_chol[np.tril_indices(N, 0)]) + + lambda_init = np.zeros((N, 1)) + m_init = S @ lambda_init + + lik = gpflow.likelihoods.Gaussian(variance=var_y) + + # data = (x, y) + model = gpflow.models.SVGP( + inducing_variable=x, + whiten=False, + kernel=k, + mean_function=None, + likelihood=lik, + q_mu=m_init, + q_sqrt=S_chol_init + ) + gpflow.utilities.set_trainable(model.inducing_variable.Z, False) + gpflow.utilities.set_trainable(model.q_mu, False) + gpflow.utilities.set_trainable(model.q_sqrt, False) + return model + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_initial_loss(var_f, len_f, var_y, N): + """ + test whether newt's VI and gpflow's SVGP (Z=X) give the same initial ELBO and posterior + """ + + x, y = build_data(N) + + newt_model = initialise_newt_model(var_f, len_f, var_y, x, y) + gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, x, y) + + newt_model.update_posterior() + loss_newt = inf.energy(newt_model) + # _, _, expected_density = newt_model.inference(newt_model) + print(loss_newt) + # print(expected_density) + + data = (x, y[:, None]) + f_mean, f_var = gpflow_model.predict_f(x) + var_exp = np.sum(gpflow_model.likelihood.variational_expectations(f_mean, f_var, y[:, None])) + loss_gpflow = -gpflow_model.elbo(data) + print(loss_gpflow.numpy()) + 
# print(var_exp) + + # print(posterior_mean - f_mean[:, 0]) + + np.testing.assert_allclose(np.squeeze(newt_model.posterior_mean.value), f_mean[:, 0], rtol=1e-4) + np.testing.assert_allclose(np.squeeze(newt_model.posterior_variance.value), f_var[:, 0], rtol=1e-4) + np.testing.assert_almost_equal(loss_newt, loss_gpflow.numpy(), decimal=2) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_gradient_step(var_f, len_f, var_y, N): + """ + test whether newt's VI and gpflow's SVGP (Z=X) provide the same initial gradient step in the hyperparameters + """ + + x, y = build_data(N) + + newt_model = initialise_newt_model(var_f, len_f, var_y, x, y) + gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, x, y) + + gv = objax.GradValues(inf.energy, newt_model.vars()) + + lr_adam = 0.1 + lr_newton = 1. + opt = objax.optimizer.Adam(newt_model.vars()) + + newt_model.update_posterior() + newt_grads, value = gv(newt_model) # , lr=lr_newton) + loss_ = value[0] + opt(lr_adam, newt_grads) + newt_hypers = np.array([newt_model.kernel.lengthscale, newt_model.kernel.variance, newt_model.likelihood.variance]) + print(newt_hypers) + print(newt_grads) + + adam_opt = tf.optimizers.Adam(lr_adam) + data = (x, y[:, None]) + with tf.GradientTape() as tape: + loss = -gpflow_model.elbo(data) + _vars = gpflow_model.trainable_variables + gpflow_grads = tape.gradient(loss, _vars) + + loss_fn = gpflow_model.training_loss_closure(data) + adam_vars = gpflow_model.trainable_variables + adam_opt.minimize(loss_fn, adam_vars) + gpflow_hypers = np.array([gpflow_model.kernel.lengthscales.numpy()[0], gpflow_model.kernel.variance.numpy(), + gpflow_model.likelihood.variance.numpy()]) + print(gpflow_hypers) + print(gpflow_grads) + + np.testing.assert_allclose(newt_grads[0], gpflow_grads[0], atol=1e-2) # use atol since values are so small + 
np.testing.assert_allclose(newt_grads[1], gpflow_grads[1], rtol=1e-2) + np.testing.assert_allclose(newt_grads[2], gpflow_grads[2], rtol=1e-2) + + +@pytest.mark.parametrize('var_f', [0.5, 1.5]) +@pytest.mark.parametrize('len_f', [0.75, 2.5]) +@pytest.mark.parametrize('var_y', [0.1, 0.5]) +@pytest.mark.parametrize('N', [30, 60]) +def test_inference_step(var_f, len_f, var_y, N): + """ + test whether newt's VI and gpflow's SVGP (Z=X) give the same posterior after one natural gradient step + """ + + x, y = build_data(N) + + newt_model = initialise_newt_model(var_f, len_f, var_y, x, y) + gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, x, y) + + lr_newton = 1. + + newt_model.update_posterior() + newt_loss = inf(newt_model, lr=lr_newton) # update variational params + newt_model.update_posterior() + + data = (x, y[:, None]) + with tf.GradientTape() as tape: + loss = -gpflow_model.elbo(data) + + variational_vars = [(gpflow_model.q_mu, gpflow_model.q_sqrt)] + natgrad_opt = gpflow.optimizers.NaturalGradient(gamma=lr_newton) + loss_fn = gpflow_model.training_loss_closure(data) + natgrad_opt.minimize(loss_fn, variational_vars) + + f_mean, f_var = gpflow_model.predict_f(x) + + # print(post_mean_) + # print(f_mean[:, 0]) + + np.testing.assert_allclose(np.squeeze(newt_model.posterior_mean.value), f_mean[:, 0], rtol=1e-3) + np.testing.assert_allclose(np.squeeze(newt_model.posterior_variance.value), f_var[:, 0], rtol=1e-3) diff --git a/newt/tests/test_vs_gpflow_shutters.py b/newt/tests/test_vs_gpflow_shutters.py new file mode 100644 index 0000000..a3aa4a3 --- /dev/null +++ b/newt/tests/test_vs_gpflow_shutters.py @@ -0,0 +1,245 @@ +import newt +import objax +from newt.utils import inv +import numpy as np +from jax.config import config +config.update("jax_enable_x64", True) +import pytest +import tensorflow as tf +import gpflow +import scipy as sp +import pickle + +gpflow.config.set_default_jitter(1e-20) + +train_data = 
pickle.load(open(f'../experiments/shutters/data/train_data_0_0.pickle', "rb")) +pred_data = pickle.load(open(f'../experiments/shutters/data/pred_data_0_0.pickle', "rb")) +pred_data = pred_data['grid'] + +X = train_data['X'] +Y = train_data['Y'] +X_test = pred_data['X'] +Y_test = pred_data['Y'] + +inf = newt.inference.VariationalInference() + + +def initialise_newt_model(var_f, len_f, var_y, x, y): + r = np.unique(x[:, 1]) + print(r) + kernel = newt.kernels.SpatioTemporalMatern52(variance=var_f, lengthscale_time=len_f, lengthscale_space=len_f, z=r) + likelihood = newt.likelihoods.Gaussian(variance=var_y) + model = newt.models.GP(kernel=kernel, likelihood=likelihood, X=x, Y=y) + return model + + +# def init_as_cvi(kern, Z_all): +# M = Z_all.shape[0] +# +# Kzz = kern(Z_all, Z_all) +# +# # def inv(K): +# # K_chol = sp.linalg.cholesky(K+1e-3*np.eye(M), lower=True) +# # return sp.linalg.cho_solve((K_chol, True), np.eye(K.shape[0])) +# +# #manual q(u) decompositin +# nat1 = np.zeros([M, 1]) +# nat2 = inv(Kzz) +# +# lam1 = np.zeros([M, 1]) +# lam2 = 1e-2*np.eye(M) +# +# # S = inv(-2*(nat2+lam2)) +# S = inv(nat2+lam2) +# m = S @ (lam1 + nat1) +# +# S_chol = sp.linalg.cholesky(S+1e-8*np.eye(M), lower=True) +# S_flattened = S_chol[np.tril_indices(M, 0)] +# +# q_mu = m +# q_sqrt = np.array([S_chol]) +# return q_mu, q_sqrt + + +def initialise_gpflow_model(var_f, len_f, var_y, x, y): + N = x.shape[0] + k0 = gpflow.kernels.Matern52(lengthscales=[len_f], variance=var_f, active_dims=[0], name='matern1') + k1 = gpflow.kernels.Matern52(lengthscales=[len_f], variance=1., active_dims=[1], name='matern2') + k = k0 * k1 + + # find the m and S that correspond to the same natural parameters used by CVI + K_xx = np.array(k(x, x)) + K_xx_inv = inv(K_xx) + + print(x.shape) + + S = inv(K_xx_inv + 1e-2 * np.eye(N)) + S_chol = np.linalg.cholesky(S) + S_chol_init = np.array([S_chol]) + # S_chol_flattened_init = np.array(S_chol[np.tril_indices(N, 0)]) + + lambda_init = np.zeros((N, 1)) + m_init = 
S @ lambda_init + + lik = gpflow.likelihoods.Gaussian(variance=var_y) + + # data = (x, y) + # print(x) + + model = gpflow.models.SVGP( + inducing_variable=x, + whiten=False, + kernel=k, + mean_function=None, + likelihood=lik, + q_mu=m_init, + q_sqrt=S_chol_init + ) + gpflow.utilities.set_trainable(model.inducing_variable.Z, False) + gpflow.utilities.set_trainable(model.q_mu, False) + gpflow.utilities.set_trainable(model.q_sqrt, False) + return model + + +@pytest.mark.parametrize('var_f', [1., 5.]) +@pytest.mark.parametrize('len_f', [0.1, 0.025]) +@pytest.mark.parametrize('var_y', [0.1, 0.3]) +def test_initial_loss(var_f, len_f, var_y): + """ + test whether newt's VI and gpflow's SVGP (Z=X) give the same initial ELBO and posterior + """ + + newt_model = initialise_newt_model(var_f, len_f, var_y, X, Y) + gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, X, Y) + + newt_model.update_posterior() + loss_newt = inf(newt_model) + # _, _, expected_density = newt_model.inference(newt_model) + print(loss_newt) + # print(expected_density) + + data = (X, Y) + f_mean, f_var = gpflow_model.predict_f(X) + var_exp = np.sum(gpflow_model.likelihood.variational_expectations(f_mean, f_var, Y)) + loss_gpflow = -gpflow_model.elbo(data) + print(loss_gpflow.numpy()) + # print(var_exp) + + # print(posterior_mean - f_mean[:, 0]) + + np.testing.assert_allclose(np.squeeze(newt_model.posterior_mean.value), f_mean[:, 0], rtol=1e-4) + np.testing.assert_allclose(np.squeeze(newt_model.posterior_variance.value), f_var[:, 0], rtol=1e-4) + np.testing.assert_almost_equal(loss_newt, loss_gpflow.numpy(), decimal=2) + + +@pytest.mark.parametrize('var_f', [1., 5.]) +@pytest.mark.parametrize('len_f', [0.1, 0.025]) +@pytest.mark.parametrize('var_y', [0.1, 0.3]) +def test_gradient_step(var_f, len_f, var_y): + """ + test whether newt's VI and gpflow's SVGP (Z=X) provide the same initial gradient step in the hyperparameters + """ + + # x, y = build_data(N) + + newt_model = initialise_newt_model(var_f, 
len_f, var_y, X, Y) + gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, X, Y) + + gv = objax.GradValues(inf, newt_model.vars()) + + lr_adam = 0.1 + lr_newton = 1. + opt = objax.optimizer.Adam(newt_model.vars()) + + newt_model.update_posterior() + newt_grads, value = gv(newt_model, lr=lr_newton) + loss_ = value[0] + opt(lr_adam, newt_grads) + newt_hypers = np.array([newt_model.kernel.temporal_lengthscale, newt_model.kernel.spatial_lengthscale, + newt_model.kernel.variance, newt_model.likelihood.variance]) + print(newt_hypers) + print(newt_grads) + + adam_opt = tf.optimizers.Adam(lr_adam) + data = (X, Y) + with tf.GradientTape() as tape: + loss = -gpflow_model.elbo(data) + _vars = gpflow_model.trainable_variables + gpflow_grads = tape.gradient(loss, _vars) + + loss_fn = gpflow_model.training_loss_closure(data) + adam_vars = gpflow_model.trainable_variables + adam_opt.minimize(loss_fn, adam_vars) + gpflow_hypers = np.array([gpflow_model.kernel.lengthscales.numpy()[0], + gpflow_model.kernel.lengthscales.numpy()[1], + gpflow_model.kernel.variance.numpy(), + gpflow_model.likelihood.variance.numpy()]) + print(gpflow_hypers) + print(gpflow_grads) + + np.testing.assert_allclose(newt_grads[0], gpflow_grads[0], atol=1e-2) # use atol since values are so small + np.testing.assert_allclose(newt_grads[1], gpflow_grads[1], rtol=1e-2) + np.testing.assert_allclose(newt_grads[2], gpflow_grads[2], rtol=1e-2) + + +# @pytest.mark.parametrize('var_f', [0.5, 1.5]) +# @pytest.mark.parametrize('len_f', [0.75, 2.5]) +# @pytest.mark.parametrize('var_y', [0.1, 0.5]) +# def test_inference_step(var_f, len_f, var_y): +# """ +# test whether newt's VI and gpflow's SVGP (Z=X) give the same posterior after one natural gradient step +# """ +# +# # x, y = build_data(N) +# +# newt_model = initialise_newt_model(var_f, len_f, var_y, X, Y) +# gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, X, Y) +# +# lr_newton = 1. 
+# +# newt_model.update_posterior() +# newt_loss = inf(newt_model, lr=lr_newton) # update variational params +# newt_model.update_posterior() +# +# data = (X, Y[:, None) +# with tf.GradientTape() as tape: +# loss = -gpflow_model.elbo(data) +# +# variational_vars = [(gpflow_model.q_mu, gpflow_model.q_sqrt)] +# natgrad_opt = gpflow.optimizers.NaturalGradient(gamma=lr_newton) +# loss_fn = gpflow_model.training_loss_closure(data) +# natgrad_opt.minimize(loss_fn, variational_vars) +# +# f_mean, f_var = gpflow_model.predict_f(X) +# +# # print(post_mean_) +# # print(f_mean[:, 0]) +# +# np.testing.assert_allclose(np.squeeze(newt_model.posterior_mean.value), f_mean[:, 0], rtol=1e-3) +# np.testing.assert_allclose(np.squeeze(newt_model.posterior_variance.value), f_var[:, 0], rtol=1e-3) + +var_f = 1 +len_f = 1 +var_y = 0.1 + +newt_model = initialise_newt_model(var_f, len_f, var_y, X, Y) +gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, X, Y) + +newt_model.update_posterior() +loss_newt = inf(newt_model) +# _, _, expected_density = newt_model.inference(newt_model) +print(loss_newt) +# print(expected_density) + +data = (X, Y) +f_mean, f_var = gpflow_model.predict_f(X) +var_exp = np.sum(gpflow_model.likelihood.variational_expectations(f_mean, f_var, Y)) +loss_gpflow = -gpflow_model.elbo(data) +print(loss_gpflow.numpy()) +# print(var_exp) + +# print(posterior_mean - f_mean[:, 0]) + +# np.testing.assert_allclose(np.squeeze(newt_model.posterior_mean.value), f_mean[:, 0], rtol=1e-4) +# np.testing.assert_allclose(np.squeeze(newt_model.posterior_variance.value), f_var[:, 0], rtol=1e-4) +# np.testing.assert_almost_equal(loss_newt, loss_gpflow.numpy(), decimal=2) diff --git a/newt/tests/test_vs_gpflow_spacetime.py b/newt/tests/test_vs_gpflow_spacetime.py new file mode 100644 index 0000000..afdfdf1 --- /dev/null +++ b/newt/tests/test_vs_gpflow_spacetime.py @@ -0,0 +1,268 @@ +import newt +import objax +import numpy as np +from newt.utils import inv +import gpflow +from jax.config 
import config +config.update("jax_enable_x64", True) +import pytest +import scipy as sp + +gpflow.config.set_default_jitter(1e-32) + +inf = newt.inference.VariationalInference() + + +# def inv_(K): +# K_chol = sp.linalg.cholesky(K, lower=True) +# return sp.linalg.cho_solve((K_chol, True), np.eye(K.shape[0])) + + +def create_grid(x1, x2, y1, y2, n1=10, n2=10): + y_ = np.linspace(y1, y2, n2) + x_ = np.linspace(x1, x2, n1) + + grid = [] + for i in x_: + for j in y_: + grid.append([i, j]) + + return np.array(grid) + + +def build_data(N_): + Nt_train = N_ + Ns = N_ + X_ = create_grid(0, 1, 0, 1, Nt_train, Ns) + t_ = np.linspace(0, 1, Nt_train, dtype=float) + R_ = np.tile(np.linspace(0, 1, Ns, dtype=float)[None, ...], [Nt_train, 1]) + + y_ = np.sin(10 * X_[:, 0]) + np.sin(10 * X_[:, 1]) + 0.01 * np.random.randn(X_.shape[0]) + + # Y = y[:, None] + Y_ = y_.reshape(Nt_train, Ns) + return X_, Y_, t_, R_, y_ + + +def initialise_gp_model(var_f, len_f, var_y, x_, y_, z_): + kernel = newt.kernels.SpatialMatern52(variance=var_f, lengthscale=len_f, + z=z_, sparse=True, opt_z=False, conditional='Full') + likelihood = newt.likelihoods.Gaussian(variance=var_y) + + # the sort during utils.input_admin() sometimes results in different sorting of inputs + # so this step ensures everything is aligned + # model_ = newt.models.MarkovGP(kernel=kernel, likelihood=likelihood, X=x_, Y=y_) + # x_sorted = model_.X + # r_sorted = model_.R + # x_ = np.vstack([x_sorted.T, r_sorted.T]).T + # y_ = model_.Y + + model = newt.models.GP(kernel=kernel, likelihood=likelihood, X=x_, Y=y_) + return model + + +def initialise_markovgp_model(var_f, len_f, var_y, x_, y_, z_): + kernel = newt.kernels.SpatialMatern52(variance=var_f, lengthscale=len_f, + z=z_, sparse=True, opt_z=False, conditional='Full') + likelihood = newt.likelihoods.Gaussian(variance=var_y) + model = newt.models.MarkovGP(kernel=kernel, likelihood=likelihood, X=x_, Y=y_) + return model + + +def initialise_gpflow_model(var_f_, len_f_, var_y_, x_, 
y_): + N_ = x_.shape[0] + k0 = gpflow.kernels.Matern52(lengthscales=[len_f_], variance=var_f_, active_dims=[0], name='matern1') + k1 = gpflow.kernels.Matern52(lengthscales=[len_f_], variance=1., active_dims=[1], name='matern2') + k = k0 * k1 + + # find the m and S that correspond to the same natural parameters used by CVI + K_xx = np.array(k(x_, x_)) + K_xx_inv = inv(K_xx) + + # print(x_.shape) + + S = inv(K_xx_inv + 1e-2 * np.eye(N_)) + S_chol = np.linalg.cholesky(S) + S_chol_init = np.array([S_chol]) + # print(np.diag(S)) + # S_chol_flattened_init = np.array(S_chol[np.tril_indices(N, 0)]) + + lambda_init = np.zeros((N_, 1)) + m_init = S @ lambda_init + + lik = gpflow.likelihoods.Gaussian(variance=var_y_) + + # data = (x, y) + # print(x) + + model = gpflow.models.SVGP( + inducing_variable=x_, + whiten=False, + kernel=k, + mean_function=None, + likelihood=lik, + q_mu=m_init, + q_sqrt=S_chol_init + ) + gpflow.utilities.set_trainable(model.inducing_variable.Z, False) + gpflow.utilities.set_trainable(model.q_mu, False) + gpflow.utilities.set_trainable(model.q_sqrt, False) + return model + + +@pytest.mark.parametrize('var_f_', [0.5, 1.5]) +@pytest.mark.parametrize('len_f_', [0.5, 1.]) +@pytest.mark.parametrize('var_y_', [0.1, 0.5]) +@pytest.mark.parametrize('N_', [8, 16]) +def test_initial_loss(var_f_, len_f_, var_y_, N_): + """ + test whether VI with newt's GP and MarkovGP give the same initial ELBO and posterior + """ + + x_, Y_, t_, R_, y_ = build_data(N_) + + gp_model = initialise_gp_model(var_f_, len_f_, var_y_, x_, y_, R_[0]) + markovgp_model = initialise_markovgp_model(var_f_, len_f_, var_y_, x_, y_, R_[0]) + gpflow_model = initialise_gpflow_model(var_f_, len_f_, var_y_, x_, y_) + + gp_model.update_posterior() + f_mean_gp, f_var_gp = gp_model.predict(x_) + loss_gp = inf(gp_model) + print(loss_gp) + + markovgp_model.update_posterior() + f_mean_markovgp, f_var_markovgp = markovgp_model.predict(x_) + loss_markovgp = inf(markovgp_model) + print(loss_markovgp) + data 
= (x_, y_[..., None]) + f_mean_gpflow, f_var_gpflow = gpflow_model.predict_f(x_[None], full_cov=False, full_output_cov=False) + # var_exp = np.sum(gpflow_model.likelihood.variational_expectations(f_mean, f_var, Y)) + loss_gpflow = -gpflow_model.elbo(data) + print(loss_gpflow.numpy()) + + # print(gp_model.posterior_variance.value - markovgp_model.posterior_variance.value) + + # np.testing.assert_allclose(gp_model.posterior_mean.value, markovgp_model.posterior_mean.value, rtol=1e-4) + # np.testing.assert_allclose(gp_model.posterior_variance.value, markovgp_model.posterior_variance.value, rtol=1e-4) + np.testing.assert_almost_equal(loss_gp, loss_markovgp, decimal=2) + np.testing.assert_almost_equal(loss_gp, loss_gpflow.numpy(), decimal=2) + np.testing.assert_allclose(f_var_gp, f_var_markovgp, rtol=1e-4) + np.testing.assert_allclose(f_var_gp, np.squeeze(f_var_gpflow), rtol=1e-4) + + +# @pytest.mark.parametrize('var_f', [0.5, 1.5]) +# @pytest.mark.parametrize('len_f', [0.75, 1.25]) +# @pytest.mark.parametrize('var_y', [0.1]) # , 0.05]) +# @pytest.mark.parametrize('N', [8]) # , 16]) +# def test_gradient_step(var_f, len_f, var_y, N): +# """ +# test whether VI with newt's GP and MarkovGP provide the same initial gradient step in the hyperparameters +# """ +# +# x, Y, t, R, y = build_data(N) +# +# gp_model = initialise_gp_model(var_f, len_f, var_y, x, y, R[0]) +# markovgp_model = initialise_markovgp_model(var_f, len_f, var_y, x, y, R[0]) +# gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, x, y) +# +# gv = objax.GradValues(inf, gp_model.vars()) +# gv_markov = objax.GradValues(inf, markovgp_model.vars()) +# +# lr_adam = 0.1 +# lr_newton = 1. 
+# opt = objax.optimizer.Adam(gp_model.vars()) +# opt_markov = objax.optimizer.Adam(markovgp_model.vars()) +# +# gp_model.update_posterior() +# gp_grads, gp_value = gv(gp_model, lr=lr_newton) +# gp_loss_ = gp_value[0] +# opt(lr_adam, gp_grads) +# gp_hypers = np.array([gp_model.kernel.temporal_kernel.lengthscale, +# gp_model.kernel.temporal_kernel.variance, +# gp_model.kernel.spatial_kernel.lengthscale, +# gp_model.likelihood.variance]) +# print(gp_hypers) +# print(gp_grads) +# +# markovgp_model.update_posterior() +# markovgp_grads, markovgp_value = gv_markov(markovgp_model, lr=lr_newton) +# markovgp_loss_ = markovgp_value[0] +# opt_markov(lr_adam, markovgp_grads) +# markovgp_hypers = np.array([markovgp_model.kernel.temporal_kernel.lengthscale, +# markovgp_model.kernel.temporal_kernel.variance, +# markovgp_model.kernel.spatial_kernel.lengthscale, +# markovgp_model.likelihood.variance]) +# +# print(markovgp_hypers) +# print(markovgp_grads) +# +# np.testing.assert_allclose(gp_grads[0], markovgp_grads[0], rtol=1e-4) +# np.testing.assert_allclose(gp_grads[1], markovgp_grads[1], rtol=1e-4) +# np.testing.assert_allclose(gp_grads[2], markovgp_grads[2], rtol=1e-4) + + +# @pytest.mark.parametrize('var_f', [0.5, 1.5]) +# @pytest.mark.parametrize('len_f', [0.75, 2.5]) +# @pytest.mark.parametrize('var_y', [0.1, 0.5]) +# @pytest.mark.parametrize('N', [8, 16]) +# def test_inference_step(var_f, len_f, var_y, N): +# """ +# test whether VI with newt's GP and MarkovGP give the same posterior after one natural gradient step +# """ +# +# x, Y, t, R, y = build_data(N) +# +# gp_model = initialise_gp_model(var_f, len_f, var_y, x, y, R[0]) +# markovgp_model = initialise_markovgp_model(var_f, len_f, var_y, x, y, R[0]) +# +# lr_newton = 1. 
+# +# gp_model.update_posterior() +# gp_loss = inf(gp_model, lr=lr_newton) # update variational params +# gp_model.update_posterior() +# +# markovgp_model.update_posterior() +# markovgp_loss = inf(markovgp_model, lr=lr_newton) # update variational params +# markovgp_model.update_posterior() +# +# np.testing.assert_allclose(gp_model.posterior_mean.value, markovgp_model.posterior_mean.value, rtol=1e-4) +# np.testing.assert_allclose(gp_model.posterior_variance.value, markovgp_model.posterior_variance.value, rtol=1e-4) + +# N = 5 +# x, Y, t, R, y = build_data(N) +# +# var_f = 0.5 +# len_f = 0.75 +# var_y = 0.1 +# +# gp_model = initialise_gp_model(var_f, len_f, var_y, x, y, R[0]) +# markovgp_model = initialise_markovgp_model(var_f, len_f, var_y, x, y, R[0]) +# gpflow_model = initialise_gpflow_model(var_f, len_f, var_y, x, y) +# +# lr_newton = 1. +# +# gp_model.update_posterior() +# f_mean_gp, f_var_gp = gp_model.predict(x) +# gp_loss = inf(gp_model, lr=lr_newton) # update variational params +# print(gp_loss) +# # gp_model.update_posterior() +# +# markovgp_model.update_posterior() +# f_mean_markovgp, f_var_markobgp = markovgp_model.predict(x) +# markovgp_loss = inf(markovgp_model, lr=lr_newton) # update variational params +# print(markovgp_loss) +# # markovgp_model.update_posterior() +# +# data = (x, y[:, None]) +# # f_mean_gpflow, f_var_gpflow = gpflow_model.predict_f(x) +# f_mean_gpflow, f_var_gpflow = gpflow_model.predict_f(x[None], full_cov=False, full_output_cov=False) +# # var_exp = np.sum(gpflow_model.likelihood.variational_expectations(f_mean_gpflow, f_var_gpflow, y[:, None])) +# loss_gpflow = -gpflow_model.elbo(data) +# print(loss_gpflow.numpy()) +# +# np.testing.assert_almost_equal(gp_loss, markovgp_loss, decimal=2) +# np.testing.assert_almost_equal(gp_loss, loss_gpflow.numpy(), decimal=2) +# +# np.testing.assert_allclose(gp_model.posterior_variance.value, markovgp_model.posterior_variance.value, rtol=1e-4) +# +# np.testing.assert_allclose(f_var_gp, 
np.squeeze(f_var_gpflow), rtol=1e-4) diff --git a/newt/utils.py b/newt/utils.py new file mode 100644 index 0000000..576a745 --- /dev/null +++ b/newt/utils.py @@ -0,0 +1,615 @@ +import jax.numpy as np +import numpy as nnp +import objax +from jax import vmap +from jax.ops import index_add, index +from jax.scipy.linalg import cho_factor, cho_solve, block_diag +from typing import Optional, Callable, Tuple, Union +from objax.module import Module +from objax.variable import BaseState, TrainVar, VarCollection +import math + +LOG2PI = math.log(2 * math.pi) +INV2PI = (2 * math.pi) ** -1 + + +def solve(P, Q): + """ + Compute P^-1 Q, where P is a PSD matrix, using the Cholesky factorisation + """ + L = cho_factor(P) + return cho_solve(L, Q) + + +def inv(P): + """ + Compute the inverse of a PSD matrix using the Cholesky factorisation + """ + L = cho_factor(P) + return cho_solve(L, np.eye(P.shape[-1])) + + +def diag(P): + """ + a broadcastable version of np.diag, for when P is size [N, D, D] + """ + return vmap(np.diag)(P) + + +def transpose(P): + return np.swapaxes(P, -1, -2) + + +def softplus(x_): + # return np.log(1 + np.exp(x_)) + return np.log(1 + np.exp(-np.abs(x_))) + np.maximum(x_, 0) # safer version + + +def sigmoid(x_): + return np.exp(x_) / (np.exp(x_) + 1.) + + +def softplus_inv(x_): + """ + Inverse of the softplus positiviy mapping, used for transforming parameters. + """ + if x_ is None: + return x_ + else: + # return np.log(np.exp(x_) - 1) + return np.log(1 - np.exp(-np.abs(x_))) + np.maximum(x_, 0) # safer version + + +def ensure_positive_precision(K): + """ + Check whether matrix K has positive diagonal elements. + If not, then replace the negative elements with default value 0.01 + """ + K_diag = diag(diag(K)) + K = np.where(np.any(diag(K) < 0), np.where(K_diag < 0, 1e-2, K_diag), K) + return K + + +def ensure_diagonal_positive_precision(K): + """ + Return a diagonal matrix with all positive values. 
+ """ + K_diag = diag(diag(K)) + K = np.where(K_diag < 0, 1e-2, K_diag) + return K + + +def predict_from_state(x_test, ind, x, post_mean, post_cov, gain, kernel): + """ + wrapper function to vectorise predict_at_t_() + """ + predict_from_state_func = vmap( + predict_from_state_, (0, 0, None, None, None, None, None) + ) + return predict_from_state_func(x_test, ind, x, post_mean, post_cov, gain, kernel) + + +def predict_from_state_(x_test, ind, x, post_mean, post_cov, gain, kernel): + """ + predict the state distribution at time t by projecting from the neighbouring inducing states + """ + P, T = compute_conditional_statistics(x_test, x, kernel, ind) + # joint posterior (i.e. smoothed) mean and covariance of the states [u_, u+] at time t: + mean_joint = np.block([[post_mean[ind]], + [post_mean[ind + 1]]]) + cross_cov = gain[ind] @ post_cov[ind + 1] + cov_joint = np.block([[post_cov[ind], cross_cov], + [cross_cov.T, post_cov[ind + 1]]]) + return P @ mean_joint, P @ cov_joint @ P.T + T + + +def temporal_conditional(X, X_test, mean, cov, gain, kernel): + """ + predict from time X to time X_test give state mean and covariance at X + """ + Pinf = kernel.stationary_covariance()[None, ...] 
+ minf = np.zeros([1, Pinf.shape[1], 1]) + mean_aug = np.concatenate([minf, mean, minf]) + cov_aug = np.concatenate([Pinf, cov, Pinf]) + gain = np.concatenate([np.zeros_like(gain[:1]), gain]) + + # figure out which two training states each test point is located between + ind_test = np.searchsorted(X.reshape(-1, ), X_test.reshape(-1, )) - 1 + + # project from training states to test locations + test_mean, test_cov = predict_from_state(X_test, ind_test, X, mean_aug, cov_aug, gain, kernel) + + return test_mean, test_cov + + +def predict_from_state_infinite_horizon(x_test, ind, x, post_mean, kernel): + """ + wrapper function to vectorise predict_at_t_() + """ + predict_from_state_func = vmap( + predict_from_state_infinite_horizon_, (0, 0, None, None, None) + ) + return predict_from_state_func(x_test, ind, x, post_mean, kernel) + + +def predict_from_state_infinite_horizon_(x_test, ind, x, post_mean, kernel): + """ + predict the state distribution at time t by projecting from the neighbouring inducing states + """ + P, T = compute_conditional_statistics(x_test, x, kernel, ind) + # joint posterior (i.e. smoothed) mean and covariance of the states [u_, u+] at time t: + mean_joint = np.block([[post_mean[ind]], + [post_mean[ind + 1]]]) + return P @ mean_joint + + +def temporal_conditional_infinite_horizon(X, X_test, mean, cov, gain, kernel): + """ + predict from time X to time X_test give state mean and covariance at X + """ + Pinf = kernel.stationary_covariance()[None, ...] 
+ minf = np.zeros([1, Pinf.shape[1], 1]) + mean_aug = np.concatenate([minf, mean, minf]) + + # figure out which two training states each test point is located between + ind_test = np.searchsorted(X.reshape(-1, ), X_test.reshape(-1, )) - 1 + + # project from training states to test locations + test_mean = predict_from_state_infinite_horizon(X_test, ind_test, X, mean_aug, kernel) + + return test_mean, np.tile(cov[0], [test_mean.shape[0], 1, 1]) + + +def compute_conditional_statistics(x_test, x, kernel, ind): + """ + This version uses cho_factor and cho_solve - much more efficient when using JAX + + Predicts marginal states at new time points. (new time points should be sorted) + Calculates the conditional density: + p(xโ‚™|uโ‚‹, uโ‚Š) = ๐“(Pโ‚™ @ [uโ‚‹, uโ‚Š], Tโ‚™) + + :param x_test: time points to generate observations for [N] + :param x: inducing state input locations [M] + :param kernel: prior object providing access to state transition functions + :param ind: an array containing the index of the inducing state to the left of every input [N] + :return: parameters for the conditional mean and covariance + P: [N, D, 2*D] + T: [N, D, D] + """ + dt_fwd = x_test[..., 0] - x[ind, 0] + dt_back = x[ind + 1, 0] - x_test[..., 0] + A_fwd = kernel.state_transition(dt_fwd) + A_back = kernel.state_transition(dt_back) + Pinf = kernel.stationary_covariance() + Q_fwd = Pinf - A_fwd @ Pinf @ A_fwd.T + Q_back = Pinf - A_back @ Pinf @ A_back.T + A_back_Q_fwd = A_back @ Q_fwd + Q_mp = Q_back + A_back @ A_back_Q_fwd.T + + jitter = 1e-8 * np.eye(Q_mp.shape[0]) + chol_Q_mp = cho_factor(Q_mp + jitter) + Q_mp_inv_A_back = cho_solve(chol_Q_mp, A_back) # V = Qโ‚‹โ‚Šโปยน Aโ‚œโ‚Š + + # The conditional_covariance T = Qโ‚‹โ‚œ - Qโ‚‹โ‚œAโ‚œโ‚Šแต€Qโ‚‹โ‚ŠโปยนAโ‚œโ‚ŠQโ‚‹โ‚œ == Qโ‚‹โ‚œ - Qโ‚‹โ‚œแต€Aโ‚œโ‚Šแต€Lโปแต€LโปยนAโ‚œโ‚ŠQโ‚‹โ‚œ + T = Q_fwd - A_back_Q_fwd.T @ Q_mp_inv_A_back @ Q_fwd + # W = Qโ‚‹โ‚œAโ‚œโ‚Šแต€Qโ‚‹โ‚Šโปยน + W = Q_fwd @ Q_mp_inv_A_back.T + P = np.concatenate([A_fwd - W @ 
A_back @ A_fwd, W], axis=-1) + return P, T + + +def sum_natural_params_by_group(carry, inputs): + ind_m, nat1_m, nat2_m = inputs + nat1s, nat2s, count = carry + nat1s = index_add(nat1s, index[ind_m], nat1_m) + nat2s = index_add(nat2s, index[ind_m], nat2_m) + count = index_add(count, index[ind_m], 1.0) + return (nat1s, nat2s, count), 0. + + +def count_indices(carry, inputs): + ind_m = inputs + count = carry + count = index_add(count, index[ind_m], 1.0) + return count, 0. + + +def input_admin(t, y, r): + """ + Order the inputs. + :param t: training inputs [N, 1] + :param y: observations at the training inputs [N, 1] + :param r: training spatial inputs + :return: + t_train: training inputs [N, 1] + y_train: training observations [N, R] + r_train: training spatial inputs [N, R] + dt_train: training step sizes, ฮ”tโ‚™ = tโ‚™ - tโ‚™โ‚‹โ‚ [N, 1] + """ + assert t.shape[0] == y.shape[0] + if t.ndim < 2: + t = nnp.expand_dims(t, 1) # make 2-D + if y.ndim < 2: + y = nnp.expand_dims(y, 1) # make 2-D + if r is None: + if t.shape[1] > 1: + r = t[:, 1:] + t = t[:, :1] + else: + r = nnp.nan * t # np.empty((1,) + x.shape[1:]) * np.nan + if r.ndim < 2: + r = nnp.expand_dims(r, 1) # make 2-D + ind = nnp.argsort(t[:, 0], axis=0) + t_train = t[ind, ...] + y_train = y[ind, ...] + r_train = r[ind, ...] + dt_train = nnp.concatenate([np.array([0.0]), nnp.diff(t_train[:, 0])]) + return (np.array(t_train, dtype=np.float64), np.array(y_train, dtype=np.float64), + np.array(r_train, dtype=np.float64), np.array(dt_train, dtype=np.float64)) + + +def create_spatiotemporal_grid(X, Y): + """ + create a grid of data sized [T, R1, R2] + note that this function removes full duplicates (i.e. 
where all dimensions match) + TODO: generalise to >5D + """ + if Y.ndim < 2: + Y = Y[:, None] + num_spatial_dims = X.shape[1] - 1 + if num_spatial_dims == 4: + sort_ind = nnp.lexsort((X[:, 4], X[:, 3], X[:, 2], X[:, 1], X[:, 0])) # sort by 0, 1, 2, 4 + elif num_spatial_dims == 3: + sort_ind = nnp.lexsort((X[:, 3], X[:, 2], X[:, 1], X[:, 0])) # sort by 0, 1, 2, 3 + elif num_spatial_dims == 2: + sort_ind = nnp.lexsort((X[:, 2], X[:, 1], X[:, 0])) # sort by 0, 1, 2 + elif num_spatial_dims == 1: + sort_ind = nnp.lexsort((X[:, 1], X[:, 0])) # sort by 0, 1 + else: + raise NotImplementedError + X = X[sort_ind] + Y = Y[sort_ind] + unique_time = np.unique(X[:, 0]) + unique_space = nnp.unique(X[:, 1:], axis=0) + N_t = unique_time.shape[0] + N_r = unique_space.shape[0] + if num_spatial_dims == 4: + R = np.tile(unique_space, [N_t, 1, 1, 1, 1]) + elif num_spatial_dims == 3: + R = np.tile(unique_space, [N_t, 1, 1, 1]) + elif num_spatial_dims == 2: + R = np.tile(unique_space, [N_t, 1, 1]) + elif num_spatial_dims == 1: + R = np.tile(unique_space, [N_t, 1]) + else: + raise NotImplementedError + R_flat = R.reshape(-1, num_spatial_dims) + Y_dummy = np.nan * np.zeros([N_t * N_r, 1]) + time_duplicate = np.tile(unique_time, [N_r, 1]).T.flatten() + X_dummy = np.block([time_duplicate[:, None], R_flat]) + X_all = np.vstack([X, X_dummy]) + Y_all = np.vstack([Y, Y_dummy]) + X_unique, ind = nnp.unique(X_all, axis=0, return_index=True) + Y_unique = Y_all[ind] + grid_shape = (unique_time.shape[0], ) + unique_space.shape + R_grid = X_unique[:, 1:].reshape(grid_shape) + Y_grid = Y_unique.reshape(grid_shape[:-1] + (1, )) + return unique_time[:, None], R_grid, Y_grid + + +def discretegrid(xy, w, nt): + """ + Convert spatial observations to a discrete intensity grid + :param xy: observed spatial locations as a two-column vector + :param w: observation window, i.e. 
discrete grid to be mapped to, [xmin xmax ymin ymax] + :param nt: two-element vector defining number of bins in both directions + """ + # Make grid + x = nnp.linspace(w[0], w[1], nt[0] + 1) + y = nnp.linspace(w[2], w[3], nt[1] + 1) + X, Y = nnp.meshgrid(x, y) + + # Count points + N = nnp.zeros([nt[1], nt[0]]) + for i in range(nt[0]): + for j in range(nt[1]): + ind = (xy[:, 0] >= x[i]) & (xy[:, 0] < x[i + 1]) & (xy[:, 1] >= y[j]) & (xy[:, 1] < y[j + 1]) + N[j, i] = nnp.sum(ind) + return X[:-1, :-1].T, Y[:-1, :-1].T, N.T + + +def gaussian_log_expected_lik(y, post_mean, post_cov, obs_var): + """ + Calculates the log partition function: + logZโ‚™ = log โˆซ ๐“(yโ‚™|fโ‚™,ฯƒยฒ) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ = E[๐“(yโ‚™|fโ‚™,ฯƒยฒ)] + :param y: data / observation (yโ‚™) + :param post_mean: posterior mean (mโ‚™) + :param post_cov: posterior variance (vโ‚™) + :param obs_var: variance, ฯƒยฒ, of the Gaussian observation model p(yโ‚™|fโ‚™)=๐“(yโ‚™|fโ‚™,ฯƒยฒ) + :return: + lZ: the log partition function, logZโ‚™ [scalar] + """ + post_mean = post_mean.reshape(-1, 1) + post_var = np.diag(post_cov).reshape(-1, 1) + y = y.reshape(-1, 1) + obs_var = np.diag(obs_var).reshape(-1, 1) + + var = obs_var + post_var + prec = 1 / var + # version which computes sum and outputs scalar + # lZ = ( + # -0.5 * y.shape[-2] * np.log(2 * np.pi) + # - 0.5 * np.sum((y - post_mean) * prec * (y - post_mean)) + # - 0.5 * np.sum(np.log(np.maximum(var, 1e-10))) + # ) + # version which computes individual parts and outputs vector + lZ = ( + -0.5 * np.log(2 * np.pi) + - 0.5 * (y - post_mean) * prec * (y - post_mean) + - 0.5 * np.log(np.maximum(var, 1e-10)) + ) + return lZ + + +def log_chol_matrix_det(chol): + val = np.square(np.diag(chol)) + return np.sum(np.log(val)) + + +def mvn_logpdf(x, mean, cov, mask=None): + """ + evaluate a multivariate Gaussian (log) pdf + """ + if mask is not None: + # build a mask for computing the log likelihood of a partially observed multivariate Gaussian + maskv = 
mask.reshape(-1, 1) + mean = np.where(maskv, x, mean) + cov_masked = np.where(maskv + maskv.T, 0., cov) # ensure masked entries are independent + cov = np.where(np.diag(mask), INV2PI, cov_masked) # ensure masked entries return log like of 0 + + n = mean.shape[0] + cho, low = cho_factor(cov) + log_det = 2 * np.sum(np.log(np.abs(np.diag(cho)))) + diff = x - mean + scaled_diff = cho_solve((cho, low), diff) + distance = diff.T @ scaled_diff + return np.squeeze(-0.5 * (distance + n * LOG2PI + log_det)) + + +def pep_constant(var, power, mask=None): + dim = var.shape[1] + chol = np.linalg.cholesky(var) + log_diag_chol = np.log(np.abs(np.diag(chol))) + + if mask is not None: + log_diag_chol = np.where(mask, 0., log_diag_chol) + dim -= np.sum(np.array(mask, dtype=int)) + + logdetvar = 2 * np.sum(log_diag_chol) + constant = ( + 0.5 * dim * ((1 - power) * LOG2PI - np.log(power)) + + 0.5 * (1 - power) * logdetvar + ) + return constant + + +def mvn_logpdf_and_derivs(x, mean, cov, mask=None): + """ + evaluate a multivariate Gaussian (log) pdf and compute its derivatives w.r.t. 
the mean + """ + if mask is not None: + # build a mask for computing the log likelihood of a partially observed multivariate Gaussian + maskv = mask.reshape(-1, 1) + mean = np.where(maskv, x, mean) + cov_masked = np.where(maskv + maskv.T, 0., cov) # ensure masked entries are independent + cov = np.where(np.diag(mask), INV2PI, cov_masked) # ensure masked entries return log like of 0 + + n = mean.shape[0] + cho, low = cho_factor(cov) + precision = cho_solve((cho, low), np.eye(cho.shape[1])) # second derivative + log_det = 2 * np.sum(np.log(np.abs(np.diag(cho)))) + diff = x - mean + scaled_diff = precision @ diff # first derivative + distance = diff.T @ scaled_diff + return np.squeeze(-0.5 * (distance + n * LOG2PI + log_det)), scaled_diff, -precision + + +def _gaussian_expected_log_lik(y, post_mean, post_cov, var): + post_mean = post_mean.reshape(-1, 1) + post_cov = post_cov.reshape(-1, 1) + y = y.reshape(-1, 1) + var = var.reshape(-1, 1) + # version which computes sum and outputs scalar + # exp_log_lik = ( + # -0.5 * y.shape[-2] * np.log(2 * np.pi) # multiplier based on dimensions needed if taking sum of other terms + # - 0.5 * np.sum(np.log(var)) + # - 0.5 * np.sum(((y - post_mean) ** 2 + post_cov) / var) + # ) + # version which computes individual parts and outputs vector + exp_log_lik = ( + -0.5 * np.log(2 * np.pi) + - 0.5 * np.log(var) + - 0.5 * ((y - post_mean) ** 2 + post_cov) / var + ) + return exp_log_lik + + +def gaussian_expected_log_lik_diag(y, post_mean, post_cov, var): + """ + Computes the "variational expectation", i.e. the + expected log-likelihood, and its derivatives w.r.t. 
the posterior mean + E[log ๐“(yโ‚™|fโ‚™,ฯƒยฒ)] = โˆซ log ๐“(yโ‚™|fโ‚™,ฯƒยฒ) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + :param y: data / observation (yโ‚™) + :param post_mean: posterior mean (mโ‚™) + :param post_cov: posterior variance (vโ‚™) + :param var: variance, ฯƒยฒ, of the Gaussian observation model p(yโ‚™|fโ‚™)=๐“(yโ‚™|fโ‚™,ฯƒยฒ) + :return: + exp_log_lik: the expected log likelihood, E[log ๐“(yโ‚™|fโ‚™,var)] [scalar] + """ + post_cov = diag(post_cov) + var = diag(var) + var_exp = vmap(_gaussian_expected_log_lik)(y, post_mean, post_cov, var) + # return np.sum(var_exp) + return var_exp + + +def gaussian_expected_log_lik(Y, q_mu, q_covar, noise, mask=None): + """ + :param Y: N x 1 + :param q_mu: N x 1 + :param q_covar: N x N + :param noise: N x N + :param mask: N x 1 + :return: + E[log ๐“(yโ‚™|fโ‚™,ฯƒยฒ)] = โˆซ log ๐“(yโ‚™|fโ‚™,ฯƒยฒ) ๐“(fโ‚™|mโ‚™,vโ‚™) dfโ‚™ + """ + + if mask is not None: + # build a mask for computing the log likelihood of a partially observed multivariate Gaussian + maskv = mask.reshape(-1, 1) + q_mu = np.where(maskv, Y, q_mu) + noise = np.where(maskv + maskv.T, 0., noise) # ensure masked entries are independent + noise = np.where(np.diag(mask), INV2PI, noise) # ensure masked entries return log like of 0 + q_covar = np.where(maskv + maskv.T, 0., q_covar) # ensure masked entries are independent + q_covar = np.where(np.diag(mask), 1e-20, q_covar) # ensure masked entries return trace term of 0 + + ml = mvn_logpdf(Y, q_mu, noise) + trace_term = -0.5 * np.trace(solve(noise, q_covar)) + return ml + trace_term + + +def compute_cavity(post_mean, post_cov, site_nat1, site_nat2, power, jitter=1e-8): + """ + remove local likelihood approximation from the posterior to obtain the marginal cavity distribution + """ + post_nat2 = inv(post_cov + jitter * np.eye(post_cov.shape[1])) + cav_cov = inv(post_nat2 - power * site_nat2) # cavity covariance + cav_mean = cav_cov @ (post_nat2 @ post_mean - power * site_nat1) # cavity mean + return cav_mean, cav_cov + + 
+def build_joint(ind, mean, cov, smoother_gain): + """ + joint posterior (i.e. smoothed) mean and covariance of the states [u_, u+] at time t + """ + mean_joint = np.block([[mean[ind]], + [mean[ind + 1]]]) + cross_cov = smoother_gain[ind] @ cov[ind + 1] + cov_joint = np.block([[cov[ind], cross_cov], + [cross_cov.T, cov[ind + 1]]]) + return mean_joint, cov_joint + + +def set_z_stats(t, z): + ind = (np.searchsorted(z.reshape(-1, ), t[:, :1].reshape(-1, )) - 1) + num_neighbours = np.array([np.sum(ind == m) for m in range(z.shape[0] - 1)]) + return ind, num_neighbours + + +def gaussian_first_derivative_wrt_mean(f, m, C, w): + invC = inv(C) + return invC @ (f - m) * w + + +def gaussian_second_derivative_wrt_mean(f, m, C, w): + invC = inv(C) + return (invC @ (f - m) @ (f - m).T @ invC - invC) * w + + +def scaled_squared_euclid_dist(X, X2, ell): + """ + Returns โ€–(X - X2แต€) / โ„“โ€–ยฒ, i.e. the squared Lโ‚‚-norm. + Adapted from GPflow: https://github.com/GPflow/GPflow + """ + return square_distance(X / ell, X2 / ell) + + +def square_distance(X, X2): + """ + Adapted from GPflow: https://github.com/GPflow/GPflow + + Returns ||X - X2แต€||ยฒ + Due to the implementation and floating-point imprecision, the + result may actually be very slightly negative for entries very + close to each other. + + This function can deal with leading dimensions in X and X2. + In the sample case, where X and X2 are both 2 dimensional, + for example, X is [N, D] and X2 is [M, D], then a tensor of shape + [N, M] is returned. If X is [N1, S1, D] and X2 is [N2, S2, D] + then the output will be [N1, S1, N2, S2]. + """ + Xs = np.sum(np.square(X), axis=-1) + X2s = np.sum(np.square(X2), axis=-1) + dist = -2 * np.tensordot(X, X2, [[-1], [-1]]) + dist += broadcasting_elementwise(np.add, Xs, X2s) + return dist + + +def broadcasting_elementwise(op, a, b): + """ + Adapted from GPflow: https://github.com/GPflow/GPflow + + Apply binary operation `op` to every pair in tensors `a` and `b`. 
+ + :param op: binary operator on tensors, e.g. tf.add, tf.substract + :param a: tf.Tensor, shape [n_1, ..., n_a] + :param b: tf.Tensor, shape [m_1, ..., m_b] + :return: tf.Tensor, shape [n_1, ..., n_a, m_1, ..., m_b] + """ + flatres = op(np.reshape(a, [-1, 1]), np.reshape(b, [1, -1])) + return flatres.reshape(a.shape[0], b.shape[0]) + + +def rotation_matrix(dt, omega): + """ + Discrete time rotation matrix + :param dt: step size [1] + :param omega: frequency [1] + :return: + R: rotation matrix [2, 2] + """ + R = np.array([ + [np.cos(omega * dt), -np.sin(omega * dt)], + [np.sin(omega * dt), np.cos(omega * dt)] + ]) + return R + + +def get_meanfield_block_index(kernel): + Pinf = kernel.stationary_covariance_meanfield() + num_latents = Pinf.shape[0] + sub_state_dim = Pinf.shape[1] + state = np.ones([sub_state_dim, sub_state_dim]) + for i in range(1, num_latents): + state = block_diag(state, np.ones([sub_state_dim, sub_state_dim])) + block_index = np.where(np.array(state, dtype=bool)) + return block_index + + +class GradValuesAux(objax.GradValues): + """ + an exact copy of objax.GradValues, but with the output converted to an array and multiplied by a scale which + accounts for the effect of batching + """ + def __init__(self, f: Union[Module, Callable], + variables: Optional[VarCollection], + input_argnums: Optional[Tuple[int, ...]] = None, + scale: Optional[float] = 1.): + self.scale = scale + super().__init__(f=f, + variables=variables, + input_argnums=input_argnums) + + def __call__(self, *args, **kwargs): + """Returns the computed gradients for the first value returned by `f` and the values returned by `f`. 
+ + Returns: + A tuple (gradients , values of f]), where gradients is a list containing + the input gradients, if any, followed by the variable gradients.""" + inputs = [args[i] for i in self.input_argnums] + g, (outputs, changes) = self._call(inputs + self.vc.subset(TrainVar).tensors(), + self.vc.subset(BaseState).tensors(), + list(args), kwargs) + self.vc.assign(changes) + return g, self.scale * np.asarray(outputs[0]), outputs[1:][0] diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..1b5f1a9 --- /dev/null +++ b/setup.py @@ -0,0 +1,12 @@ +from setuptools import setup, find_packages + +__version__ = "0.0.0" + +setup( + name='newt', + version=__version__, + packages=find_packages(exclude=["examples"]), + python_requires='>=3.6', + url='https://github.com/AaltoML/Newt', + license='Apache-2.0', +)