From 3ee1633207417a3e7d1c8f699bb49e15662923d5 Mon Sep 17 00:00:00 2001 From: Ole Streicher Date: Wed, 24 Sep 2014 13:49:35 +0100 Subject: [PATCH] Import python-astropy_0.4.2.orig.tar.gz [dgit import orig python-astropy_0.4.2.orig.tar.gz] --- CHANGES.rst | 2947 ++ PKG-INFO | 29 + README.rst | 45 + ah_bootstrap.py | 748 + astropy/__init__.py | 270 + astropy/astropy.cfg | 157 + astropy/config/__init__.py | 13 + astropy/config/affiliated.py | 9 + astropy/config/configuration.py | 1067 + astropy/config/paths.py | 185 + astropy/config/setup_package.py | 11 + astropy/config/tests/__init__.py | 2 + astropy/config/tests/data/alias.cfg | 2 + astropy/config/tests/data/astropy.0.3.cfg | 149 + .../config/tests/data/astropy.0.3.windows.cfg | 149 + astropy/config/tests/data/deprecated.cfg | 2 + astropy/config/tests/data/empty.cfg | 15 + astropy/config/tests/data/not_empty.cfg | 15 + astropy/config/tests/test_configs.py | 334 + astropy/conftest.py | 7 + astropy/constants/__init__.py | 54 + astropy/constants/cgs.py | 28 + astropy/constants/constant.py | 220 + astropy/constants/setup_package.py | 5 + astropy/constants/si.py | 154 + astropy/constants/tests/__init__.py | 2 + astropy/constants/tests/test_constant.py | 127 + astropy/convolution/__init__.py | 15 + astropy/convolution/boundary_extend.c | 8608 +++++ astropy/convolution/boundary_extend.pyx | 264 + astropy/convolution/boundary_fill.c | 8737 +++++ astropy/convolution/boundary_fill.pyx | 267 + astropy/convolution/boundary_none.c | 8291 +++++ astropy/convolution/boundary_none.pyx | 233 + astropy/convolution/boundary_wrap.c | 8608 +++++ astropy/convolution/boundary_wrap.pyx | 261 + astropy/convolution/convolve.py | 565 + astropy/convolution/core.py | 381 + astropy/convolution/kernels.py | 949 + astropy/convolution/setup_package.py | 5 + astropy/convolution/tests/__init__.py | 0 astropy/convolution/tests/test_convolve.py | 487 + .../convolution/tests/test_convolve_fft.py | 471 + .../tests/test_convolve_kernels.py | 124 + 
.../convolution/tests/test_convolve_speeds.py | 184 + astropy/convolution/tests/test_discretize.py | 106 + .../convolution/tests/test_kernel_class.py | 424 + astropy/convolution/utils.py | 275 + astropy/coordinates/__init__.py | 23 + astropy/coordinates/angle_lextab.py | 14 + astropy/coordinates/angle_parsetab.py | 65 + astropy/coordinates/angle_utilities.py | 684 + astropy/coordinates/angles.py | 768 + astropy/coordinates/baseframe.py | 1003 + astropy/coordinates/builtin_frames.py | 619 + astropy/coordinates/distances.py | 473 + astropy/coordinates/earth.py | 282 + astropy/coordinates/earth_orientation.py | 418 + astropy/coordinates/errors.py | 165 + astropy/coordinates/matching.py | 189 + astropy/coordinates/name_resolve.py | 193 + astropy/coordinates/representation.py | 707 + astropy/coordinates/setup_package.py | 9 + astropy/coordinates/sky_coordinate.py | 986 + astropy/coordinates/tests/__init__.py | 2 + .../coordinates/tests/accuracy/__init__.py | 4 + .../tests/accuracy/fk4_no_e_fk4.csv | 202 + .../tests/accuracy/fk4_no_e_fk5.csv | 202 + .../tests/accuracy/galactic_fk4.csv | 202 + .../tests/accuracy/generate_ref_ast.py | 255 + .../coordinates/tests/accuracy/icrs_fk5.csv | 202 + .../tests/accuracy/test_fk4_no_e_fk4.py | 63 + .../tests/accuracy/test_fk4_no_e_fk5.py | 65 + .../tests/accuracy/test_galactic_fk4.py | 61 + .../tests/accuracy/test_icrs_fk5.py | 61 + astropy/coordinates/tests/test_angles.py | 817 + .../tests/test_angular_separation.py | 108 + astropy/coordinates/tests/test_api_ape5.py | 446 + astropy/coordinates/tests/test_arrays.py | 291 + astropy/coordinates/tests/test_distance.py | 258 + astropy/coordinates/tests/test_earth.py | 252 + astropy/coordinates/tests/test_formatting.py | 136 + astropy/coordinates/tests/test_frames.py | 497 + astropy/coordinates/tests/test_matching.py | 116 + .../coordinates/tests/test_name_resolve.py | 160 + astropy/coordinates/tests/test_pickle.py | 15 + .../coordinates/tests/test_representation.py | 933 + 
astropy/coordinates/tests/test_sky_coord.py | 710 + .../coordinates/tests/test_transformations.py | 279 + astropy/coordinates/transformations.py | 924 + astropy/cosmology/__init__.py | 13 + astropy/cosmology/core.py | 2462 ++ astropy/cosmology/funcs.py | 417 + astropy/cosmology/parameters.py | 130 + astropy/cosmology/setup_package.py | 5 + astropy/cosmology/tests/__init__.py | 0 astropy/cosmology/tests/test_cosmology.py | 1086 + astropy/cython_version.py | 2 + astropy/extern/__init__.py | 10 + astropy/extern/bundled/__init__.py | 0 astropy/extern/bundled/six.py | 632 + astropy/extern/configobj.py | 17 + astropy/extern/configobj_py2/__init__.py | 100 + astropy/extern/configobj_py2/configobj.py | 2468 ++ astropy/extern/configobj_py2/validate.py | 1450 + astropy/extern/configobj_py3/__init__.py | 104 + astropy/extern/configobj_py3/configobj.py | 2405 ++ astropy/extern/configobj_py3/validate.py | 1419 + astropy/extern/js/jquery-1.11.0.js | 10337 ++++++ astropy/extern/js/jquery.dataTables.js | 12099 +++++++ astropy/extern/ply/__init__.py | 4 + astropy/extern/ply/cpp.py | 898 + astropy/extern/ply/ctokens.py | 133 + astropy/extern/ply/lex.py | 1052 + astropy/extern/ply/yacc.py | 3271 ++ astropy/extern/pytest.py | 3211 ++ astropy/extern/setup_package.py | 13 + astropy/extern/six.py | 56 + astropy/io/__init__.py | 5 + astropy/io/ascii/__init__.py | 37 + astropy/io/ascii/basic.py | 280 + astropy/io/ascii/cds.py | 325 + astropy/io/ascii/connect.py | 70 + astropy/io/ascii/core.py | 1137 + astropy/io/ascii/daophot.py | 238 + astropy/io/ascii/fixedwidth.py | 368 + astropy/io/ascii/html.py | 349 + astropy/io/ascii/ipac.py | 462 + astropy/io/ascii/latex.py | 397 + astropy/io/ascii/setup_package.py | 70 + astropy/io/ascii/sextractor.py | 128 + astropy/io/ascii/tests/__init__.py | 0 astropy/io/ascii/tests/common.py | 113 + astropy/io/ascii/tests/t/apostrophe.rdb | 6 + astropy/io/ascii/tests/t/apostrophe.tab | 3 + astropy/io/ascii/tests/t/bad.txt | 6 + 
astropy/io/ascii/tests/t/bars_at_ends.txt | 4 + astropy/io/ascii/tests/t/cds.dat | 38 + astropy/io/ascii/tests/t/cds/glob/ReadMe | 572 + .../io/ascii/tests/t/cds/glob/lmxbrefs.dat | 291 + astropy/io/ascii/tests/t/cds/multi/ReadMe | 64 + .../io/ascii/tests/t/cds/multi/lhs2065.dat | 18 + .../io/ascii/tests/t/cds/multi/lp944-20.dat | 18 + astropy/io/ascii/tests/t/cds2.dat | 287 + astropy/io/ascii/tests/t/cds_malformed.dat | 37 + astropy/io/ascii/tests/t/commented_header.dat | 4 + .../io/ascii/tests/t/commented_header2.dat | 5 + astropy/io/ascii/tests/t/continuation.dat | 4 + astropy/io/ascii/tests/t/daophot.dat | 45 + astropy/io/ascii/tests/t/daophot.dat.gz | Bin 0 -> 793 bytes astropy/io/ascii/tests/t/daophot2.dat | 31 + astropy/io/ascii/tests/t/daophot3.dat | 120 + astropy/io/ascii/tests/t/fill_values.txt | 3 + astropy/io/ascii/tests/t/html.html | 29 + astropy/io/ascii/tests/t/html2.html | 28 + astropy/io/ascii/tests/t/ipac.dat | 12 + astropy/io/ascii/tests/t/ipac.dat.bz2 | Bin 0 -> 385 bytes astropy/io/ascii/tests/t/latex1.tex | 10 + astropy/io/ascii/tests/t/latex1.tex.gz | Bin 0 -> 198 bytes astropy/io/ascii/tests/t/latex2.tex | 14 + astropy/io/ascii/tests/t/nls1_stackinfo.dbout | 60 + astropy/io/ascii/tests/t/no_data_cds.dat | 37 + astropy/io/ascii/tests/t/no_data_daophot.dat | 7 + astropy/io/ascii/tests/t/no_data_ipac.dat | 10 + .../io/ascii/tests/t/no_data_sextractor.dat | 5 + .../io/ascii/tests/t/no_data_with_header.dat | 1 + .../ascii/tests/t/no_data_without_header.dat | 2 + astropy/io/ascii/tests/t/sextractor.dat | 8 + astropy/io/ascii/tests/t/sextractor2.dat | 14 + astropy/io/ascii/tests/t/short.rdb | 14 + astropy/io/ascii/tests/t/short.rdb.bz2 | Bin 0 -> 146 bytes astropy/io/ascii/tests/t/short.rdb.gz | Bin 0 -> 148 bytes astropy/io/ascii/tests/t/short.tab | 8 + astropy/io/ascii/tests/t/simple.txt | 4 + astropy/io/ascii/tests/t/simple2.txt | 4 + astropy/io/ascii/tests/t/simple3.txt | 3 + astropy/io/ascii/tests/t/simple4.txt | 3 + 
astropy/io/ascii/tests/t/simple5.txt | 4 + astropy/io/ascii/tests/t/simple_csv.csv | 3 + .../io/ascii/tests/t/simple_csv_missing.csv | 3 + .../ascii/tests/t/space_delim_blank_lines.txt | 8 + .../ascii/tests/t/space_delim_no_header.dat | 2 + .../io/ascii/tests/t/space_delim_no_names.dat | 2 + astropy/io/ascii/tests/t/test4.dat | 12 + astropy/io/ascii/tests/t/test5.dat | 22 + astropy/io/ascii/tests/t/vizier/ReadMe | 89 + astropy/io/ascii/tests/t/vizier/table1.dat | 15 + astropy/io/ascii/tests/t/vizier/table5.dat | 49 + astropy/io/ascii/tests/t/vots_spec.dat | 99 + astropy/io/ascii/tests/t/whitespace.dat | 3 + .../tests/test_cds_header_from_readme.py | 139 + astropy/io/ascii/tests/test_compressed.py | 25 + astropy/io/ascii/tests/test_connect.py | 132 + astropy/io/ascii/tests/test_fixedwidth.py | 422 + astropy/io/ascii/tests/test_html.py | 461 + .../io/ascii/tests/test_ipac_definitions.py | 134 + astropy/io/ascii/tests/test_read.py | 815 + astropy/io/ascii/tests/test_types.py | 58 + astropy/io/ascii/tests/test_write.py | 432 + astropy/io/ascii/ui.py | 288 + astropy/io/fits/__init__.py | 89 + astropy/io/fits/card.py | 1551 + astropy/io/fits/column.py | 1992 ++ astropy/io/fits/connect.py | 298 + astropy/io/fits/convenience.py | 810 + astropy/io/fits/diff.py | 1262 + astropy/io/fits/file.py | 543 + astropy/io/fits/fitsrec.py | 1092 + astropy/io/fits/hdu/__init__.py | 14 + astropy/io/fits/hdu/base.py | 1778 + astropy/io/fits/hdu/compressed.py | 1925 ++ astropy/io/fits/hdu/groups.py | 586 + astropy/io/fits/hdu/hdulist.py | 1073 + astropy/io/fits/hdu/image.py | 1044 + astropy/io/fits/hdu/nonstandard.py | 125 + astropy/io/fits/hdu/streaming.py | 232 + astropy/io/fits/hdu/table.py | 1327 + astropy/io/fits/header.py | 2321 ++ astropy/io/fits/py3compat.py | 152 + astropy/io/fits/scripts/__init__.py | 12 + astropy/io/fits/scripts/fitscheck.py | 220 + astropy/io/fits/scripts/fitsdiff.py | 271 + astropy/io/fits/scripts/fitsheader.py | 159 + astropy/io/fits/setup_package.py | 58 + 
astropy/io/fits/src/compressionmodule.c | 1162 + astropy/io/fits/src/compressionmodule.h | 67 + astropy/io/fits/tests/__init__.py | 63 + astropy/io/fits/tests/cfitsio_verify.c | 74 + astropy/io/fits/tests/data/arange.fits | Bin 0 -> 8640 bytes astropy/io/fits/tests/data/ascii.fits | 1 + astropy/io/fits/tests/data/blank.fits | Bin 0 -> 5760 bytes astropy/io/fits/tests/data/checksum.fits | 1 + astropy/io/fits/tests/data/comp.fits | 405 + astropy/io/fits/tests/data/fixed-1890.fits | 1 + astropy/io/fits/tests/data/o4sp040b0_raw.fits | 1 + astropy/io/fits/tests/data/random_groups.fits | 8 + astropy/io/fits/tests/data/scale.fits | Bin 0 -> 8640 bytes astropy/io/fits/tests/data/stddata.fits | Bin 0 -> 23040 bytes astropy/io/fits/tests/data/table.fits | Bin 0 -> 8640 bytes astropy/io/fits/tests/data/tb.fits | Bin 0 -> 8640 bytes astropy/io/fits/tests/data/test0.fits | 1 + astropy/io/fits/tests/data/zerowidth.fits | 27 + astropy/io/fits/tests/test_checksum.py | 413 + astropy/io/fits/tests/test_connect.py | 271 + astropy/io/fits/tests/test_core.py | 1061 + astropy/io/fits/tests/test_diff.py | 597 + astropy/io/fits/tests/test_division.py | 42 + astropy/io/fits/tests/test_groups.py | 199 + astropy/io/fits/tests/test_hdulist.py | 766 + astropy/io/fits/tests/test_header.py | 2602 ++ astropy/io/fits/tests/test_image.py | 1390 + astropy/io/fits/tests/test_nonstandard.py | 66 + astropy/io/fits/tests/test_structured.py | 101 + astropy/io/fits/tests/test_table.py | 2462 ++ astropy/io/fits/tests/test_uint.py | 114 + astropy/io/fits/tests/test_util.py | 28 + astropy/io/fits/tests/util.py | 45 + astropy/io/fits/util.py | 865 + astropy/io/fits/verify.py | 174 + astropy/io/misc/__init__.py | 7 + astropy/io/misc/connect.py | 15 + astropy/io/misc/hdf5.py | 251 + astropy/io/misc/pickle_helpers.py | 127 + astropy/io/misc/tests/__init__.py | 1 + astropy/io/misc/tests/test_hdf5.py | 415 + astropy/io/misc/tests/test_pickle_helpers.py | 104 + astropy/io/registry.py | 418 + 
astropy/io/setup_package.py | 4 + astropy/io/tests/__init__.py | 0 astropy/io/tests/test_registry.py | 325 + astropy/io/votable/__init__.py | 32 + astropy/io/votable/connect.py | 158 + astropy/io/votable/converters.py | 1439 + astropy/io/votable/data/VOTable.dtd | 158 + astropy/io/votable/data/VOTable.v1.1.xsd | 466 + astropy/io/votable/data/VOTable.v1.2.xsd | 558 + astropy/io/votable/data/VOTable.v1.3.xsd | 572 + astropy/io/votable/data/ucd1p-words.txt | 473 + astropy/io/votable/exceptions.py | 1442 + astropy/io/votable/setup_package.py | 29 + astropy/io/votable/src/tablewriter.c | 404 + astropy/io/votable/table.py | 383 + astropy/io/votable/tests/__init__.py | 2 + astropy/io/votable/tests/converter_test.py | 264 + .../io/votable/tests/data/irsa-nph-error.xml | 5 + .../io/votable/tests/data/irsa-nph-m31.xml | 70 + astropy/io/votable/tests/data/names.xml | 212 + astropy/io/votable/tests/data/no_resource.txt | 5 + astropy/io/votable/tests/data/no_resource.xml | 6 + .../votable/tests/data/nonstandard_units.xml | 13 + .../regression.bin.tabledata.truth.1.1.xml | 306 + .../regression.bin.tabledata.truth.1.3.xml | 309 + astropy/io/votable/tests/data/regression.xml | 305 + astropy/io/votable/tests/data/tb.fits | Bin 0 -> 8640 bytes .../tests/data/too_many_columns.xml.gz | Bin 0 -> 1734 bytes astropy/io/votable/tests/data/validation.txt | 237 + astropy/io/votable/tests/exception_test.py | 49 + astropy/io/votable/tests/table_test.py | 145 + astropy/io/votable/tests/tree_test.py | 30 + astropy/io/votable/tests/ucd_test.py | 62 + astropy/io/votable/tests/util_test.py | 68 + astropy/io/votable/tests/vo_test.py | 994 + astropy/io/votable/tree.py | 3580 ++ astropy/io/votable/ucd.py | 193 + astropy/io/votable/util.py | 217 + astropy/io/votable/validator/__init__.py | 6 + astropy/io/votable/validator/html.py | 310 + astropy/io/votable/validator/main.py | 164 + astropy/io/votable/validator/result.py | 360 + .../io/votable/validator/urls/cone.big.dat.gz | Bin 0 -> 168333 bytes 
.../votable/validator/urls/cone.broken.dat.gz | Bin 0 -> 350 bytes .../votable/validator/urls/cone.good.dat.gz | Bin 0 -> 168334 bytes .../validator/urls/cone.incorrect.dat.gz | Bin 0 -> 721 bytes astropy/io/votable/voexceptions.py | 11 + astropy/io/votable/volint.py | 18 + astropy/io/votable/xmlutil.py | 129 + astropy/logger.py | 631 + astropy/modeling/__init__.py | 13 + astropy/modeling/core.py | 1425 + astropy/modeling/fitting.py | 949 + astropy/modeling/functional_models.py | 1430 + astropy/modeling/models.py | 63 + astropy/modeling/optimizers.py | 237 + astropy/modeling/parameters.py | 575 + astropy/modeling/polynomial.py | 1192 + astropy/modeling/powerlaws.py | 257 + astropy/modeling/projections.py | 604 + astropy/modeling/rotations.py | 199 + astropy/modeling/setup_package.py | 12 + astropy/modeling/statistic.py | 44 + astropy/modeling/tests/__init__.py | 0 astropy/modeling/tests/data/1904-66_AZP.fits | 353 + astropy/modeling/tests/data/__init__.py | 5 + astropy/modeling/tests/data/hst_sip.hdr | 42 + astropy/modeling/tests/data/idcompspec.fits | 36 + astropy/modeling/tests/data/irac_sip.hdr | 241 + astropy/modeling/tests/example_models.py | 292 + astropy/modeling/tests/irafutil.py | 265 + astropy/modeling/tests/test_constraints.py | 406 + astropy/modeling/tests/test_core.py | 69 + astropy/modeling/tests/test_fitters.py | 379 + .../modeling/tests/test_functional_models.py | 107 + astropy/modeling/tests/test_input.py | 682 + astropy/modeling/tests/test_models.py | 524 + astropy/modeling/tests/test_parameters.py | 608 + astropy/modeling/tests/test_polynomial.py | 170 + astropy/modeling/tests/test_projections.py | 187 + astropy/modeling/tests/test_rotations.py | 54 + astropy/modeling/tests/utils.py | 20 + astropy/modeling/utils.py | 117 + astropy/nddata/__init__.py | 37 + astropy/nddata/flag_collection.py | 45 + astropy/nddata/nddata.py | 619 + astropy/nddata/nduncertainty.py | 351 + astropy/nddata/setup_package.py | 2 + astropy/nddata/tests/__init__.py | 0 
astropy/nddata/tests/test_flag_collection.py | 49 + astropy/nddata/tests/test_nddata.py | 660 + astropy/setup_helpers.py | 1535 + astropy/setup_package.py | 4 + astropy/sphinx/__init__.py | 10 + astropy/sphinx/conf.py | 311 + astropy/sphinx/ext/__init__.py | 13 + astropy/sphinx/ext/astropyautosummary.py | 96 + astropy/sphinx/ext/automodapi.py | 353 + astropy/sphinx/ext/automodsumm.py | 579 + astropy/sphinx/ext/changelog_links.py | 67 + astropy/sphinx/ext/comment_eater.py | 162 + astropy/sphinx/ext/compiler_unparse.py | 864 + astropy/sphinx/ext/docscrape.py | 512 + astropy/sphinx/ext/docscrape_sphinx.py | 231 + astropy/sphinx/ext/doctest.py | 37 + astropy/sphinx/ext/edit_on_github.py | 169 + astropy/sphinx/ext/numpydoc.py | 173 + astropy/sphinx/ext/phantom_import.py | 166 + astropy/sphinx/ext/smart_resolver.py | 73 + .../ext/templates/autosummary_core/base.rst | 16 + .../ext/templates/autosummary_core/class.rst | 71 + .../ext/templates/autosummary_core/module.rst | 47 + astropy/sphinx/ext/tests/__init__.py | 4 + astropy/sphinx/ext/tests/test_automodapi.py | 300 + astropy/sphinx/ext/tests/test_automodsumm.py | 77 + astropy/sphinx/ext/tocdepthfix.py | 22 + astropy/sphinx/ext/traitsdoc.py | 144 + astropy/sphinx/ext/viewcode.py | 216 + astropy/sphinx/setup_package.py | 13 + .../sphinx/themes/bootstrap-astropy/README.md | 4 + .../themes/bootstrap-astropy/globaltoc.html | 3 + .../themes/bootstrap-astropy/layout.html | 94 + .../themes/bootstrap-astropy/localtoc.html | 3 + .../themes/bootstrap-astropy/searchbox.html | 7 + .../static/astropy_linkout_20.png | Bin 0 -> 1725 bytes .../bootstrap-astropy/static/astropy_logo.ico | Bin 0 -> 1150 bytes .../static/astropy_logo_32.png | Bin 0 -> 1884 bytes .../static/bootstrap-astropy.css | 585 + .../bootstrap-astropy/static/sidebar.js | 160 + .../themes/bootstrap-astropy/theme.conf | 10 + astropy/stats/__init__.py | 14 + astropy/stats/funcs.py | 862 + astropy/stats/setup_package.py | 2 + astropy/stats/tests/__init__.py | 0 
astropy/stats/tests/test_funcs.py | 297 + astropy/table/__init__.py | 31 + astropy/table/_np_utils.c | 7096 ++++ astropy/table/_np_utils.pyx | 134 + astropy/table/column.py | 826 + astropy/table/data/demo_page.css | 96 + astropy/table/data/demo_table.css | 378 + astropy/table/data/jquery-ui.css | 1177 + astropy/table/groups.py | 377 + astropy/table/jsviewer.py | 144 + astropy/table/np_utils.py | 580 + astropy/table/operations.py | 298 + astropy/table/pprint.py | 520 + astropy/table/row.py | 150 + astropy/table/setup_package.py | 26 + astropy/table/table.py | 1859 ++ astropy/table/tests/__init__.py | 0 astropy/table/tests/conftest.py | 126 + astropy/table/tests/notebook_repr_html.ipynb | 233 + astropy/table/tests/test_column.py | 323 + astropy/table/tests/test_groups.py | 595 + astropy/table/tests/test_init_table.py | 435 + astropy/table/tests/test_item_access.py | 250 + astropy/table/tests/test_masked.py | 378 + astropy/table/tests/test_np_utils.py | 46 + astropy/table/tests/test_operations.py | 725 + astropy/table/tests/test_pickle.py | 64 + astropy/table/tests/test_pprint.py | 481 + astropy/table/tests/test_row.py | 150 + astropy/table/tests/test_subclass.py | 61 + astropy/table/tests/test_table.py | 1313 + astropy/tests/__init__.py | 11 + astropy/tests/coveragerc | 31 + astropy/tests/disable_internet.py | 129 + astropy/tests/helper.py | 726 + astropy/tests/output_checker.py | 166 + astropy/tests/pytest_plugins.py | 682 + astropy/tests/setup_package.py | 11 + astropy/tests/test_logger.py | 458 + astropy/tests/tests/__init__.py | 2 + .../tests/tests/data/open_file_detection.txt | 1 + astropy/tests/tests/run_after_2to3.py | 12 + astropy/tests/tests/test_imports.py | 77 + .../tests/tests/test_open_file_detection.py | 18 + astropy/tests/tests/test_run_tests.py | 80 + astropy/tests/tests/test_skip_remote_data.py | 25 + astropy/tests/tests/test_socketblocker.py | 54 + astropy/time/__init__.py | 2 + astropy/time/core.py | 2225 ++ astropy/time/erfa_time.c | 19679 
+++++++++++ astropy/time/erfa_time.pyx | 2056 ++ astropy/time/setup_package.py | 36 + astropy/time/tests/__init__.py | 0 astropy/time/tests/test_basic.py | 682 + astropy/time/tests/test_comparisons.py | 71 + astropy/time/tests/test_delta.py | 406 + astropy/time/tests/test_guess.py | 29 + astropy/time/tests/test_pickle.py | 27 + astropy/time/tests/test_precision.py | 112 + .../time/tests/test_quantity_interaction.py | 150 + astropy/time/tests/test_sidereal.py | 167 + astropy/time/tests/test_ut1.py | 79 + astropy/units/__init__.py | 33 + astropy/units/astrophys.py | 171 + astropy/units/cds.py | 191 + astropy/units/cgs.py | 135 + astropy/units/core.py | 2239 ++ astropy/units/equivalencies.py | 479 + astropy/units/format/__init__.py | 58 + astropy/units/format/base.py | 31 + astropy/units/format/cds.py | 342 + astropy/units/format/cds_lextab.py | 9 + astropy/units/format/cds_parsetab.py | 54 + astropy/units/format/console.py | 100 + astropy/units/format/fits.py | 143 + astropy/units/format/generic.py | 445 + astropy/units/format/generic_lextab.py | 9 + astropy/units/format/generic_parsetab.py | 77 + astropy/units/format/latex.py | 89 + astropy/units/format/ogip.py | 448 + astropy/units/format/ogip_lextab.py | 12 + astropy/units/format/ogip_parsetab.py | 72 + astropy/units/format/unicode_format.py | 73 + astropy/units/format/utils.py | 128 + astropy/units/format/vounit.py | 149 + astropy/units/imperial.py | 137 + astropy/units/physical.py | 129 + astropy/units/quantity.py | 1271 + astropy/units/quantity_helper.py | 309 + astropy/units/setup_package.py | 2 + astropy/units/si.py | 234 + astropy/units/tests/__init__.py | 0 astropy/units/tests/test_equivalencies.py | 524 + astropy/units/tests/test_format.py | 356 + astropy/units/tests/test_physical.py | 60 + astropy/units/tests/test_quantity.py | 1024 + .../tests/test_quantity_array_methods.py | 487 + .../units/tests/test_quantity_non_ufuncs.py | 25 + astropy/units/tests/test_quantity_ufuncs.py | 642 + 
astropy/units/tests/test_units.py | 631 + astropy/units/utils.py | 216 + astropy/utils/__init__.py | 16 + astropy/utils/collections.py | 52 + astropy/utils/compat/__init__.py | 14 + astropy/utils/compat/_argparse/__init__.py | 2363 ++ .../utils/compat/_fractions_py2/__init__.py | 605 + astropy/utils/compat/_gzip_py2/__init__.py | 509 + astropy/utils/compat/_gzip_py3/__init__.py | 625 + astropy/utils/compat/_odict_py2/__init__.py | 190 + .../utils/compat/_subprocess_py2/__init__.py | 38 + astropy/utils/compat/argparse.py | 10 + astropy/utils/compat/fractions.py | 18 + astropy/utils/compat/futures/__init__.py | 21 + astropy/utils/compat/futures/_base.py | 636 + astropy/utils/compat/futures/process.py | 388 + astropy/utils/compat/futures/thread.py | 153 + astropy/utils/compat/gzip.py | 21 + astropy/utils/compat/misc.py | 196 + astropy/utils/compat/numpycompat.py | 41 + astropy/utils/compat/odict.py | 7 + astropy/utils/compat/subprocess.py | 19 + astropy/utils/console.py | 1039 + astropy/utils/data.py | 1251 + astropy/utils/exceptions.py | 46 + astropy/utils/iers/__init__.py | 4 + astropy/utils/iers/data/ReadMe.eopc04_IAU2000 | 41 + astropy/utils/iers/data/ReadMe.finals2000A | 57 + astropy/utils/iers/data/eopc04_IAU2000.62-now | 19100 +++++++++++ astropy/utils/iers/iers.py | 352 + astropy/utils/iers/tests/__init__.py | 2 + astropy/utils/iers/tests/test_iers.py | 76 + astropy/utils/metadata.py | 133 + astropy/utils/misc.py | 1041 + astropy/utils/release.py | 140 + astropy/utils/setup_package.py | 35 + astropy/utils/src/compiler.c | 129 + astropy/utils/state.py | 186 + astropy/utils/tests/__init__.py | 2 + astropy/utils/tests/data/.hidden_file.txt | 1 + astropy/utils/tests/data/alias.cfg | 2 + astropy/utils/tests/data/invalid.dat.bz2 | 1 + astropy/utils/tests/data/invalid.dat.gz | 1 + astropy/utils/tests/data/local.dat | 2 + astropy/utils/tests/data/local.dat.bz2 | Bin 0 -> 96 bytes astropy/utils/tests/data/local.dat.gz | Bin 0 -> 94 bytes 
.../utils/tests/data/test_package/__init__.py | 4 + .../tests/data/test_package/data/foo.txt | 0 astropy/utils/tests/data/unicode.txt | 2 + astropy/utils/tests/odict_mapping.py | 311 + astropy/utils/tests/odict_support.py | 95 + astropy/utils/tests/test_collections.py | 35 + astropy/utils/tests/test_compat.py | 2 + astropy/utils/tests/test_console.py | 204 + astropy/utils/tests/test_data.py | 326 + astropy/utils/tests/test_gzip.py | 22 + astropy/utils/tests/test_metadata.py | 73 + astropy/utils/tests/test_misc.py | 208 + astropy/utils/tests/test_odict.py | 280 + astropy/utils/tests/test_state.py | 71 + astropy/utils/tests/test_timer.py | 91 + astropy/utils/tests/test_xml.py | 76 + astropy/utils/timer.py | 377 + astropy/utils/xml/__init__.py | 2 + astropy/utils/xml/check.py | 77 + astropy/utils/xml/iterparser.py | 223 + astropy/utils/xml/setup_package.py | 44 + astropy/utils/xml/src/expat_config.h | 93 + astropy/utils/xml/src/iterparse.c | 1357 + astropy/utils/xml/src/iterparse.map | 7 + astropy/utils/xml/unescaper.py | 58 + astropy/utils/xml/validate.py | 58 + astropy/utils/xml/writer.py | 283 + astropy/version.py | 169 + astropy/version_helpers.py | 258 + astropy/vo/__init__.py | 23 + astropy/vo/client/__init__.py | 0 astropy/vo/client/async.py | 83 + astropy/vo/client/conesearch.py | 510 + astropy/vo/client/exceptions.py | 42 + astropy/vo/client/setup_package.py | 10 + astropy/vo/client/tests/__init__.py | 0 astropy/vo/client/tests/data/basic.json | 10 + .../client/tests/data/conesearch_error1.xml | 6 + .../client/tests/data/conesearch_error2.xml | 8 + .../client/tests/data/conesearch_error3.xml | 9 + .../client/tests/data/conesearch_error4.xml | 9 + astropy/vo/client/tests/test_conesearch.py | 250 + astropy/vo/client/tests/test_vos_catalog.py | 213 + astropy/vo/client/vos_catalog.py | 913 + astropy/vo/samp/__init__.py | 34 + astropy/vo/samp/client.py | 772 + astropy/vo/samp/constants.py | 35 + astropy/vo/samp/data/astropy_icon.png | Bin 0 -> 1434 bytes 
astropy/vo/samp/data/clientaccesspolicy.xml | 13 + astropy/vo/samp/data/crossdomain.xml | 7 + astropy/vo/samp/errors.py | 37 + astropy/vo/samp/hub.py | 1520 + astropy/vo/samp/hub_proxy.py | 257 + astropy/vo/samp/hub_script.py | 222 + astropy/vo/samp/integrated_client.py | 549 + astropy/vo/samp/lockfile_helpers.py | 282 + astropy/vo/samp/setup_package.py | 14 + astropy/vo/samp/ssl_utils.py | 160 + astropy/vo/samp/standard_profile.py | 248 + astropy/vo/samp/tests/__init__.py | 0 astropy/vo/samp/tests/data/README.md | 54 + astropy/vo/samp/tests/data/test1.crt | 10 + astropy/vo/samp/tests/data/test1.key | 9 + astropy/vo/samp/tests/data/test2.crt | 10 + astropy/vo/samp/tests/data/test2.key | 9 + astropy/vo/samp/tests/test_client.py | 27 + astropy/vo/samp/tests/test_errors.py | 24 + astropy/vo/samp/tests/test_helpers.py | 70 + astropy/vo/samp/tests/test_hub.py | 23 + astropy/vo/samp/tests/test_hub_proxy.py | 51 + astropy/vo/samp/tests/test_hub_script.py | 24 + .../vo/samp/tests/test_standard_profile.py | 290 + astropy/vo/samp/tests/test_web_profile.py | 96 + .../vo/samp/tests/web_profile_test_helpers.py | 264 + astropy/vo/samp/utils.py | 168 + astropy/vo/samp/web_profile.py | 184 + astropy/vo/validator/__init__.py | 35 + astropy/vo/validator/data/conesearch_urls.txt | 30 + astropy/vo/validator/exceptions.py | 21 + astropy/vo/validator/inspect.py | 189 + astropy/vo/validator/setup_package.py | 12 + astropy/vo/validator/tests/__init__.py | 0 .../tests/data/conesearch_error.json | 4 + .../tests/data/conesearch_exception.json | 4 + .../validator/tests/data/conesearch_good.json | 91 + .../tests/data/conesearch_good_subset.json | 47 + .../validator/tests/data/conesearch_warn.json | 4 + astropy/vo/validator/tests/data/listcats1.out | 10 + astropy/vo/validator/tests/data/listcats2.out | 5 + astropy/vo/validator/tests/data/printcat.out | 44 + astropy/vo/validator/tests/data/tally.out | 5 + .../vao_conesearch_sites_121107_subset.xml | 92 + astropy/vo/validator/tests/test_inpect.py 
| 75 + astropy/vo/validator/tests/test_validate.py | 97 + astropy/vo/validator/tstquery.py | 74 + astropy/vo/validator/validate.py | 351 + astropy/wcs/__init__.py | 39 + astropy/wcs/_docutil.py | 61 + astropy/wcs/docstrings.py | 2267 ++ astropy/wcs/include/astropy_wcs/astropy_wcs.h | 22 + .../wcs/include/astropy_wcs/astropy_wcs_api.h | 122 + astropy/wcs/include/astropy_wcs/distortion.h | 108 + .../wcs/include/astropy_wcs/distortion_wrap.h | 24 + astropy/wcs/include/astropy_wcs/docstrings.h | 153 + astropy/wcs/include/astropy_wcs/isnan.h | 42 + astropy/wcs/include/astropy_wcs/pipeline.h | 105 + astropy/wcs/include/astropy_wcs/pyutil.h | 361 + astropy/wcs/include/astropy_wcs/sip.h | 170 + astropy/wcs/include/astropy_wcs/sip_wrap.h | 23 + .../wcs/include/astropy_wcs/str_list_proxy.h | 38 + .../wcs/include/astropy_wcs/unit_list_proxy.h | 47 + astropy/wcs/include/astropy_wcs/util.h | 35 + astropy/wcs/include/astropy_wcs/wcsconfig.h | 35 + .../include/astropy_wcs/wcslib_tabprm_wrap.h | 25 + .../include/astropy_wcs/wcslib_units_wrap.h | 33 + astropy/wcs/include/astropy_wcs/wcslib_wrap.h | 26 + .../include/astropy_wcs/wcslib_wtbarr_wrap.h | 25 + astropy/wcs/include/astropy_wcs_api.h | 1 + astropy/wcs/include/wcsconfig.h | 35 + astropy/wcs/include/wcslib/cel.h | 435 + astropy/wcs/include/wcslib/lin.h | 448 + astropy/wcs/include/wcslib/prj.h | 816 + astropy/wcs/include/wcslib/spc.h | 884 + astropy/wcs/include/wcslib/spx.h | 538 + astropy/wcs/include/wcslib/tab.h | 608 + astropy/wcs/include/wcslib/wcs.h | 1618 + astropy/wcs/include/wcslib/wcserr.h | 253 + astropy/wcs/include/wcslib/wcsmath.h | 70 + astropy/wcs/include/wcslib/wcsprintf.h | 150 + astropy/wcs/setup_package.py | 337 + astropy/wcs/src/astropy_wcs.c | 991 + astropy/wcs/src/astropy_wcs_api.c | 56 + astropy/wcs/src/distortion.c | 222 + astropy/wcs/src/distortion_wrap.c | 359 + astropy/wcs/src/docstrings.c | 5399 +++ astropy/wcs/src/pipeline.c | 256 + astropy/wcs/src/pyutil.c | 971 + astropy/wcs/src/sip.c | 330 + 
astropy/wcs/src/sip_wrap.c | 532 + astropy/wcs/src/str_list_proxy.c | 283 + astropy/wcs/src/unit_list_proxy.c | 369 + astropy/wcs/src/util.c | 40 + astropy/wcs/src/wcslib_tabprm_wrap.c | 485 + astropy/wcs/src/wcslib_wrap.c | 3489 ++ astropy/wcs/src/wcslib_wtbarr_wrap.c | 294 + astropy/wcs/tests/__init__.py | 2 + astropy/wcs/tests/data/2wcses.hdr | 1 + astropy/wcs/tests/data/3d_cd.hdr | 16 + astropy/wcs/tests/data/defunct_keywords.hdr | 1 + astropy/wcs/tests/data/dist.fits | Bin 0 -> 23040 bytes astropy/wcs/tests/data/header_newlines.fits | 1 + astropy/wcs/tests/data/invalid_header.hdr | 1 + astropy/wcs/tests/data/irac_sip.hdr | 1 + astropy/wcs/tests/data/locale.hdr | 1 + astropy/wcs/tests/data/nonstandard_units.hdr | 1 + astropy/wcs/tests/data/outside_sky.hdr | 1 + astropy/wcs/tests/data/sip.fits | 1 + astropy/wcs/tests/data/sip2.fits | 1 + astropy/wcs/tests/data/sub-segfault.hdr | 28 + astropy/wcs/tests/data/too_many_pv.hdr | 1 + astropy/wcs/tests/data/unit.hdr | 1 + astropy/wcs/tests/data/validate.fits | 1 + astropy/wcs/tests/data/validate.txt | 16 + astropy/wcs/tests/data/zpn-hole.hdr | 1 + astropy/wcs/tests/extension/__init__.py | 0 astropy/wcs/tests/extension/setup.py | 50 + astropy/wcs/tests/extension/test_extension.py | 75 + astropy/wcs/tests/extension/wcsapi_test.c | 76 + astropy/wcs/tests/maps/1904-66_AIR.hdr | 1 + astropy/wcs/tests/maps/1904-66_AIT.hdr | 1 + astropy/wcs/tests/maps/1904-66_ARC.hdr | 1 + astropy/wcs/tests/maps/1904-66_AZP.hdr | 1 + astropy/wcs/tests/maps/1904-66_BON.hdr | 1 + astropy/wcs/tests/maps/1904-66_CAR.hdr | 1 + astropy/wcs/tests/maps/1904-66_CEA.hdr | 1 + astropy/wcs/tests/maps/1904-66_COD.hdr | 1 + astropy/wcs/tests/maps/1904-66_COE.hdr | 1 + astropy/wcs/tests/maps/1904-66_COO.hdr | 1 + astropy/wcs/tests/maps/1904-66_COP.hdr | 1 + astropy/wcs/tests/maps/1904-66_CSC.hdr | 1 + astropy/wcs/tests/maps/1904-66_CYP.hdr | 1 + astropy/wcs/tests/maps/1904-66_HPX.hdr | 1 + astropy/wcs/tests/maps/1904-66_MER.hdr | 1 + 
astropy/wcs/tests/maps/1904-66_MOL.hdr | 1 + astropy/wcs/tests/maps/1904-66_NCP.hdr | 1 + astropy/wcs/tests/maps/1904-66_PAR.hdr | 1 + astropy/wcs/tests/maps/1904-66_PCO.hdr | 1 + astropy/wcs/tests/maps/1904-66_QSC.hdr | 1 + astropy/wcs/tests/maps/1904-66_SFL.hdr | 1 + astropy/wcs/tests/maps/1904-66_SIN.hdr | 1 + astropy/wcs/tests/maps/1904-66_STG.hdr | 1 + astropy/wcs/tests/maps/1904-66_SZP.hdr | 1 + astropy/wcs/tests/maps/1904-66_TAN.hdr | 1 + astropy/wcs/tests/maps/1904-66_TSC.hdr | 1 + astropy/wcs/tests/maps/1904-66_ZEA.hdr | 1 + astropy/wcs/tests/maps/1904-66_ZPN.hdr | 1 + astropy/wcs/tests/spectra/orion-freq-1.hdr | 1 + astropy/wcs/tests/spectra/orion-freq-4.hdr | 1 + astropy/wcs/tests/spectra/orion-velo-1.hdr | 1 + astropy/wcs/tests/spectra/orion-velo-4.hdr | 1 + astropy/wcs/tests/spectra/orion-wave-1.hdr | 1 + astropy/wcs/tests/spectra/orion-wave-4.hdr | 1 + astropy/wcs/tests/test_pickle.py | 103 + astropy/wcs/tests/test_profiling.py | 92 + astropy/wcs/tests/test_utils.py | 141 + astropy/wcs/tests/test_wcs.py | 577 + astropy/wcs/tests/test_wcsprm.py | 761 + astropy/wcs/utils.py | 27 + astropy/wcs/wcs.py | 2232 ++ astropy/wcs/wcslint.py | 19 + astropy_helpers/.coveragerc | 21 + astropy_helpers/.travis.yml | 43 + astropy_helpers/CHANGES.rst | 61 + astropy_helpers/CONTRIBUTING.md | 20 + astropy_helpers/LICENSE.rst | 26 + astropy_helpers/MANIFEST.in | 10 + astropy_helpers/README.rst | 26 + astropy_helpers/ah_bootstrap.py | 748 + .../astropy_helpers.egg-info/PKG-INFO | 46 + .../astropy_helpers.egg-info/SOURCES.txt | 67 + .../dependency_links.txt | 1 + .../astropy_helpers.egg-info/not-zip-safe | 1 + .../astropy_helpers.egg-info/top_level.txt | 1 + astropy_helpers/astropy_helpers/__init__.py | 4 + .../astropy_helpers/compat/__init__.py | 12 + .../compat/_subprocess_py2/__init__.py | 38 + .../astropy_helpers/compat/subprocess.py | 19 + .../astropy_helpers/git_helpers.py | 155 + .../astropy_helpers/setup_helpers.py | 1526 + .../astropy_helpers/sphinx/__init__.py | 6 
+ .../astropy_helpers/sphinx/conf.py | 310 + .../astropy_helpers/sphinx/ext/__init__.py | 3 + .../sphinx/ext/astropyautosummary.py | 98 + .../astropy_helpers/sphinx/ext/automodapi.py | 350 + .../astropy_helpers/sphinx/ext/automodsumm.py | 581 + .../sphinx/ext/changelog_links.py | 78 + .../sphinx/ext/comment_eater.py | 169 + .../sphinx/ext/compiler_unparse.py | 865 + .../astropy_helpers/sphinx/ext/docscrape.py | 531 + .../sphinx/ext/docscrape_sphinx.py | 274 + .../astropy_helpers/sphinx/ext/doctest.py | 33 + .../sphinx/ext/edit_on_github.py | 165 + .../astropy_helpers/sphinx/ext/numpydoc.py | 187 + .../sphinx/ext/phantom_import.py | 167 + .../sphinx/ext/smart_resolver.py | 69 + .../ext/templates/autosummary_core/base.rst | 10 + .../ext/templates/autosummary_core/class.rst | 65 + .../ext/templates/autosummary_core/module.rst | 41 + .../sphinx/ext/tests/__init__.py | 70 + .../sphinx/ext/tests/test_automodapi.py | 346 + .../sphinx/ext/tests/test_automodsumm.py | 115 + .../sphinx/ext/tests/test_docscrape.py | 762 + .../sphinx/ext/tests/test_utils.py | 34 + .../astropy_helpers/sphinx/ext/tocdepthfix.py | 18 + .../astropy_helpers/sphinx/ext/traitsdoc.py | 142 + .../astropy_helpers/sphinx/ext/utils.py | 65 + .../astropy_helpers/sphinx/ext/viewcode.py | 216 + .../sphinx/local/python3links.inv | 7 + .../sphinx/local/python3links.txt | 13 + .../astropy_helpers/sphinx/setup_package.py | 10 + .../themes/bootstrap-astropy/globaltoc.html | 3 + .../themes/bootstrap-astropy/layout.html | 96 + .../themes/bootstrap-astropy/localtoc.html | 3 + .../themes/bootstrap-astropy/searchbox.html | 7 + .../static/astropy_linkout_20.png | Bin 0 -> 1725 bytes .../bootstrap-astropy/static/astropy_logo.ico | Bin 0 -> 1150 bytes .../static/astropy_logo_32.png | Bin 0 -> 1884 bytes .../static/bootstrap-astropy.css | 585 + .../bootstrap-astropy/static/copybutton.js | 57 + .../bootstrap-astropy/static/sidebar.js | 160 + .../themes/bootstrap-astropy/theme.conf | 10 + .../astropy_helpers/src/__init__.py 
| 0 .../astropy_helpers/src/compiler.c | 129 + .../astropy_helpers/src/setup_package.py | 2 + .../astropy_helpers/test_helpers.py | 207 + .../astropy_helpers/tests/__init__.py | 172 + .../tests/test_ah_bootstrap.py | 382 + .../astropy_helpers/tests/test_git_helpers.py | 58 + .../tests/test_setup_helpers.py | 119 + astropy_helpers/astropy_helpers/utils.py | 162 + astropy_helpers/astropy_helpers/version.py | 169 + .../astropy_helpers/version_helpers.py | 191 + .../astropy_helpers/__init__.py | 4 + .../astropy_helpers/compat/__init__.py | 12 + .../compat/_subprocess_py2/__init__.py | 38 + .../astropy_helpers/compat/subprocess.py | 19 + .../astropy_helpers/git_helpers.py | 155 + .../astropy_helpers/setup_helpers.py | 1526 + .../astropy_helpers/sphinx/__init__.py | 6 + .../astropy_helpers/sphinx/conf.py | 310 + .../astropy_helpers/sphinx/ext/__init__.py | 3 + .../sphinx/ext/astropyautosummary.py | 98 + .../astropy_helpers/sphinx/ext/automodapi.py | 350 + .../astropy_helpers/sphinx/ext/automodsumm.py | 581 + .../sphinx/ext/changelog_links.py | 78 + .../sphinx/ext/comment_eater.py | 169 + .../sphinx/ext/compiler_unparse.py | 865 + .../astropy_helpers/sphinx/ext/docscrape.py | 531 + .../sphinx/ext/docscrape_sphinx.py | 274 + .../astropy_helpers/sphinx/ext/doctest.py | 33 + .../sphinx/ext/edit_on_github.py | 165 + .../astropy_helpers/sphinx/ext/numpydoc.py | 187 + .../sphinx/ext/phantom_import.py | 167 + .../sphinx/ext/smart_resolver.py | 69 + .../ext/templates/autosummary_core/base.rst | 10 + .../ext/templates/autosummary_core/class.rst | 65 + .../ext/templates/autosummary_core/module.rst | 41 + .../sphinx/ext/tests/__init__.py | 70 + .../sphinx/ext/tests/test_automodapi.py | 346 + .../sphinx/ext/tests/test_automodsumm.py | 115 + .../sphinx/ext/tests/test_docscrape.py | 762 + .../sphinx/ext/tests/test_utils.py | 34 + .../astropy_helpers/sphinx/ext/tocdepthfix.py | 18 + .../astropy_helpers/sphinx/ext/traitsdoc.py | 142 + .../astropy_helpers/sphinx/ext/utils.py | 65 + 
.../astropy_helpers/sphinx/ext/viewcode.py | 216 + .../sphinx/local/python3links.inv | 7 + .../astropy_helpers/sphinx/setup_package.py | 10 + .../themes/bootstrap-astropy/globaltoc.html | 3 + .../themes/bootstrap-astropy/layout.html | 96 + .../themes/bootstrap-astropy/localtoc.html | 3 + .../themes/bootstrap-astropy/searchbox.html | 7 + .../static/astropy_linkout_20.png | Bin 0 -> 1725 bytes .../bootstrap-astropy/static/astropy_logo.ico | Bin 0 -> 1150 bytes .../static/astropy_logo_32.png | Bin 0 -> 1884 bytes .../static/bootstrap-astropy.css | 585 + .../bootstrap-astropy/static/copybutton.js | 57 + .../bootstrap-astropy/static/sidebar.js | 160 + .../themes/bootstrap-astropy/theme.conf | 10 + .../astropy_helpers/src/__init__.py | 0 .../astropy_helpers/src/compiler.c | 129 + .../astropy_helpers/src/setup_package.py | 2 + .../astropy_helpers/test_helpers.py | 207 + .../astropy_helpers/utils.py | 162 + .../astropy_helpers/version.py | 169 + .../astropy_helpers/version_helpers.py | 191 + .../dist/astropy_helpers-0.4.2-py2.7.egg | Bin 0 -> 210047 bytes astropy_helpers/ez_setup.py | 382 + .../licenses/LICENSE_COPYBUTTON.rst | 50 + astropy_helpers/licenses/LICENSE_NUMPYDOC.rst | 94 + astropy_helpers/setup.cfg | 5 + astropy_helpers/setup.py | 50 + astropy_helpers/tox.ini | 12 + cextern/.gitignore | 1 + cextern/README.rst | 8 + cextern/cfitsio/License.txt | 25 + cextern/cfitsio/adler32.c | 167 + cextern/cfitsio/buffers.c | 1371 + cextern/cfitsio/cfileio.c | 7267 ++++ cextern/cfitsio/changes.txt | 4050 +++ cextern/cfitsio/checksum.c | 508 + cextern/cfitsio/crc32.c | 440 + cextern/cfitsio/crc32.h | 441 + cextern/cfitsio/deflate.c | 1832 ++ cextern/cfitsio/deflate.h | 340 + cextern/cfitsio/drvrfile.c | 973 + cextern/cfitsio/drvrgsiftp.c | 522 + cextern/cfitsio/drvrgsiftp.h | 21 + cextern/cfitsio/drvrmem.c | 1214 + cextern/cfitsio/drvrnet.c | 2741 ++ cextern/cfitsio/drvrsmem.c | 973 + cextern/cfitsio/drvrsmem.h | 179 + cextern/cfitsio/editcol.c | 2477 ++ 
cextern/cfitsio/edithdu.c | 883 + cextern/cfitsio/eval.l | 545 + cextern/cfitsio/eval.y | 5837 ++++ cextern/cfitsio/eval_defs.h | 163 + cextern/cfitsio/eval_f.c | 2839 ++ cextern/cfitsio/eval_l.c | 2252 ++ cextern/cfitsio/eval_tab.h | 42 + cextern/cfitsio/eval_y.c | 7333 +++++ cextern/cfitsio/fits_hcompress.c | 1858 ++ cextern/cfitsio/fits_hdecompress.c | 2618 ++ cextern/cfitsio/fitscore.c | 9349 ++++++ cextern/cfitsio/fitsio.h | 1953 ++ cextern/cfitsio/fitsio2.h | 1209 + cextern/cfitsio/getcol.c | 1055 + cextern/cfitsio/getcolb.c | 2002 ++ cextern/cfitsio/getcold.c | 1677 + cextern/cfitsio/getcole.c | 1680 + cextern/cfitsio/getcoli.c | 1902 ++ cextern/cfitsio/getcolj.c | 3728 +++ cextern/cfitsio/getcolk.c | 1895 ++ cextern/cfitsio/getcoll.c | 621 + cextern/cfitsio/getcols.c | 835 + cextern/cfitsio/getcolsb.c | 1991 ++ cextern/cfitsio/getcolui.c | 1908 ++ cextern/cfitsio/getcoluj.c | 1902 ++ cextern/cfitsio/getcoluk.c | 1917 ++ cextern/cfitsio/getkey.c | 3247 ++ cextern/cfitsio/group.c | 6463 ++++ cextern/cfitsio/group.h | 65 + cextern/cfitsio/grparser.c | 1379 + cextern/cfitsio/grparser.h | 185 + cextern/cfitsio/histo.c | 2221 ++ cextern/cfitsio/imcompress.c | 9837 ++++++ cextern/cfitsio/infback.c | 632 + cextern/cfitsio/inffast.c | 340 + cextern/cfitsio/inffast.h | 11 + cextern/cfitsio/inffixed.h | 94 + cextern/cfitsio/inflate.c | 1480 + cextern/cfitsio/inflate.h | 122 + cextern/cfitsio/inftrees.c | 330 + cextern/cfitsio/inftrees.h | 62 + cextern/cfitsio/iraffits.c | 2073 ++ cextern/cfitsio/longnam.h | 593 + cextern/cfitsio/modkey.c | 1755 + cextern/cfitsio/pliocomp.c | 331 + cextern/cfitsio/putcol.c | 1929 ++ cextern/cfitsio/putcolb.c | 1013 + cextern/cfitsio/putcold.c | 1060 + cextern/cfitsio/putcole.c | 1074 + cextern/cfitsio/putcoli.c | 986 + cextern/cfitsio/putcolj.c | 1992 ++ cextern/cfitsio/putcolk.c | 1013 + cextern/cfitsio/putcoll.c | 369 + cextern/cfitsio/putcols.c | 303 + cextern/cfitsio/putcolsb.c | 974 + cextern/cfitsio/putcolu.c | 629 + 
cextern/cfitsio/putcolui.c | 969 + cextern/cfitsio/putcoluj.c | 977 + cextern/cfitsio/putcoluk.c | 993 + cextern/cfitsio/putkey.c | 3085 ++ cextern/cfitsio/quantize.c | 3920 +++ cextern/cfitsio/region.c | 1752 + cextern/cfitsio/region.h | 82 + cextern/cfitsio/ricecomp.c | 1353 + cextern/cfitsio/scalnull.c | 229 + cextern/cfitsio/swapproc.c | 247 + cextern/cfitsio/trees.c | 1242 + cextern/cfitsio/trees.h | 128 + cextern/cfitsio/uncompr.c | 57 + cextern/cfitsio/wcssub.c | 1043 + cextern/cfitsio/wcsutil.c | 502 + cextern/cfitsio/zcompress.c | 504 + cextern/cfitsio/zconf.h | 426 + cextern/cfitsio/zlib.h | 1613 + cextern/cfitsio/zuncompress.c | 603 + cextern/cfitsio/zutil.c | 316 + cextern/cfitsio/zutil.h | 272 + cextern/erfa/erfa.c | 27361 ++++++++++++++++ cextern/erfa/erfa.h | 628 + cextern/expat/CMake.README | 42 + cextern/expat/CMakeLists.txt | 111 + cextern/expat/COPYING | 22 + cextern/expat/Changes | 205 + cextern/expat/ConfigureChecks.cmake | 44 + cextern/expat/MANIFEST | 141 + cextern/expat/Makefile.in | 201 + cextern/expat/README | 139 + cextern/expat/aclocal.m4 | 8460 +++++ cextern/expat/amiga/Makefile | 336 + cextern/expat/amiga/README.txt | 98 + cextern/expat/amiga/expat.xml | 264 + cextern/expat/amiga/expat_68k.c | 939 + cextern/expat/amiga/expat_68k.h | 94 + cextern/expat/amiga/expat_68k_handler_stubs.c | 185 + cextern/expat/amiga/expat_base.h | 40 + cextern/expat/amiga/expat_lib.c | 247 + cextern/expat/amiga/expat_vectors.c | 505 + cextern/expat/amiga/include/inline4/expat.h | 94 + .../expat/amiga/include/interfaces/expat.h | 98 + cextern/expat/amiga/include/libraries/expat.h | 566 + cextern/expat/amiga/include/proto/expat.h | 52 + cextern/expat/amiga/launch.c | 57 + cextern/expat/amiga/stdlib.c | 109 + cextern/expat/bcb5/README.txt | 87 + cextern/expat/bcb5/all_projects.bpg | 49 + cextern/expat/bcb5/elements.bpf | 4 + cextern/expat/bcb5/elements.bpr | 149 + cextern/expat/bcb5/elements.mak | 186 + cextern/expat/bcb5/expat.bpf | 6 + 
cextern/expat/bcb5/expat.bpr | 140 + cextern/expat/bcb5/expat.mak | 187 + cextern/expat/bcb5/expat_static.bpf | 5 + cextern/expat/bcb5/expat_static.bpr | 143 + cextern/expat/bcb5/expat_static.mak | 189 + cextern/expat/bcb5/expatw.bpf | 6 + cextern/expat/bcb5/expatw.bpr | 146 + cextern/expat/bcb5/expatw.mak | 187 + cextern/expat/bcb5/expatw_static.bpf | 5 + cextern/expat/bcb5/expatw_static.bpr | 152 + cextern/expat/bcb5/expatw_static.mak | 190 + cextern/expat/bcb5/libexpat_mtd.def | 141 + cextern/expat/bcb5/libexpatw_mtd.def | 140 + cextern/expat/bcb5/makefile.mak | 37 + cextern/expat/bcb5/outline.bpf | 4 + cextern/expat/bcb5/outline.bpr | 132 + cextern/expat/bcb5/outline.mak | 186 + cextern/expat/bcb5/setup.bat | 9 + cextern/expat/bcb5/xmlwf.bpf | 7 + cextern/expat/bcb5/xmlwf.bpr | 136 + cextern/expat/bcb5/xmlwf.mak | 187 + cextern/expat/configure | 18632 +++++++++++ cextern/expat/configure.in | 155 + cextern/expat/conftools/PrintPath | 116 + .../expat/conftools/ac_c_bigendian_cross.m4 | 81 + cextern/expat/conftools/expat.m4 | 43 + cextern/expat/conftools/get-version.sh | 46 + cextern/expat/conftools/install-sh | 520 + cextern/expat/conftools/ltmain.sh | 9642 ++++++ cextern/expat/doc/expat.png | Bin 0 -> 1027 bytes cextern/expat/doc/reference.html | 2390 ++ cextern/expat/doc/style.css | 101 + cextern/expat/doc/valid-xhtml10.png | Bin 0 -> 2368 bytes cextern/expat/doc/xmlwf.1 | 251 + cextern/expat/doc/xmlwf.sgml | 468 + cextern/expat/examples/elements.c | 65 + cextern/expat/examples/elements.dsp | 103 + cextern/expat/examples/outline.c | 106 + cextern/expat/examples/outline.dsp | 103 + cextern/expat/expat.dsw | 110 + cextern/expat/expat.pc.in | 11 + cextern/expat/expat_config.h.cmake | 91 + cextern/expat/expat_config.h.in | 102 + cextern/expat/lib/Makefile.MPW | 206 + cextern/expat/lib/amigaconfig.h | 32 + cextern/expat/lib/ascii.h | 92 + cextern/expat/lib/asciitab.h | 36 + cextern/expat/lib/expat.dsp | 185 + cextern/expat/lib/expat.h | 1047 + 
cextern/expat/lib/expat_external.h | 115 + cextern/expat/lib/expat_static.dsp | 162 + cextern/expat/lib/expatw.dsp | 185 + cextern/expat/lib/expatw_static.dsp | 162 + cextern/expat/lib/iasciitab.h | 37 + cextern/expat/lib/internal.h | 73 + cextern/expat/lib/latin1tab.h | 36 + cextern/expat/lib/libexpat.def | 73 + cextern/expat/lib/libexpatw.def | 73 + cextern/expat/lib/macconfig.h | 53 + cextern/expat/lib/nametab.h | 150 + cextern/expat/lib/utf8tab.h | 37 + cextern/expat/lib/winconfig.h | 30 + cextern/expat/lib/xmlparse.c | 6403 ++++ cextern/expat/lib/xmlrole.c | 1336 + cextern/expat/lib/xmlrole.h | 114 + cextern/expat/lib/xmltok.c | 1651 + cextern/expat/lib/xmltok.h | 316 + cextern/expat/lib/xmltok_impl.c | 1783 + cextern/expat/lib/xmltok_impl.h | 46 + cextern/expat/lib/xmltok_ns.c | 115 + cextern/expat/m4/libtool.m4 | 7851 +++++ cextern/expat/m4/ltoptions.m4 | 369 + cextern/expat/m4/ltsugar.m4 | 123 + cextern/expat/m4/ltversion.m4 | 23 + cextern/expat/m4/lt~obsolete.m4 | 98 + cextern/expat/tests/README.txt | 13 + cextern/expat/tests/benchmark/README.txt | 16 + cextern/expat/tests/benchmark/benchmark.c | 114 + cextern/expat/tests/benchmark/benchmark.dsp | 88 + cextern/expat/tests/benchmark/benchmark.dsw | 44 + cextern/expat/tests/chardata.c | 131 + cextern/expat/tests/chardata.h | 40 + cextern/expat/tests/minicheck.c | 182 + cextern/expat/tests/minicheck.h | 90 + cextern/expat/tests/runtests.c | 1515 + cextern/expat/tests/runtestspp.cpp | 6 + cextern/expat/tests/xmltest.sh | 142 + cextern/expat/vms/README.vms | 23 + cextern/expat/vms/descrip.mms | 70 + cextern/expat/vms/expat_config.h | 52 + cextern/expat/win32/MANIFEST.txt | 27 + cextern/expat/win32/README.txt | 80 + cextern/expat/win32/expat.iss | 69 + cextern/expat/xmlwf/codepage.c | 68 + cextern/expat/xmlwf/codepage.h | 6 + cextern/expat/xmlwf/ct.c | 147 + cextern/expat/xmlwf/filemap.h | 17 + cextern/expat/xmlwf/readfilemap.c | 100 + cextern/expat/xmlwf/unixfilemap.c | 65 + cextern/expat/xmlwf/win32filemap.c | 
96 + cextern/expat/xmlwf/xmlfile.c | 244 + cextern/expat/xmlwf/xmlfile.h | 20 + cextern/expat/xmlwf/xmlmime.c | 163 + cextern/expat/xmlwf/xmlmime.h | 19 + cextern/expat/xmlwf/xmltchar.h | 36 + cextern/expat/xmlwf/xmlurl.h | 13 + cextern/expat/xmlwf/xmlwf.c | 861 + cextern/expat/xmlwf/xmlwf.dsp | 139 + cextern/expat/xmlwf/xmlwin32url.cxx | 395 + cextern/trim_wcslib.sh | 13 + cextern/wcslib/C/GNUmakefile | 443 + cextern/wcslib/C/cel.c | 504 + cextern/wcslib/C/cel.h | 435 + cextern/wcslib/C/fitshdr.h | 443 + cextern/wcslib/C/fitshdr.l | 570 + cextern/wcslib/C/flexed/README | 4 + cextern/wcslib/C/flexed/fitshdr.c | 11947 +++++++ cextern/wcslib/C/flexed/wcsbth.c | 21065 ++++++++++++ cextern/wcslib/C/flexed/wcspih.c | 10832 ++++++ cextern/wcslib/C/flexed/wcsulex.c | 9428 ++++++ cextern/wcslib/C/flexed/wcsutrn.c | 5640 ++++ cextern/wcslib/C/getwcstab.c | 154 + cextern/wcslib/C/getwcstab.h | 189 + cextern/wcslib/C/lin.c | 749 + cextern/wcslib/C/lin.h | 448 + cextern/wcslib/C/log.c | 112 + cextern/wcslib/C/log.h | 166 + cextern/wcslib/C/makedefs.in | 240 + cextern/wcslib/C/prj.c | 8482 +++++ cextern/wcslib/C/prj.h | 816 + cextern/wcslib/C/spc.c | 1388 + cextern/wcslib/C/spc.h | 884 + cextern/wcslib/C/sph.c | 460 + cextern/wcslib/C/sph.h | 251 + cextern/wcslib/C/spx.c | 1148 + cextern/wcslib/C/spx.h | 538 + cextern/wcslib/C/tab.c | 1632 + cextern/wcslib/C/tab.h | 608 + cextern/wcslib/C/wcs.c | 3394 ++ cextern/wcslib/C/wcs.h | 1618 + cextern/wcslib/C/wcsbth.l | 2565 ++ cextern/wcslib/C/wcsconfig.h.in | 18 + cextern/wcslib/C/wcsconfig_tests.h.in | 18 + cextern/wcslib/C/wcserr.c | 160 + cextern/wcslib/C/wcserr.h | 253 + cextern/wcslib/C/wcsfix.c | 743 + cextern/wcslib/C/wcsfix.h | 399 + cextern/wcslib/C/wcshdr.c | 1076 + cextern/wcslib/C/wcshdr.h | 1129 + cextern/wcslib/C/wcslib.h | 60 + cextern/wcslib/C/wcsmath.h | 70 + cextern/wcslib/C/wcspih.l | 1183 + cextern/wcslib/C/wcsprintf.c | 167 + cextern/wcslib/C/wcsprintf.h | 150 + cextern/wcslib/C/wcstrig.c | 219 + 
cextern/wcslib/C/wcstrig.h | 212 + cextern/wcslib/C/wcsulex.l | 996 + cextern/wcslib/C/wcsunits.c | 225 + cextern/wcslib/C/wcsunits.h | 410 + cextern/wcslib/C/wcsutil.c | 343 + cextern/wcslib/C/wcsutil.h | 340 + cextern/wcslib/C/wcsutrn.l | 337 + cextern/wcslib/CHANGES | 2139 ++ cextern/wcslib/COPYING | 674 + cextern/wcslib/COPYING.LESSER | 165 + cextern/wcslib/GNUmakefile | 205 + cextern/wcslib/INSTALL | 327 + cextern/wcslib/README | 44 + cextern/wcslib/THANKS | 86 + cextern/wcslib/VALIDATION | 316 + cextern/wcslib/config/config.guess | 1519 + cextern/wcslib/config/config.sub | 1766 + cextern/wcslib/config/elisp-comp | 60 + cextern/wcslib/config/install-sh | 295 + cextern/wcslib/config/mdate-sh | 133 + cextern/wcslib/config/missing | 360 + cextern/wcslib/config/mkinstalldirs | 137 + cextern/wcslib/config/move-if-change | 13 + cextern/wcslib/configure | 13254 ++++++++ cextern/wcslib/configure.ac | 522 + cextern/wcslib/flavours | 172 + cextern/wcslib/makedefs.in | 240 + cextern/wcslib/wcsconfig.h.in | 18 + cextern/wcslib/wcsconfig_f77.h.in | 21 + cextern/wcslib/wcsconfig_tests.h.in | 18 + cextern/wcslib/wcsconfig_utils.h.in | 35 + cextern/wcslib/wcslib.pc.in | 11 + docs/Makefile | 132 + docs/_pkgtemplate.rst | 96 + docs/_static/astropy_logo.pdf | Bin 0 -> 5384 bytes docs/_static/dev.png | Bin 0 -> 3177 bytes docs/_static/mature.png | Bin 0 -> 3189 bytes docs/_static/planned.png | Bin 0 -> 3116 bytes docs/_static/stable.png | Bin 0 -> 6439 bytes docs/_static/timer_prediction_pow10.png | Bin 0 -> 28453 bytes docs/_templates/autosummary/base.rst | 1 + docs/_templates/autosummary/class.rst | 1 + docs/_templates/autosummary/module.rst | 1 + docs/astropy_banner_96.png | Bin 0 -> 26836 bytes docs/changelog.rst | 7 + docs/conf.py | 203 + docs/config/config_0_4_transition.rst | 325 + docs/config/index.rst | 370 + docs/constants/index.rst | 73 + docs/convolution/images/astropy.png | Bin 0 -> 9916 bytes docs/convolution/images/original.png | Bin 0 -> 10448 bytes 
docs/convolution/images/scipy.png | Bin 0 -> 10058 bytes docs/convolution/index.rst | 140 + docs/convolution/kernels.rst | 337 + docs/convolution/using.rst | 94 + docs/coordinates/angles.rst | 165 + docs/coordinates/definitions.rst | 41 + docs/coordinates/formatting.rst | 35 + docs/coordinates/frames.rst | 340 + docs/coordinates/index.rst | 356 + docs/coordinates/matchsep.rst | 112 + docs/coordinates/references.txt | 6 + docs/coordinates/representations.rst | 169 + docs/coordinates/sgr-example.py | 118 + docs/coordinates/sgr-example.rst | 187 + docs/coordinates/skycoord.rst | 681 + docs/coordinates/transforming.rst | 95 + docs/cosmology/index.rst | 408 + docs/credits.rst | 124 + docs/development/affiliated-packages.rst | 305 + docs/development/building.rst | 120 + docs/development/ccython.rst | 74 + docs/development/codeguide.rst | 775 + docs/development/codeguide_emacs.rst | 208 + docs/development/docguide.rst | 152 + docs/development/docrules.rst | 547 + docs/development/releasing.rst | 581 + docs/development/scripts.rst | 63 + docs/development/testguide.rst | 853 + docs/development/vision.rst | 112 + .../workflow/additional_git_topics.rst | 413 + docs/development/workflow/branch_dropdown.png | Bin 0 -> 39966 bytes docs/development/workflow/command_history.rst | 30 + docs/development/workflow/command_history.sh | 112 + .../workflow/command_history_with_output.sh | 457 + .../workflow/development_workflow.rst | 459 + docs/development/workflow/forking_button.png | Bin 0 -> 9495 bytes .../workflow/get_devel_version.rst | 333 + .../workflow/git_edit_workflow_examples.rst | 562 + docs/development/workflow/git_install.rst | 63 + docs/development/workflow/git_links.inc | 62 + docs/development/workflow/git_resources.rst | 54 + docs/development/workflow/known_projects.inc | 60 + docs/development/workflow/links.inc | 4 + .../workflow/maintainer_workflow.rst | 259 + docs/development/workflow/milestone.png | Bin 0 -> 23069 bytes docs/development/workflow/patches.rst | 112 + 
docs/development/workflow/pull_button.png | Bin 0 -> 15528 bytes docs/development/workflow/terminal_cast.rst | 17 + docs/development/workflow/this_project.inc | 2 + docs/development/workflow/virtual_pythons.rst | 189 + .../workflow/virtualenv_detail.rst | 164 + .../workflow/worked_example_switch_branch.png | Bin 0 -> 34439 bytes docs/getting_started.rst | 70 + docs/index.rst | 217 + docs/install.rst | 382 + docs/io/ascii/base_classes.rst | 21 + docs/io/ascii/extension_classes.rst | 30 + docs/io/ascii/fixed_width_gallery.rst | 359 + docs/io/ascii/index.rst | 202 + docs/io/ascii/read.rst | 387 + docs/io/ascii/references.txt | 4 + docs/io/ascii/toc.txt | 8 + docs/io/ascii/write.rst | 265 + docs/io/fits/api/cards.rst | 31 + docs/io/fits/api/diff.rst | 47 + docs/io/fits/api/files.rst | 44 + docs/io/fits/api/hdulists.rst | 14 + docs/io/fits/api/hdus.rst | 43 + docs/io/fits/api/headers.rst | 13 + docs/io/fits/api/images.rst | 30 + docs/io/fits/api/tables.rst | 63 + docs/io/fits/api/verification.rst | 72 + docs/io/fits/appendix/faq.rst | 800 + docs/io/fits/appendix/header_transition.rst | 427 + docs/io/fits/appendix/history.rst | 3248 ++ docs/io/fits/images/Blue.jpg | Bin 0 -> 35560 bytes docs/io/fits/images/Green.jpg | Bin 0 -> 33315 bytes docs/io/fits/images/Hs-2009-14-a-web.jpg | Bin 0 -> 23490 bytes docs/io/fits/images/Red.jpg | Bin 0 -> 34623 bytes docs/io/fits/index.rst | 717 + docs/io/fits/usage/examples.rst | 69 + docs/io/fits/usage/headers.rst | 380 + docs/io/fits/usage/image.rst | 209 + docs/io/fits/usage/misc.rst | 42 + docs/io/fits/usage/scripts.rst | 29 + docs/io/fits/usage/table.rst | 350 + docs/io/fits/usage/unfamiliar.rst | 535 + docs/io/fits/usage/verification.rst | 348 + docs/io/misc.rst | 16 + docs/io/registry.rst | 105 + docs/io/unified.rst | 288 + docs/io/votable/.gitignore | 2 + docs/io/votable/api_exceptions.rst | 41 + docs/io/votable/index.rst | 444 + docs/io/votable/references.txt | 23 + docs/known_issues.rst | 240 + docs/license.rst | 17 + 
docs/logging.rst | 151 + docs/make.bat | 170 + docs/modeling/algorithms.rst | 61 + docs/modeling/design.rst | 28 + docs/modeling/fitting.rst | 125 + docs/modeling/index.rst | 266 + docs/modeling/links.inc | 1 + docs/modeling/models.rst | 374 + docs/modeling/new.rst | 391 + docs/modeling/parameters.rst | 170 + docs/nddata/index.rst | 71 + docs/nddata/nddata.rst | 158 + docs/nddata/subclassing.rst | 58 + docs/nitpick-exceptions | 55 + docs/overview.rst | 80 + docs/rtd-pip-requirements | 4 + docs/stability.rst | 266 + docs/stats/index.rst | 32 + docs/table/access_table.rst | 415 + docs/table/construct_table.rst | 844 + docs/table/index.rst | 263 + docs/table/io.rst | 65 + docs/table/masking.rst | 176 + docs/table/modify_table.rst | 177 + docs/table/operations.rst | 771 + docs/table/references.txt | 6 + docs/table/table_repr_html.png | Bin 0 -> 9278 bytes docs/time/index.rst | 799 + docs/time/references.txt | 4 + docs/time/time_scale_conversion.odg | Bin 0 -> 12434 bytes docs/time/time_scale_conversion.png | Bin 0 -> 6773 bytes docs/units/combining_and_defining.rst | 69 + docs/units/conversion.rst | 65 + docs/units/decomposing_and_composing.rst | 99 + docs/units/equivalencies.rst | 331 + docs/units/format.rst | 224 + docs/units/index.rst | 199 + docs/units/quantity.rst | 289 + docs/units/standard_units.rst | 149 + docs/utils/index.rst | 84 + docs/vo/conesearch/client.rst | 731 + .../images/astropy_vo_flowchart.png | Bin 0 -> 20057 bytes .../images/client_predict_search_n.png | Bin 0 -> 35668 bytes .../images/client_predict_search_t.png | Bin 0 -> 28522 bytes .../vo/conesearch/images/validator_html_1.png | Bin 0 -> 28152 bytes .../vo/conesearch/images/validator_html_2.png | Bin 0 -> 56370 bytes .../vo/conesearch/images/validator_html_3.png | Bin 0 -> 45038 bytes .../vo/conesearch/images/validator_html_4.png | Bin 0 -> 65404 bytes docs/vo/conesearch/index.rst | 191 + docs/vo/conesearch/validator.rst | 362 + docs/vo/index.rst | 38 + 
docs/vo/samp/advanced_embed_samp_hub.rst | 134 + docs/vo/samp/example_clients.rst | 127 + docs/vo/samp/example_hub.rst | 51 + docs/vo/samp/example_table_image.rst | 274 + docs/vo/samp/index.rst | 67 + docs/vo/samp/references.txt | 5 + docs/warnings.rst | 55 + docs/wcs/examples/from_file.py | 43 + docs/wcs/examples/programmatic.py | 44 + docs/wcs/history.rst | 90 + docs/wcs/index.rst | 198 + docs/wcs/references.txt | 6 + docs/wcs/relax.rst | 377 + docs/whatsnew/0.1.rst | 26 + docs/whatsnew/0.2.rst | 9 + docs/whatsnew/0.3.rst | 9 + docs/whatsnew/0.4.rst | 225 + docs/whatsnew/index.rst | 11 + ez_setup.py | 382 + licenses/CONFIGOBJ_LICENSE.rst | 32 + licenses/DATATABLES_LICENSE.rst | 29 + licenses/ERFA.rst | 53 + licenses/EXPAT_LICENSE.rst | 22 + licenses/FUTURES_LICENSE.rst | 21 + licenses/JQUERY_LICENSE.rst | 21 + licenses/LICENSE.rst | 26 + licenses/PLY_LICENSE.rst | 30 + licenses/PYFITS.rst | 29 + licenses/PYTEST_LICENSE.rst | 18 + licenses/README.rst | 5 + licenses/SIX_LICENSE.rst | 18 + licenses/SPHINXEXT_LICENSES.rst | 80 + licenses/SYMPY.rst | 28 + licenses/WCSLIB_LICENSE.rst | 165 + scripts/README.rst | 5 + scripts/fitscheck | 8 + scripts/fitsdiff | 8 + scripts/fitsheader | 5 + scripts/samp_hub | 4 + scripts/volint | 5 + scripts/wcslint | 5 + setup.cfg | 25 + setup.py | 113 + static/wininst_background.bmp | Bin 0 -> 158742 bytes 1416 files changed, 739592 insertions(+) create mode 100644 CHANGES.rst create mode 100644 PKG-INFO create mode 100644 README.rst create mode 100644 ah_bootstrap.py create mode 100644 astropy/__init__.py create mode 100644 astropy/astropy.cfg create mode 100644 astropy/config/__init__.py create mode 100644 astropy/config/affiliated.py create mode 100644 astropy/config/configuration.py create mode 100644 astropy/config/paths.py create mode 100644 astropy/config/setup_package.py create mode 100644 astropy/config/tests/__init__.py create mode 100644 astropy/config/tests/data/alias.cfg create mode 100644 
astropy/config/tests/data/astropy.0.3.cfg create mode 100644 astropy/config/tests/data/astropy.0.3.windows.cfg create mode 100644 astropy/config/tests/data/deprecated.cfg create mode 100644 astropy/config/tests/data/empty.cfg create mode 100644 astropy/config/tests/data/not_empty.cfg create mode 100644 astropy/config/tests/test_configs.py create mode 100644 astropy/conftest.py create mode 100644 astropy/constants/__init__.py create mode 100644 astropy/constants/cgs.py create mode 100644 astropy/constants/constant.py create mode 100644 astropy/constants/setup_package.py create mode 100644 astropy/constants/si.py create mode 100644 astropy/constants/tests/__init__.py create mode 100644 astropy/constants/tests/test_constant.py create mode 100644 astropy/convolution/__init__.py create mode 100644 astropy/convolution/boundary_extend.c create mode 100644 astropy/convolution/boundary_extend.pyx create mode 100644 astropy/convolution/boundary_fill.c create mode 100644 astropy/convolution/boundary_fill.pyx create mode 100644 astropy/convolution/boundary_none.c create mode 100644 astropy/convolution/boundary_none.pyx create mode 100644 astropy/convolution/boundary_wrap.c create mode 100644 astropy/convolution/boundary_wrap.pyx create mode 100644 astropy/convolution/convolve.py create mode 100644 astropy/convolution/core.py create mode 100644 astropy/convolution/kernels.py create mode 100644 astropy/convolution/setup_package.py create mode 100644 astropy/convolution/tests/__init__.py create mode 100644 astropy/convolution/tests/test_convolve.py create mode 100644 astropy/convolution/tests/test_convolve_fft.py create mode 100644 astropy/convolution/tests/test_convolve_kernels.py create mode 100644 astropy/convolution/tests/test_convolve_speeds.py create mode 100644 astropy/convolution/tests/test_discretize.py create mode 100644 astropy/convolution/tests/test_kernel_class.py create mode 100644 astropy/convolution/utils.py create mode 100644 astropy/coordinates/__init__.py 
create mode 100644 astropy/coordinates/angle_lextab.py create mode 100644 astropy/coordinates/angle_parsetab.py create mode 100644 astropy/coordinates/angle_utilities.py create mode 100644 astropy/coordinates/angles.py create mode 100644 astropy/coordinates/baseframe.py create mode 100644 astropy/coordinates/builtin_frames.py create mode 100644 astropy/coordinates/distances.py create mode 100644 astropy/coordinates/earth.py create mode 100644 astropy/coordinates/earth_orientation.py create mode 100644 astropy/coordinates/errors.py create mode 100644 astropy/coordinates/matching.py create mode 100644 astropy/coordinates/name_resolve.py create mode 100644 astropy/coordinates/representation.py create mode 100644 astropy/coordinates/setup_package.py create mode 100644 astropy/coordinates/sky_coordinate.py create mode 100644 astropy/coordinates/tests/__init__.py create mode 100644 astropy/coordinates/tests/accuracy/__init__.py create mode 100644 astropy/coordinates/tests/accuracy/fk4_no_e_fk4.csv create mode 100644 astropy/coordinates/tests/accuracy/fk4_no_e_fk5.csv create mode 100644 astropy/coordinates/tests/accuracy/galactic_fk4.csv create mode 100644 astropy/coordinates/tests/accuracy/generate_ref_ast.py create mode 100644 astropy/coordinates/tests/accuracy/icrs_fk5.csv create mode 100644 astropy/coordinates/tests/accuracy/test_fk4_no_e_fk4.py create mode 100644 astropy/coordinates/tests/accuracy/test_fk4_no_e_fk5.py create mode 100644 astropy/coordinates/tests/accuracy/test_galactic_fk4.py create mode 100644 astropy/coordinates/tests/accuracy/test_icrs_fk5.py create mode 100644 astropy/coordinates/tests/test_angles.py create mode 100644 astropy/coordinates/tests/test_angular_separation.py create mode 100644 astropy/coordinates/tests/test_api_ape5.py create mode 100644 astropy/coordinates/tests/test_arrays.py create mode 100644 astropy/coordinates/tests/test_distance.py create mode 100644 astropy/coordinates/tests/test_earth.py create mode 100644 
astropy/coordinates/tests/test_formatting.py create mode 100644 astropy/coordinates/tests/test_frames.py create mode 100644 astropy/coordinates/tests/test_matching.py create mode 100644 astropy/coordinates/tests/test_name_resolve.py create mode 100644 astropy/coordinates/tests/test_pickle.py create mode 100644 astropy/coordinates/tests/test_representation.py create mode 100644 astropy/coordinates/tests/test_sky_coord.py create mode 100644 astropy/coordinates/tests/test_transformations.py create mode 100644 astropy/coordinates/transformations.py create mode 100644 astropy/cosmology/__init__.py create mode 100644 astropy/cosmology/core.py create mode 100644 astropy/cosmology/funcs.py create mode 100644 astropy/cosmology/parameters.py create mode 100644 astropy/cosmology/setup_package.py create mode 100644 astropy/cosmology/tests/__init__.py create mode 100644 astropy/cosmology/tests/test_cosmology.py create mode 100644 astropy/cython_version.py create mode 100644 astropy/extern/__init__.py create mode 100644 astropy/extern/bundled/__init__.py create mode 100644 astropy/extern/bundled/six.py create mode 100644 astropy/extern/configobj.py create mode 100644 astropy/extern/configobj_py2/__init__.py create mode 100644 astropy/extern/configobj_py2/configobj.py create mode 100644 astropy/extern/configobj_py2/validate.py create mode 100644 astropy/extern/configobj_py3/__init__.py create mode 100644 astropy/extern/configobj_py3/configobj.py create mode 100644 astropy/extern/configobj_py3/validate.py create mode 100644 astropy/extern/js/jquery-1.11.0.js create mode 100644 astropy/extern/js/jquery.dataTables.js create mode 100644 astropy/extern/ply/__init__.py create mode 100644 astropy/extern/ply/cpp.py create mode 100644 astropy/extern/ply/ctokens.py create mode 100644 astropy/extern/ply/lex.py create mode 100644 astropy/extern/ply/yacc.py create mode 100644 astropy/extern/pytest.py create mode 100644 astropy/extern/setup_package.py create mode 100644 astropy/extern/six.py 
create mode 100644 astropy/io/__init__.py create mode 100644 astropy/io/ascii/__init__.py create mode 100644 astropy/io/ascii/basic.py create mode 100644 astropy/io/ascii/cds.py create mode 100644 astropy/io/ascii/connect.py create mode 100644 astropy/io/ascii/core.py create mode 100644 astropy/io/ascii/daophot.py create mode 100644 astropy/io/ascii/fixedwidth.py create mode 100644 astropy/io/ascii/html.py create mode 100644 astropy/io/ascii/ipac.py create mode 100644 astropy/io/ascii/latex.py create mode 100644 astropy/io/ascii/setup_package.py create mode 100644 astropy/io/ascii/sextractor.py create mode 100644 astropy/io/ascii/tests/__init__.py create mode 100644 astropy/io/ascii/tests/common.py create mode 100644 astropy/io/ascii/tests/t/apostrophe.rdb create mode 100644 astropy/io/ascii/tests/t/apostrophe.tab create mode 100644 astropy/io/ascii/tests/t/bad.txt create mode 100644 astropy/io/ascii/tests/t/bars_at_ends.txt create mode 100644 astropy/io/ascii/tests/t/cds.dat create mode 100644 astropy/io/ascii/tests/t/cds/glob/ReadMe create mode 100644 astropy/io/ascii/tests/t/cds/glob/lmxbrefs.dat create mode 100644 astropy/io/ascii/tests/t/cds/multi/ReadMe create mode 100644 astropy/io/ascii/tests/t/cds/multi/lhs2065.dat create mode 100644 astropy/io/ascii/tests/t/cds/multi/lp944-20.dat create mode 100644 astropy/io/ascii/tests/t/cds2.dat create mode 100644 astropy/io/ascii/tests/t/cds_malformed.dat create mode 100644 astropy/io/ascii/tests/t/commented_header.dat create mode 100644 astropy/io/ascii/tests/t/commented_header2.dat create mode 100644 astropy/io/ascii/tests/t/continuation.dat create mode 100644 astropy/io/ascii/tests/t/daophot.dat create mode 100644 astropy/io/ascii/tests/t/daophot.dat.gz create mode 100644 astropy/io/ascii/tests/t/daophot2.dat create mode 100644 astropy/io/ascii/tests/t/daophot3.dat create mode 100644 astropy/io/ascii/tests/t/fill_values.txt create mode 100644 astropy/io/ascii/tests/t/html.html create mode 100644 
astropy/io/ascii/tests/t/html2.html create mode 100644 astropy/io/ascii/tests/t/ipac.dat create mode 100644 astropy/io/ascii/tests/t/ipac.dat.bz2 create mode 100644 astropy/io/ascii/tests/t/latex1.tex create mode 100644 astropy/io/ascii/tests/t/latex1.tex.gz create mode 100644 astropy/io/ascii/tests/t/latex2.tex create mode 100644 astropy/io/ascii/tests/t/nls1_stackinfo.dbout create mode 100644 astropy/io/ascii/tests/t/no_data_cds.dat create mode 100644 astropy/io/ascii/tests/t/no_data_daophot.dat create mode 100644 astropy/io/ascii/tests/t/no_data_ipac.dat create mode 100644 astropy/io/ascii/tests/t/no_data_sextractor.dat create mode 100644 astropy/io/ascii/tests/t/no_data_with_header.dat create mode 100644 astropy/io/ascii/tests/t/no_data_without_header.dat create mode 100644 astropy/io/ascii/tests/t/sextractor.dat create mode 100644 astropy/io/ascii/tests/t/sextractor2.dat create mode 100644 astropy/io/ascii/tests/t/short.rdb create mode 100644 astropy/io/ascii/tests/t/short.rdb.bz2 create mode 100644 astropy/io/ascii/tests/t/short.rdb.gz create mode 100644 astropy/io/ascii/tests/t/short.tab create mode 100644 astropy/io/ascii/tests/t/simple.txt create mode 100644 astropy/io/ascii/tests/t/simple2.txt create mode 100644 astropy/io/ascii/tests/t/simple3.txt create mode 100644 astropy/io/ascii/tests/t/simple4.txt create mode 100644 astropy/io/ascii/tests/t/simple5.txt create mode 100644 astropy/io/ascii/tests/t/simple_csv.csv create mode 100644 astropy/io/ascii/tests/t/simple_csv_missing.csv create mode 100644 astropy/io/ascii/tests/t/space_delim_blank_lines.txt create mode 100644 astropy/io/ascii/tests/t/space_delim_no_header.dat create mode 100644 astropy/io/ascii/tests/t/space_delim_no_names.dat create mode 100644 astropy/io/ascii/tests/t/test4.dat create mode 100644 astropy/io/ascii/tests/t/test5.dat create mode 100644 astropy/io/ascii/tests/t/vizier/ReadMe create mode 100644 astropy/io/ascii/tests/t/vizier/table1.dat create mode 100644 
astropy/io/ascii/tests/t/vizier/table5.dat create mode 100644 astropy/io/ascii/tests/t/vots_spec.dat create mode 100644 astropy/io/ascii/tests/t/whitespace.dat create mode 100644 astropy/io/ascii/tests/test_cds_header_from_readme.py create mode 100644 astropy/io/ascii/tests/test_compressed.py create mode 100644 astropy/io/ascii/tests/test_connect.py create mode 100644 astropy/io/ascii/tests/test_fixedwidth.py create mode 100644 astropy/io/ascii/tests/test_html.py create mode 100644 astropy/io/ascii/tests/test_ipac_definitions.py create mode 100644 astropy/io/ascii/tests/test_read.py create mode 100644 astropy/io/ascii/tests/test_types.py create mode 100644 astropy/io/ascii/tests/test_write.py create mode 100644 astropy/io/ascii/ui.py create mode 100644 astropy/io/fits/__init__.py create mode 100644 astropy/io/fits/card.py create mode 100644 astropy/io/fits/column.py create mode 100644 astropy/io/fits/connect.py create mode 100644 astropy/io/fits/convenience.py create mode 100644 astropy/io/fits/diff.py create mode 100644 astropy/io/fits/file.py create mode 100644 astropy/io/fits/fitsrec.py create mode 100644 astropy/io/fits/hdu/__init__.py create mode 100644 astropy/io/fits/hdu/base.py create mode 100644 astropy/io/fits/hdu/compressed.py create mode 100644 astropy/io/fits/hdu/groups.py create mode 100644 astropy/io/fits/hdu/hdulist.py create mode 100644 astropy/io/fits/hdu/image.py create mode 100644 astropy/io/fits/hdu/nonstandard.py create mode 100644 astropy/io/fits/hdu/streaming.py create mode 100644 astropy/io/fits/hdu/table.py create mode 100644 astropy/io/fits/header.py create mode 100644 astropy/io/fits/py3compat.py create mode 100644 astropy/io/fits/scripts/__init__.py create mode 100644 astropy/io/fits/scripts/fitscheck.py create mode 100644 astropy/io/fits/scripts/fitsdiff.py create mode 100644 astropy/io/fits/scripts/fitsheader.py create mode 100644 astropy/io/fits/setup_package.py create mode 100644 astropy/io/fits/src/compressionmodule.c create mode 
100644 astropy/io/fits/src/compressionmodule.h create mode 100644 astropy/io/fits/tests/__init__.py create mode 100644 astropy/io/fits/tests/cfitsio_verify.c create mode 100644 astropy/io/fits/tests/data/arange.fits create mode 100644 astropy/io/fits/tests/data/ascii.fits create mode 100644 astropy/io/fits/tests/data/blank.fits create mode 100644 astropy/io/fits/tests/data/checksum.fits create mode 100644 astropy/io/fits/tests/data/comp.fits create mode 100644 astropy/io/fits/tests/data/fixed-1890.fits create mode 100644 astropy/io/fits/tests/data/o4sp040b0_raw.fits create mode 100644 astropy/io/fits/tests/data/random_groups.fits create mode 100644 astropy/io/fits/tests/data/scale.fits create mode 100644 astropy/io/fits/tests/data/stddata.fits create mode 100644 astropy/io/fits/tests/data/table.fits create mode 100644 astropy/io/fits/tests/data/tb.fits create mode 100644 astropy/io/fits/tests/data/test0.fits create mode 100644 astropy/io/fits/tests/data/zerowidth.fits create mode 100644 astropy/io/fits/tests/test_checksum.py create mode 100644 astropy/io/fits/tests/test_connect.py create mode 100644 astropy/io/fits/tests/test_core.py create mode 100644 astropy/io/fits/tests/test_diff.py create mode 100644 astropy/io/fits/tests/test_division.py create mode 100644 astropy/io/fits/tests/test_groups.py create mode 100644 astropy/io/fits/tests/test_hdulist.py create mode 100644 astropy/io/fits/tests/test_header.py create mode 100644 astropy/io/fits/tests/test_image.py create mode 100644 astropy/io/fits/tests/test_nonstandard.py create mode 100644 astropy/io/fits/tests/test_structured.py create mode 100644 astropy/io/fits/tests/test_table.py create mode 100644 astropy/io/fits/tests/test_uint.py create mode 100644 astropy/io/fits/tests/test_util.py create mode 100644 astropy/io/fits/tests/util.py create mode 100644 astropy/io/fits/util.py create mode 100644 astropy/io/fits/verify.py create mode 100644 astropy/io/misc/__init__.py create mode 100644 
astropy/io/misc/connect.py create mode 100644 astropy/io/misc/hdf5.py create mode 100644 astropy/io/misc/pickle_helpers.py create mode 100644 astropy/io/misc/tests/__init__.py create mode 100644 astropy/io/misc/tests/test_hdf5.py create mode 100644 astropy/io/misc/tests/test_pickle_helpers.py create mode 100644 astropy/io/registry.py create mode 100644 astropy/io/setup_package.py create mode 100644 astropy/io/tests/__init__.py create mode 100644 astropy/io/tests/test_registry.py create mode 100644 astropy/io/votable/__init__.py create mode 100644 astropy/io/votable/connect.py create mode 100644 astropy/io/votable/converters.py create mode 100644 astropy/io/votable/data/VOTable.dtd create mode 100644 astropy/io/votable/data/VOTable.v1.1.xsd create mode 100644 astropy/io/votable/data/VOTable.v1.2.xsd create mode 100644 astropy/io/votable/data/VOTable.v1.3.xsd create mode 100644 astropy/io/votable/data/ucd1p-words.txt create mode 100644 astropy/io/votable/exceptions.py create mode 100755 astropy/io/votable/setup_package.py create mode 100644 astropy/io/votable/src/tablewriter.c create mode 100644 astropy/io/votable/table.py create mode 100644 astropy/io/votable/tests/__init__.py create mode 100644 astropy/io/votable/tests/converter_test.py create mode 100644 astropy/io/votable/tests/data/irsa-nph-error.xml create mode 100644 astropy/io/votable/tests/data/irsa-nph-m31.xml create mode 100644 astropy/io/votable/tests/data/names.xml create mode 100644 astropy/io/votable/tests/data/no_resource.txt create mode 100644 astropy/io/votable/tests/data/no_resource.xml create mode 100644 astropy/io/votable/tests/data/nonstandard_units.xml create mode 100644 astropy/io/votable/tests/data/regression.bin.tabledata.truth.1.1.xml create mode 100644 astropy/io/votable/tests/data/regression.bin.tabledata.truth.1.3.xml create mode 100644 astropy/io/votable/tests/data/regression.xml create mode 100644 astropy/io/votable/tests/data/tb.fits create mode 100644 
astropy/io/votable/tests/data/too_many_columns.xml.gz create mode 100644 astropy/io/votable/tests/data/validation.txt create mode 100644 astropy/io/votable/tests/exception_test.py create mode 100644 astropy/io/votable/tests/table_test.py create mode 100644 astropy/io/votable/tests/tree_test.py create mode 100644 astropy/io/votable/tests/ucd_test.py create mode 100644 astropy/io/votable/tests/util_test.py create mode 100644 astropy/io/votable/tests/vo_test.py create mode 100644 astropy/io/votable/tree.py create mode 100644 astropy/io/votable/ucd.py create mode 100644 astropy/io/votable/util.py create mode 100644 astropy/io/votable/validator/__init__.py create mode 100644 astropy/io/votable/validator/html.py create mode 100644 astropy/io/votable/validator/main.py create mode 100644 astropy/io/votable/validator/result.py create mode 100644 astropy/io/votable/validator/urls/cone.big.dat.gz create mode 100644 astropy/io/votable/validator/urls/cone.broken.dat.gz create mode 100644 astropy/io/votable/validator/urls/cone.good.dat.gz create mode 100644 astropy/io/votable/validator/urls/cone.incorrect.dat.gz create mode 100644 astropy/io/votable/voexceptions.py create mode 100644 astropy/io/votable/volint.py create mode 100644 astropy/io/votable/xmlutil.py create mode 100644 astropy/logger.py create mode 100644 astropy/modeling/__init__.py create mode 100644 astropy/modeling/core.py create mode 100644 astropy/modeling/fitting.py create mode 100644 astropy/modeling/functional_models.py create mode 100644 astropy/modeling/models.py create mode 100644 astropy/modeling/optimizers.py create mode 100644 astropy/modeling/parameters.py create mode 100644 astropy/modeling/polynomial.py create mode 100644 astropy/modeling/powerlaws.py create mode 100644 astropy/modeling/projections.py create mode 100644 astropy/modeling/rotations.py create mode 100644 astropy/modeling/setup_package.py create mode 100644 astropy/modeling/statistic.py create mode 100644 
astropy/modeling/tests/__init__.py create mode 100644 astropy/modeling/tests/data/1904-66_AZP.fits create mode 100644 astropy/modeling/tests/data/__init__.py create mode 100644 astropy/modeling/tests/data/hst_sip.hdr create mode 100644 astropy/modeling/tests/data/idcompspec.fits create mode 100644 astropy/modeling/tests/data/irac_sip.hdr create mode 100644 astropy/modeling/tests/example_models.py create mode 100644 astropy/modeling/tests/irafutil.py create mode 100644 astropy/modeling/tests/test_constraints.py create mode 100644 astropy/modeling/tests/test_core.py create mode 100644 astropy/modeling/tests/test_fitters.py create mode 100644 astropy/modeling/tests/test_functional_models.py create mode 100644 astropy/modeling/tests/test_input.py create mode 100644 astropy/modeling/tests/test_models.py create mode 100644 astropy/modeling/tests/test_parameters.py create mode 100644 astropy/modeling/tests/test_polynomial.py create mode 100644 astropy/modeling/tests/test_projections.py create mode 100644 astropy/modeling/tests/test_rotations.py create mode 100644 astropy/modeling/tests/utils.py create mode 100644 astropy/modeling/utils.py create mode 100644 astropy/nddata/__init__.py create mode 100644 astropy/nddata/flag_collection.py create mode 100644 astropy/nddata/nddata.py create mode 100644 astropy/nddata/nduncertainty.py create mode 100644 astropy/nddata/setup_package.py create mode 100644 astropy/nddata/tests/__init__.py create mode 100644 astropy/nddata/tests/test_flag_collection.py create mode 100644 astropy/nddata/tests/test_nddata.py create mode 100644 astropy/setup_helpers.py create mode 100644 astropy/setup_package.py create mode 100644 astropy/sphinx/__init__.py create mode 100644 astropy/sphinx/conf.py create mode 100644 astropy/sphinx/ext/__init__.py create mode 100644 astropy/sphinx/ext/astropyautosummary.py create mode 100644 astropy/sphinx/ext/automodapi.py create mode 100644 astropy/sphinx/ext/automodsumm.py create mode 100644 
astropy/sphinx/ext/changelog_links.py create mode 100644 astropy/sphinx/ext/comment_eater.py create mode 100644 astropy/sphinx/ext/compiler_unparse.py create mode 100644 astropy/sphinx/ext/docscrape.py create mode 100644 astropy/sphinx/ext/docscrape_sphinx.py create mode 100644 astropy/sphinx/ext/doctest.py create mode 100644 astropy/sphinx/ext/edit_on_github.py create mode 100644 astropy/sphinx/ext/numpydoc.py create mode 100644 astropy/sphinx/ext/phantom_import.py create mode 100644 astropy/sphinx/ext/smart_resolver.py create mode 100644 astropy/sphinx/ext/templates/autosummary_core/base.rst create mode 100644 astropy/sphinx/ext/templates/autosummary_core/class.rst create mode 100644 astropy/sphinx/ext/templates/autosummary_core/module.rst create mode 100644 astropy/sphinx/ext/tests/__init__.py create mode 100644 astropy/sphinx/ext/tests/test_automodapi.py create mode 100644 astropy/sphinx/ext/tests/test_automodsumm.py create mode 100644 astropy/sphinx/ext/tocdepthfix.py create mode 100644 astropy/sphinx/ext/traitsdoc.py create mode 100644 astropy/sphinx/ext/viewcode.py create mode 100644 astropy/sphinx/setup_package.py create mode 100644 astropy/sphinx/themes/bootstrap-astropy/README.md create mode 100644 astropy/sphinx/themes/bootstrap-astropy/globaltoc.html create mode 100644 astropy/sphinx/themes/bootstrap-astropy/layout.html create mode 100644 astropy/sphinx/themes/bootstrap-astropy/localtoc.html create mode 100644 astropy/sphinx/themes/bootstrap-astropy/searchbox.html create mode 100644 astropy/sphinx/themes/bootstrap-astropy/static/astropy_linkout_20.png create mode 100644 astropy/sphinx/themes/bootstrap-astropy/static/astropy_logo.ico create mode 100644 astropy/sphinx/themes/bootstrap-astropy/static/astropy_logo_32.png create mode 100644 astropy/sphinx/themes/bootstrap-astropy/static/bootstrap-astropy.css create mode 100644 astropy/sphinx/themes/bootstrap-astropy/static/sidebar.js create mode 100644 astropy/sphinx/themes/bootstrap-astropy/theme.conf 
create mode 100644 astropy/stats/__init__.py create mode 100644 astropy/stats/funcs.py create mode 100644 astropy/stats/setup_package.py create mode 100644 astropy/stats/tests/__init__.py create mode 100644 astropy/stats/tests/test_funcs.py create mode 100644 astropy/table/__init__.py create mode 100644 astropy/table/_np_utils.c create mode 100644 astropy/table/_np_utils.pyx create mode 100644 astropy/table/column.py create mode 100644 astropy/table/data/demo_page.css create mode 100644 astropy/table/data/demo_table.css create mode 100644 astropy/table/data/jquery-ui.css create mode 100644 astropy/table/groups.py create mode 100644 astropy/table/jsviewer.py create mode 100644 astropy/table/np_utils.py create mode 100644 astropy/table/operations.py create mode 100644 astropy/table/pprint.py create mode 100644 astropy/table/row.py create mode 100644 astropy/table/setup_package.py create mode 100644 astropy/table/table.py create mode 100644 astropy/table/tests/__init__.py create mode 100644 astropy/table/tests/conftest.py create mode 100644 astropy/table/tests/notebook_repr_html.ipynb create mode 100644 astropy/table/tests/test_column.py create mode 100644 astropy/table/tests/test_groups.py create mode 100644 astropy/table/tests/test_init_table.py create mode 100644 astropy/table/tests/test_item_access.py create mode 100644 astropy/table/tests/test_masked.py create mode 100644 astropy/table/tests/test_np_utils.py create mode 100644 astropy/table/tests/test_operations.py create mode 100644 astropy/table/tests/test_pickle.py create mode 100644 astropy/table/tests/test_pprint.py create mode 100644 astropy/table/tests/test_row.py create mode 100644 astropy/table/tests/test_subclass.py create mode 100644 astropy/table/tests/test_table.py create mode 100644 astropy/tests/__init__.py create mode 100644 astropy/tests/coveragerc create mode 100644 astropy/tests/disable_internet.py create mode 100644 astropy/tests/helper.py create mode 100644 astropy/tests/output_checker.py 
create mode 100644 astropy/tests/pytest_plugins.py create mode 100644 astropy/tests/setup_package.py create mode 100644 astropy/tests/test_logger.py create mode 100644 astropy/tests/tests/__init__.py create mode 100644 astropy/tests/tests/data/open_file_detection.txt create mode 100644 astropy/tests/tests/run_after_2to3.py create mode 100644 astropy/tests/tests/test_imports.py create mode 100644 astropy/tests/tests/test_open_file_detection.py create mode 100644 astropy/tests/tests/test_run_tests.py create mode 100644 astropy/tests/tests/test_skip_remote_data.py create mode 100644 astropy/tests/tests/test_socketblocker.py create mode 100644 astropy/time/__init__.py create mode 100644 astropy/time/core.py create mode 100644 astropy/time/erfa_time.c create mode 100644 astropy/time/erfa_time.pyx create mode 100644 astropy/time/setup_package.py create mode 100644 astropy/time/tests/__init__.py create mode 100644 astropy/time/tests/test_basic.py create mode 100644 astropy/time/tests/test_comparisons.py create mode 100644 astropy/time/tests/test_delta.py create mode 100644 astropy/time/tests/test_guess.py create mode 100644 astropy/time/tests/test_pickle.py create mode 100644 astropy/time/tests/test_precision.py create mode 100644 astropy/time/tests/test_quantity_interaction.py create mode 100644 astropy/time/tests/test_sidereal.py create mode 100644 astropy/time/tests/test_ut1.py create mode 100644 astropy/units/__init__.py create mode 100644 astropy/units/astrophys.py create mode 100644 astropy/units/cds.py create mode 100644 astropy/units/cgs.py create mode 100644 astropy/units/core.py create mode 100644 astropy/units/equivalencies.py create mode 100644 astropy/units/format/__init__.py create mode 100644 astropy/units/format/base.py create mode 100644 astropy/units/format/cds.py create mode 100644 astropy/units/format/cds_lextab.py create mode 100644 astropy/units/format/cds_parsetab.py create mode 100644 astropy/units/format/console.py create mode 100644 
astropy/units/format/fits.py create mode 100644 astropy/units/format/generic.py create mode 100644 astropy/units/format/generic_lextab.py create mode 100644 astropy/units/format/generic_parsetab.py create mode 100644 astropy/units/format/latex.py create mode 100644 astropy/units/format/ogip.py create mode 100644 astropy/units/format/ogip_lextab.py create mode 100644 astropy/units/format/ogip_parsetab.py create mode 100644 astropy/units/format/unicode_format.py create mode 100644 astropy/units/format/utils.py create mode 100644 astropy/units/format/vounit.py create mode 100644 astropy/units/imperial.py create mode 100644 astropy/units/physical.py create mode 100644 astropy/units/quantity.py create mode 100644 astropy/units/quantity_helper.py create mode 100644 astropy/units/setup_package.py create mode 100644 astropy/units/si.py create mode 100644 astropy/units/tests/__init__.py create mode 100644 astropy/units/tests/test_equivalencies.py create mode 100644 astropy/units/tests/test_format.py create mode 100644 astropy/units/tests/test_physical.py create mode 100644 astropy/units/tests/test_quantity.py create mode 100644 astropy/units/tests/test_quantity_array_methods.py create mode 100644 astropy/units/tests/test_quantity_non_ufuncs.py create mode 100644 astropy/units/tests/test_quantity_ufuncs.py create mode 100644 astropy/units/tests/test_units.py create mode 100644 astropy/units/utils.py create mode 100644 astropy/utils/__init__.py create mode 100644 astropy/utils/collections.py create mode 100644 astropy/utils/compat/__init__.py create mode 100644 astropy/utils/compat/_argparse/__init__.py create mode 100644 astropy/utils/compat/_fractions_py2/__init__.py create mode 100644 astropy/utils/compat/_gzip_py2/__init__.py create mode 100644 astropy/utils/compat/_gzip_py3/__init__.py create mode 100644 astropy/utils/compat/_odict_py2/__init__.py create mode 100644 astropy/utils/compat/_subprocess_py2/__init__.py create mode 100644 astropy/utils/compat/argparse.py 
create mode 100644 astropy/utils/compat/fractions.py create mode 100644 astropy/utils/compat/futures/__init__.py create mode 100644 astropy/utils/compat/futures/_base.py create mode 100644 astropy/utils/compat/futures/process.py create mode 100644 astropy/utils/compat/futures/thread.py create mode 100644 astropy/utils/compat/gzip.py create mode 100644 astropy/utils/compat/misc.py create mode 100644 astropy/utils/compat/numpycompat.py create mode 100644 astropy/utils/compat/odict.py create mode 100644 astropy/utils/compat/subprocess.py create mode 100644 astropy/utils/console.py create mode 100644 astropy/utils/data.py create mode 100644 astropy/utils/exceptions.py create mode 100644 astropy/utils/iers/__init__.py create mode 100644 astropy/utils/iers/data/ReadMe.eopc04_IAU2000 create mode 100644 astropy/utils/iers/data/ReadMe.finals2000A create mode 100644 astropy/utils/iers/data/eopc04_IAU2000.62-now create mode 100644 astropy/utils/iers/iers.py create mode 100644 astropy/utils/iers/tests/__init__.py create mode 100644 astropy/utils/iers/tests/test_iers.py create mode 100644 astropy/utils/metadata.py create mode 100644 astropy/utils/misc.py create mode 100644 astropy/utils/release.py create mode 100644 astropy/utils/setup_package.py create mode 100644 astropy/utils/src/compiler.c create mode 100644 astropy/utils/state.py create mode 100644 astropy/utils/tests/__init__.py create mode 100644 astropy/utils/tests/data/.hidden_file.txt create mode 100644 astropy/utils/tests/data/alias.cfg create mode 100644 astropy/utils/tests/data/invalid.dat.bz2 create mode 100644 astropy/utils/tests/data/invalid.dat.gz create mode 100644 astropy/utils/tests/data/local.dat create mode 100644 astropy/utils/tests/data/local.dat.bz2 create mode 100644 astropy/utils/tests/data/local.dat.gz create mode 100644 astropy/utils/tests/data/test_package/__init__.py create mode 100644 astropy/utils/tests/data/test_package/data/foo.txt create mode 100644 astropy/utils/tests/data/unicode.txt create 
mode 100644 astropy/utils/tests/odict_mapping.py create mode 100644 astropy/utils/tests/odict_support.py create mode 100644 astropy/utils/tests/test_collections.py create mode 100644 astropy/utils/tests/test_compat.py create mode 100644 astropy/utils/tests/test_console.py create mode 100644 astropy/utils/tests/test_data.py create mode 100644 astropy/utils/tests/test_gzip.py create mode 100644 astropy/utils/tests/test_metadata.py create mode 100644 astropy/utils/tests/test_misc.py create mode 100644 astropy/utils/tests/test_odict.py create mode 100644 astropy/utils/tests/test_state.py create mode 100644 astropy/utils/tests/test_timer.py create mode 100644 astropy/utils/tests/test_xml.py create mode 100644 astropy/utils/timer.py create mode 100644 astropy/utils/xml/__init__.py create mode 100644 astropy/utils/xml/check.py create mode 100644 astropy/utils/xml/iterparser.py create mode 100644 astropy/utils/xml/setup_package.py create mode 100644 astropy/utils/xml/src/expat_config.h create mode 100644 astropy/utils/xml/src/iterparse.c create mode 100644 astropy/utils/xml/src/iterparse.map create mode 100644 astropy/utils/xml/unescaper.py create mode 100644 astropy/utils/xml/validate.py create mode 100644 astropy/utils/xml/writer.py create mode 100644 astropy/version.py create mode 100644 astropy/version_helpers.py create mode 100644 astropy/vo/__init__.py create mode 100644 astropy/vo/client/__init__.py create mode 100644 astropy/vo/client/async.py create mode 100644 astropy/vo/client/conesearch.py create mode 100644 astropy/vo/client/exceptions.py create mode 100755 astropy/vo/client/setup_package.py create mode 100644 astropy/vo/client/tests/__init__.py create mode 100644 astropy/vo/client/tests/data/basic.json create mode 100644 astropy/vo/client/tests/data/conesearch_error1.xml create mode 100644 astropy/vo/client/tests/data/conesearch_error2.xml create mode 100644 astropy/vo/client/tests/data/conesearch_error3.xml create mode 100644 
astropy/vo/client/tests/data/conesearch_error4.xml create mode 100644 astropy/vo/client/tests/test_conesearch.py create mode 100644 astropy/vo/client/tests/test_vos_catalog.py create mode 100644 astropy/vo/client/vos_catalog.py create mode 100644 astropy/vo/samp/__init__.py create mode 100644 astropy/vo/samp/client.py create mode 100644 astropy/vo/samp/constants.py create mode 100644 astropy/vo/samp/data/astropy_icon.png create mode 100644 astropy/vo/samp/data/clientaccesspolicy.xml create mode 100644 astropy/vo/samp/data/crossdomain.xml create mode 100644 astropy/vo/samp/errors.py create mode 100644 astropy/vo/samp/hub.py create mode 100644 astropy/vo/samp/hub_proxy.py create mode 100644 astropy/vo/samp/hub_script.py create mode 100644 astropy/vo/samp/integrated_client.py create mode 100644 astropy/vo/samp/lockfile_helpers.py create mode 100644 astropy/vo/samp/setup_package.py create mode 100644 astropy/vo/samp/ssl_utils.py create mode 100644 astropy/vo/samp/standard_profile.py create mode 100644 astropy/vo/samp/tests/__init__.py create mode 100644 astropy/vo/samp/tests/data/README.md create mode 100644 astropy/vo/samp/tests/data/test1.crt create mode 100644 astropy/vo/samp/tests/data/test1.key create mode 100644 astropy/vo/samp/tests/data/test2.crt create mode 100644 astropy/vo/samp/tests/data/test2.key create mode 100644 astropy/vo/samp/tests/test_client.py create mode 100644 astropy/vo/samp/tests/test_errors.py create mode 100644 astropy/vo/samp/tests/test_helpers.py create mode 100644 astropy/vo/samp/tests/test_hub.py create mode 100644 astropy/vo/samp/tests/test_hub_proxy.py create mode 100644 astropy/vo/samp/tests/test_hub_script.py create mode 100644 astropy/vo/samp/tests/test_standard_profile.py create mode 100644 astropy/vo/samp/tests/test_web_profile.py create mode 100644 astropy/vo/samp/tests/web_profile_test_helpers.py create mode 100644 astropy/vo/samp/utils.py create mode 100644 astropy/vo/samp/web_profile.py create mode 100644 
astropy/vo/validator/__init__.py create mode 100644 astropy/vo/validator/data/conesearch_urls.txt create mode 100644 astropy/vo/validator/exceptions.py create mode 100644 astropy/vo/validator/inspect.py create mode 100755 astropy/vo/validator/setup_package.py create mode 100644 astropy/vo/validator/tests/__init__.py create mode 100644 astropy/vo/validator/tests/data/conesearch_error.json create mode 100644 astropy/vo/validator/tests/data/conesearch_exception.json create mode 100644 astropy/vo/validator/tests/data/conesearch_good.json create mode 100644 astropy/vo/validator/tests/data/conesearch_good_subset.json create mode 100644 astropy/vo/validator/tests/data/conesearch_warn.json create mode 100644 astropy/vo/validator/tests/data/listcats1.out create mode 100644 astropy/vo/validator/tests/data/listcats2.out create mode 100644 astropy/vo/validator/tests/data/printcat.out create mode 100644 astropy/vo/validator/tests/data/tally.out create mode 100644 astropy/vo/validator/tests/data/vao_conesearch_sites_121107_subset.xml create mode 100644 astropy/vo/validator/tests/test_inpect.py create mode 100644 astropy/vo/validator/tests/test_validate.py create mode 100644 astropy/vo/validator/tstquery.py create mode 100644 astropy/vo/validator/validate.py create mode 100644 astropy/wcs/__init__.py create mode 100644 astropy/wcs/_docutil.py create mode 100644 astropy/wcs/docstrings.py create mode 100644 astropy/wcs/include/astropy_wcs/astropy_wcs.h create mode 100644 astropy/wcs/include/astropy_wcs/astropy_wcs_api.h create mode 100644 astropy/wcs/include/astropy_wcs/distortion.h create mode 100644 astropy/wcs/include/astropy_wcs/distortion_wrap.h create mode 100644 astropy/wcs/include/astropy_wcs/docstrings.h create mode 100644 astropy/wcs/include/astropy_wcs/isnan.h create mode 100644 astropy/wcs/include/astropy_wcs/pipeline.h create mode 100644 astropy/wcs/include/astropy_wcs/pyutil.h create mode 100644 astropy/wcs/include/astropy_wcs/sip.h create mode 100644 
astropy/wcs/include/astropy_wcs/sip_wrap.h create mode 100644 astropy/wcs/include/astropy_wcs/str_list_proxy.h create mode 100644 astropy/wcs/include/astropy_wcs/unit_list_proxy.h create mode 100644 astropy/wcs/include/astropy_wcs/util.h create mode 100644 astropy/wcs/include/astropy_wcs/wcsconfig.h create mode 100644 astropy/wcs/include/astropy_wcs/wcslib_tabprm_wrap.h create mode 100644 astropy/wcs/include/astropy_wcs/wcslib_units_wrap.h create mode 100644 astropy/wcs/include/astropy_wcs/wcslib_wrap.h create mode 100644 astropy/wcs/include/astropy_wcs/wcslib_wtbarr_wrap.h create mode 100644 astropy/wcs/include/astropy_wcs_api.h create mode 100644 astropy/wcs/include/wcsconfig.h create mode 100644 astropy/wcs/include/wcslib/cel.h create mode 100644 astropy/wcs/include/wcslib/lin.h create mode 100644 astropy/wcs/include/wcslib/prj.h create mode 100644 astropy/wcs/include/wcslib/spc.h create mode 100644 astropy/wcs/include/wcslib/spx.h create mode 100644 astropy/wcs/include/wcslib/tab.h create mode 100644 astropy/wcs/include/wcslib/wcs.h create mode 100644 astropy/wcs/include/wcslib/wcserr.h create mode 100644 astropy/wcs/include/wcslib/wcsmath.h create mode 100644 astropy/wcs/include/wcslib/wcsprintf.h create mode 100644 astropy/wcs/setup_package.py create mode 100644 astropy/wcs/src/astropy_wcs.c create mode 100644 astropy/wcs/src/astropy_wcs_api.c create mode 100644 astropy/wcs/src/distortion.c create mode 100644 astropy/wcs/src/distortion_wrap.c create mode 100644 astropy/wcs/src/docstrings.c create mode 100644 astropy/wcs/src/pipeline.c create mode 100644 astropy/wcs/src/pyutil.c create mode 100644 astropy/wcs/src/sip.c create mode 100644 astropy/wcs/src/sip_wrap.c create mode 100644 astropy/wcs/src/str_list_proxy.c create mode 100644 astropy/wcs/src/unit_list_proxy.c create mode 100644 astropy/wcs/src/util.c create mode 100644 astropy/wcs/src/wcslib_tabprm_wrap.c create mode 100644 astropy/wcs/src/wcslib_wrap.c create mode 100644 
astropy/wcs/src/wcslib_wtbarr_wrap.c create mode 100644 astropy/wcs/tests/__init__.py create mode 100644 astropy/wcs/tests/data/2wcses.hdr create mode 100644 astropy/wcs/tests/data/3d_cd.hdr create mode 100644 astropy/wcs/tests/data/defunct_keywords.hdr create mode 100644 astropy/wcs/tests/data/dist.fits create mode 100644 astropy/wcs/tests/data/header_newlines.fits create mode 100644 astropy/wcs/tests/data/invalid_header.hdr create mode 100644 astropy/wcs/tests/data/irac_sip.hdr create mode 100644 astropy/wcs/tests/data/locale.hdr create mode 100644 astropy/wcs/tests/data/nonstandard_units.hdr create mode 100644 astropy/wcs/tests/data/outside_sky.hdr create mode 100644 astropy/wcs/tests/data/sip.fits create mode 100644 astropy/wcs/tests/data/sip2.fits create mode 100644 astropy/wcs/tests/data/sub-segfault.hdr create mode 100644 astropy/wcs/tests/data/too_many_pv.hdr create mode 100644 astropy/wcs/tests/data/unit.hdr create mode 100644 astropy/wcs/tests/data/validate.fits create mode 100644 astropy/wcs/tests/data/validate.txt create mode 100644 astropy/wcs/tests/data/zpn-hole.hdr create mode 100644 astropy/wcs/tests/extension/__init__.py create mode 100644 astropy/wcs/tests/extension/setup.py create mode 100644 astropy/wcs/tests/extension/test_extension.py create mode 100644 astropy/wcs/tests/extension/wcsapi_test.c create mode 100644 astropy/wcs/tests/maps/1904-66_AIR.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_AIT.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_ARC.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_AZP.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_BON.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_CAR.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_CEA.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_COD.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_COE.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_COO.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_COP.hdr create mode 100644 
astropy/wcs/tests/maps/1904-66_CSC.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_CYP.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_HPX.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_MER.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_MOL.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_NCP.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_PAR.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_PCO.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_QSC.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_SFL.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_SIN.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_STG.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_SZP.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_TAN.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_TSC.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_ZEA.hdr create mode 100644 astropy/wcs/tests/maps/1904-66_ZPN.hdr create mode 100644 astropy/wcs/tests/spectra/orion-freq-1.hdr create mode 100644 astropy/wcs/tests/spectra/orion-freq-4.hdr create mode 100644 astropy/wcs/tests/spectra/orion-velo-1.hdr create mode 100644 astropy/wcs/tests/spectra/orion-velo-4.hdr create mode 100644 astropy/wcs/tests/spectra/orion-wave-1.hdr create mode 100644 astropy/wcs/tests/spectra/orion-wave-4.hdr create mode 100644 astropy/wcs/tests/test_pickle.py create mode 100644 astropy/wcs/tests/test_profiling.py create mode 100644 astropy/wcs/tests/test_utils.py create mode 100644 astropy/wcs/tests/test_wcs.py create mode 100644 astropy/wcs/tests/test_wcsprm.py create mode 100644 astropy/wcs/utils.py create mode 100644 astropy/wcs/wcs.py create mode 100644 astropy/wcs/wcslint.py create mode 100644 astropy_helpers/.coveragerc create mode 100644 astropy_helpers/.travis.yml create mode 100644 astropy_helpers/CHANGES.rst create mode 100644 astropy_helpers/CONTRIBUTING.md create mode 100644 astropy_helpers/LICENSE.rst create mode 100644 
astropy_helpers/MANIFEST.in create mode 100644 astropy_helpers/README.rst create mode 100644 astropy_helpers/ah_bootstrap.py create mode 100644 astropy_helpers/astropy_helpers.egg-info/PKG-INFO create mode 100644 astropy_helpers/astropy_helpers.egg-info/SOURCES.txt create mode 100644 astropy_helpers/astropy_helpers.egg-info/dependency_links.txt create mode 100644 astropy_helpers/astropy_helpers.egg-info/not-zip-safe create mode 100644 astropy_helpers/astropy_helpers.egg-info/top_level.txt create mode 100644 astropy_helpers/astropy_helpers/__init__.py create mode 100644 astropy_helpers/astropy_helpers/compat/__init__.py create mode 100644 astropy_helpers/astropy_helpers/compat/_subprocess_py2/__init__.py create mode 100644 astropy_helpers/astropy_helpers/compat/subprocess.py create mode 100644 astropy_helpers/astropy_helpers/git_helpers.py create mode 100644 astropy_helpers/astropy_helpers/setup_helpers.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/__init__.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/conf.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/__init__.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/astropyautosummary.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/automodapi.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/automodsumm.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/changelog_links.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/comment_eater.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/compiler_unparse.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/docscrape.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/docscrape_sphinx.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/doctest.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/edit_on_github.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/numpydoc.py create 
mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/phantom_import.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/smart_resolver.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/templates/autosummary_core/base.rst create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/templates/autosummary_core/class.rst create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/templates/autosummary_core/module.rst create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/tests/__init__.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/tests/test_automodapi.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/tests/test_automodsumm.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/tests/test_docscrape.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/tests/test_utils.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/tocdepthfix.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/traitsdoc.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/utils.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/ext/viewcode.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/local/python3links.inv create mode 100644 astropy_helpers/astropy_helpers/sphinx/local/python3links.txt create mode 100644 astropy_helpers/astropy_helpers/sphinx/setup_package.py create mode 100644 astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/globaltoc.html create mode 100644 astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/layout.html create mode 100644 astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/localtoc.html create mode 100644 astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/searchbox.html create mode 100644 astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_linkout_20.png create mode 100644 
astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.ico create mode 100644 astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo_32.png create mode 100644 astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/bootstrap-astropy.css create mode 100644 astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/copybutton.js create mode 100644 astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/static/sidebar.js create mode 100644 astropy_helpers/astropy_helpers/sphinx/themes/bootstrap-astropy/theme.conf create mode 100644 astropy_helpers/astropy_helpers/src/__init__.py create mode 100644 astropy_helpers/astropy_helpers/src/compiler.c create mode 100644 astropy_helpers/astropy_helpers/src/setup_package.py create mode 100644 astropy_helpers/astropy_helpers/test_helpers.py create mode 100644 astropy_helpers/astropy_helpers/tests/__init__.py create mode 100644 astropy_helpers/astropy_helpers/tests/test_ah_bootstrap.py create mode 100644 astropy_helpers/astropy_helpers/tests/test_git_helpers.py create mode 100644 astropy_helpers/astropy_helpers/tests/test_setup_helpers.py create mode 100644 astropy_helpers/astropy_helpers/utils.py create mode 100644 astropy_helpers/astropy_helpers/version.py create mode 100644 astropy_helpers/astropy_helpers/version_helpers.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/__init__.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/compat/__init__.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/compat/_subprocess_py2/__init__.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/compat/subprocess.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/git_helpers.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/setup_helpers.py create mode 100644 
astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/__init__.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/conf.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/__init__.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/astropyautosummary.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/automodapi.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/automodsumm.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/changelog_links.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/comment_eater.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/compiler_unparse.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/docscrape.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/docscrape_sphinx.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/doctest.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/edit_on_github.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/numpydoc.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/phantom_import.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/smart_resolver.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/templates/autosummary_core/base.rst create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/templates/autosummary_core/class.rst create mode 100644 
astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/templates/autosummary_core/module.rst create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/tests/__init__.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/tests/test_automodapi.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/tests/test_automodsumm.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/tests/test_docscrape.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/tests/test_utils.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/tocdepthfix.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/traitsdoc.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/utils.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/ext/viewcode.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/local/python3links.inv create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/setup_package.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/globaltoc.html create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/layout.html create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/localtoc.html create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/searchbox.html create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_linkout_20.png create mode 100644 
astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo.ico create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/static/astropy_logo_32.png create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/static/bootstrap-astropy.css create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/static/copybutton.js create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/static/sidebar.js create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/sphinx/themes/bootstrap-astropy/theme.conf create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/src/__init__.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/src/compiler.c create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/src/setup_package.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/test_helpers.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/utils.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/version.py create mode 100644 astropy_helpers/build/lib.linux-x86_64-2.7/astropy_helpers/version_helpers.py create mode 100644 astropy_helpers/dist/astropy_helpers-0.4.2-py2.7.egg create mode 100644 astropy_helpers/ez_setup.py create mode 100644 astropy_helpers/licenses/LICENSE_COPYBUTTON.rst create mode 100644 astropy_helpers/licenses/LICENSE_NUMPYDOC.rst create mode 100644 astropy_helpers/setup.cfg create mode 100755 astropy_helpers/setup.py create mode 100644 astropy_helpers/tox.ini create mode 100644 cextern/.gitignore create mode 100644 cextern/README.rst create mode 100644 cextern/cfitsio/License.txt create mode 100644 cextern/cfitsio/adler32.c create mode 100644 
cextern/cfitsio/buffers.c create mode 100644 cextern/cfitsio/cfileio.c create mode 100644 cextern/cfitsio/changes.txt create mode 100644 cextern/cfitsio/checksum.c create mode 100644 cextern/cfitsio/crc32.c create mode 100644 cextern/cfitsio/crc32.h create mode 100644 cextern/cfitsio/deflate.c create mode 100644 cextern/cfitsio/deflate.h create mode 100644 cextern/cfitsio/drvrfile.c create mode 100644 cextern/cfitsio/drvrgsiftp.c create mode 100644 cextern/cfitsio/drvrgsiftp.h create mode 100644 cextern/cfitsio/drvrmem.c create mode 100644 cextern/cfitsio/drvrnet.c create mode 100644 cextern/cfitsio/drvrsmem.c create mode 100644 cextern/cfitsio/drvrsmem.h create mode 100644 cextern/cfitsio/editcol.c create mode 100644 cextern/cfitsio/edithdu.c create mode 100644 cextern/cfitsio/eval.l create mode 100644 cextern/cfitsio/eval.y create mode 100644 cextern/cfitsio/eval_defs.h create mode 100644 cextern/cfitsio/eval_f.c create mode 100644 cextern/cfitsio/eval_l.c create mode 100644 cextern/cfitsio/eval_tab.h create mode 100644 cextern/cfitsio/eval_y.c create mode 100644 cextern/cfitsio/fits_hcompress.c create mode 100644 cextern/cfitsio/fits_hdecompress.c create mode 100644 cextern/cfitsio/fitscore.c create mode 100644 cextern/cfitsio/fitsio.h create mode 100644 cextern/cfitsio/fitsio2.h create mode 100644 cextern/cfitsio/getcol.c create mode 100644 cextern/cfitsio/getcolb.c create mode 100644 cextern/cfitsio/getcold.c create mode 100644 cextern/cfitsio/getcole.c create mode 100644 cextern/cfitsio/getcoli.c create mode 100644 cextern/cfitsio/getcolj.c create mode 100644 cextern/cfitsio/getcolk.c create mode 100644 cextern/cfitsio/getcoll.c create mode 100644 cextern/cfitsio/getcols.c create mode 100644 cextern/cfitsio/getcolsb.c create mode 100644 cextern/cfitsio/getcolui.c create mode 100644 cextern/cfitsio/getcoluj.c create mode 100644 cextern/cfitsio/getcoluk.c create mode 100644 cextern/cfitsio/getkey.c create mode 100644 cextern/cfitsio/group.c create mode 100644 
cextern/cfitsio/group.h create mode 100644 cextern/cfitsio/grparser.c create mode 100644 cextern/cfitsio/grparser.h create mode 100644 cextern/cfitsio/histo.c create mode 100644 cextern/cfitsio/imcompress.c create mode 100644 cextern/cfitsio/infback.c create mode 100644 cextern/cfitsio/inffast.c create mode 100644 cextern/cfitsio/inffast.h create mode 100644 cextern/cfitsio/inffixed.h create mode 100644 cextern/cfitsio/inflate.c create mode 100644 cextern/cfitsio/inflate.h create mode 100644 cextern/cfitsio/inftrees.c create mode 100644 cextern/cfitsio/inftrees.h create mode 100644 cextern/cfitsio/iraffits.c create mode 100644 cextern/cfitsio/longnam.h create mode 100644 cextern/cfitsio/modkey.c create mode 100644 cextern/cfitsio/pliocomp.c create mode 100644 cextern/cfitsio/putcol.c create mode 100644 cextern/cfitsio/putcolb.c create mode 100644 cextern/cfitsio/putcold.c create mode 100644 cextern/cfitsio/putcole.c create mode 100644 cextern/cfitsio/putcoli.c create mode 100644 cextern/cfitsio/putcolj.c create mode 100644 cextern/cfitsio/putcolk.c create mode 100644 cextern/cfitsio/putcoll.c create mode 100644 cextern/cfitsio/putcols.c create mode 100644 cextern/cfitsio/putcolsb.c create mode 100644 cextern/cfitsio/putcolu.c create mode 100644 cextern/cfitsio/putcolui.c create mode 100644 cextern/cfitsio/putcoluj.c create mode 100644 cextern/cfitsio/putcoluk.c create mode 100644 cextern/cfitsio/putkey.c create mode 100644 cextern/cfitsio/quantize.c create mode 100644 cextern/cfitsio/region.c create mode 100644 cextern/cfitsio/region.h create mode 100644 cextern/cfitsio/ricecomp.c create mode 100644 cextern/cfitsio/scalnull.c create mode 100644 cextern/cfitsio/swapproc.c create mode 100644 cextern/cfitsio/trees.c create mode 100644 cextern/cfitsio/trees.h create mode 100644 cextern/cfitsio/uncompr.c create mode 100644 cextern/cfitsio/wcssub.c create mode 100644 cextern/cfitsio/wcsutil.c create mode 100644 cextern/cfitsio/zcompress.c create mode 100644 
cextern/cfitsio/zconf.h create mode 100644 cextern/cfitsio/zlib.h create mode 100644 cextern/cfitsio/zuncompress.c create mode 100644 cextern/cfitsio/zutil.c create mode 100644 cextern/cfitsio/zutil.h create mode 100644 cextern/erfa/erfa.c create mode 100644 cextern/erfa/erfa.h create mode 100755 cextern/expat/CMake.README create mode 100755 cextern/expat/CMakeLists.txt create mode 100755 cextern/expat/COPYING create mode 100755 cextern/expat/Changes create mode 100755 cextern/expat/ConfigureChecks.cmake create mode 100755 cextern/expat/MANIFEST create mode 100755 cextern/expat/Makefile.in create mode 100755 cextern/expat/README create mode 100644 cextern/expat/aclocal.m4 create mode 100755 cextern/expat/amiga/Makefile create mode 100755 cextern/expat/amiga/README.txt create mode 100755 cextern/expat/amiga/expat.xml create mode 100644 cextern/expat/amiga/expat_68k.c create mode 100755 cextern/expat/amiga/expat_68k.h create mode 100755 cextern/expat/amiga/expat_68k_handler_stubs.c create mode 100644 cextern/expat/amiga/expat_base.h create mode 100755 cextern/expat/amiga/expat_lib.c create mode 100755 cextern/expat/amiga/expat_vectors.c create mode 100755 cextern/expat/amiga/include/inline4/expat.h create mode 100755 cextern/expat/amiga/include/interfaces/expat.h create mode 100755 cextern/expat/amiga/include/libraries/expat.h create mode 100755 cextern/expat/amiga/include/proto/expat.h create mode 100755 cextern/expat/amiga/launch.c create mode 100755 cextern/expat/amiga/stdlib.c create mode 100755 cextern/expat/bcb5/README.txt create mode 100755 cextern/expat/bcb5/all_projects.bpg create mode 100755 cextern/expat/bcb5/elements.bpf create mode 100755 cextern/expat/bcb5/elements.bpr create mode 100755 cextern/expat/bcb5/elements.mak create mode 100755 cextern/expat/bcb5/expat.bpf create mode 100755 cextern/expat/bcb5/expat.bpr create mode 100755 cextern/expat/bcb5/expat.mak create mode 100755 cextern/expat/bcb5/expat_static.bpf create mode 100755 
cextern/expat/bcb5/expat_static.bpr create mode 100755 cextern/expat/bcb5/expat_static.mak create mode 100755 cextern/expat/bcb5/expatw.bpf create mode 100755 cextern/expat/bcb5/expatw.bpr create mode 100755 cextern/expat/bcb5/expatw.mak create mode 100755 cextern/expat/bcb5/expatw_static.bpf create mode 100755 cextern/expat/bcb5/expatw_static.bpr create mode 100755 cextern/expat/bcb5/expatw_static.mak create mode 100755 cextern/expat/bcb5/libexpat_mtd.def create mode 100755 cextern/expat/bcb5/libexpatw_mtd.def create mode 100755 cextern/expat/bcb5/makefile.mak create mode 100755 cextern/expat/bcb5/outline.bpf create mode 100755 cextern/expat/bcb5/outline.bpr create mode 100755 cextern/expat/bcb5/outline.mak create mode 100755 cextern/expat/bcb5/setup.bat create mode 100755 cextern/expat/bcb5/xmlwf.bpf create mode 100755 cextern/expat/bcb5/xmlwf.bpr create mode 100755 cextern/expat/bcb5/xmlwf.mak create mode 100755 cextern/expat/configure create mode 100755 cextern/expat/configure.in create mode 100755 cextern/expat/conftools/PrintPath create mode 100755 cextern/expat/conftools/ac_c_bigendian_cross.m4 create mode 100755 cextern/expat/conftools/expat.m4 create mode 100755 cextern/expat/conftools/get-version.sh create mode 100755 cextern/expat/conftools/install-sh create mode 100755 cextern/expat/conftools/ltmain.sh create mode 100755 cextern/expat/doc/expat.png create mode 100755 cextern/expat/doc/reference.html create mode 100755 cextern/expat/doc/style.css create mode 100755 cextern/expat/doc/valid-xhtml10.png create mode 100755 cextern/expat/doc/xmlwf.1 create mode 100755 cextern/expat/doc/xmlwf.sgml create mode 100755 cextern/expat/examples/elements.c create mode 100755 cextern/expat/examples/elements.dsp create mode 100755 cextern/expat/examples/outline.c create mode 100755 cextern/expat/examples/outline.dsp create mode 100755 cextern/expat/expat.dsw create mode 100644 cextern/expat/expat.pc.in create mode 100755 cextern/expat/expat_config.h.cmake create mode 
100755 cextern/expat/expat_config.h.in create mode 100755 cextern/expat/lib/Makefile.MPW create mode 100755 cextern/expat/lib/amigaconfig.h create mode 100755 cextern/expat/lib/ascii.h create mode 100755 cextern/expat/lib/asciitab.h create mode 100755 cextern/expat/lib/expat.dsp create mode 100755 cextern/expat/lib/expat.h create mode 100755 cextern/expat/lib/expat_external.h create mode 100755 cextern/expat/lib/expat_static.dsp create mode 100755 cextern/expat/lib/expatw.dsp create mode 100755 cextern/expat/lib/expatw_static.dsp create mode 100755 cextern/expat/lib/iasciitab.h create mode 100755 cextern/expat/lib/internal.h create mode 100755 cextern/expat/lib/latin1tab.h create mode 100755 cextern/expat/lib/libexpat.def create mode 100755 cextern/expat/lib/libexpatw.def create mode 100755 cextern/expat/lib/macconfig.h create mode 100755 cextern/expat/lib/nametab.h create mode 100755 cextern/expat/lib/utf8tab.h create mode 100755 cextern/expat/lib/winconfig.h create mode 100755 cextern/expat/lib/xmlparse.c create mode 100755 cextern/expat/lib/xmlrole.c create mode 100755 cextern/expat/lib/xmlrole.h create mode 100755 cextern/expat/lib/xmltok.c create mode 100755 cextern/expat/lib/xmltok.h create mode 100755 cextern/expat/lib/xmltok_impl.c create mode 100755 cextern/expat/lib/xmltok_impl.h create mode 100755 cextern/expat/lib/xmltok_ns.c create mode 100644 cextern/expat/m4/libtool.m4 create mode 100644 cextern/expat/m4/ltoptions.m4 create mode 100644 cextern/expat/m4/ltsugar.m4 create mode 100644 cextern/expat/m4/ltversion.m4 create mode 100644 cextern/expat/m4/lt~obsolete.m4 create mode 100755 cextern/expat/tests/README.txt create mode 100755 cextern/expat/tests/benchmark/README.txt create mode 100755 cextern/expat/tests/benchmark/benchmark.c create mode 100755 cextern/expat/tests/benchmark/benchmark.dsp create mode 100755 cextern/expat/tests/benchmark/benchmark.dsw create mode 100755 cextern/expat/tests/chardata.c create mode 100755 cextern/expat/tests/chardata.h 
create mode 100755 cextern/expat/tests/minicheck.c create mode 100755 cextern/expat/tests/minicheck.h create mode 100755 cextern/expat/tests/runtests.c create mode 100755 cextern/expat/tests/runtestspp.cpp create mode 100755 cextern/expat/tests/xmltest.sh create mode 100755 cextern/expat/vms/README.vms create mode 100755 cextern/expat/vms/descrip.mms create mode 100755 cextern/expat/vms/expat_config.h create mode 100755 cextern/expat/win32/MANIFEST.txt create mode 100755 cextern/expat/win32/README.txt create mode 100755 cextern/expat/win32/expat.iss create mode 100755 cextern/expat/xmlwf/codepage.c create mode 100755 cextern/expat/xmlwf/codepage.h create mode 100755 cextern/expat/xmlwf/ct.c create mode 100755 cextern/expat/xmlwf/filemap.h create mode 100755 cextern/expat/xmlwf/readfilemap.c create mode 100755 cextern/expat/xmlwf/unixfilemap.c create mode 100755 cextern/expat/xmlwf/win32filemap.c create mode 100755 cextern/expat/xmlwf/xmlfile.c create mode 100755 cextern/expat/xmlwf/xmlfile.h create mode 100755 cextern/expat/xmlwf/xmlmime.c create mode 100755 cextern/expat/xmlwf/xmlmime.h create mode 100755 cextern/expat/xmlwf/xmltchar.h create mode 100755 cextern/expat/xmlwf/xmlurl.h create mode 100755 cextern/expat/xmlwf/xmlwf.c create mode 100755 cextern/expat/xmlwf/xmlwf.dsp create mode 100755 cextern/expat/xmlwf/xmlwin32url.cxx create mode 100755 cextern/trim_wcslib.sh create mode 100644 cextern/wcslib/C/GNUmakefile create mode 100644 cextern/wcslib/C/cel.c create mode 100644 cextern/wcslib/C/cel.h create mode 100644 cextern/wcslib/C/fitshdr.h create mode 100644 cextern/wcslib/C/fitshdr.l create mode 100644 cextern/wcslib/C/flexed/README create mode 100644 cextern/wcslib/C/flexed/fitshdr.c create mode 100644 cextern/wcslib/C/flexed/wcsbth.c create mode 100644 cextern/wcslib/C/flexed/wcspih.c create mode 100644 cextern/wcslib/C/flexed/wcsulex.c create mode 100644 cextern/wcslib/C/flexed/wcsutrn.c create mode 100644 cextern/wcslib/C/getwcstab.c create mode 100644 
cextern/wcslib/C/getwcstab.h create mode 100644 cextern/wcslib/C/lin.c create mode 100644 cextern/wcslib/C/lin.h create mode 100644 cextern/wcslib/C/log.c create mode 100644 cextern/wcslib/C/log.h create mode 100644 cextern/wcslib/C/makedefs.in create mode 100644 cextern/wcslib/C/prj.c create mode 100644 cextern/wcslib/C/prj.h create mode 100644 cextern/wcslib/C/spc.c create mode 100644 cextern/wcslib/C/spc.h create mode 100644 cextern/wcslib/C/sph.c create mode 100644 cextern/wcslib/C/sph.h create mode 100644 cextern/wcslib/C/spx.c create mode 100644 cextern/wcslib/C/spx.h create mode 100644 cextern/wcslib/C/tab.c create mode 100644 cextern/wcslib/C/tab.h create mode 100644 cextern/wcslib/C/wcs.c create mode 100644 cextern/wcslib/C/wcs.h create mode 100644 cextern/wcslib/C/wcsbth.l create mode 100644 cextern/wcslib/C/wcsconfig.h.in create mode 100644 cextern/wcslib/C/wcsconfig_tests.h.in create mode 100644 cextern/wcslib/C/wcserr.c create mode 100644 cextern/wcslib/C/wcserr.h create mode 100644 cextern/wcslib/C/wcsfix.c create mode 100644 cextern/wcslib/C/wcsfix.h create mode 100644 cextern/wcslib/C/wcshdr.c create mode 100644 cextern/wcslib/C/wcshdr.h create mode 100644 cextern/wcslib/C/wcslib.h create mode 100644 cextern/wcslib/C/wcsmath.h create mode 100644 cextern/wcslib/C/wcspih.l create mode 100644 cextern/wcslib/C/wcsprintf.c create mode 100644 cextern/wcslib/C/wcsprintf.h create mode 100644 cextern/wcslib/C/wcstrig.c create mode 100644 cextern/wcslib/C/wcstrig.h create mode 100644 cextern/wcslib/C/wcsulex.l create mode 100644 cextern/wcslib/C/wcsunits.c create mode 100644 cextern/wcslib/C/wcsunits.h create mode 100644 cextern/wcslib/C/wcsutil.c create mode 100644 cextern/wcslib/C/wcsutil.h create mode 100644 cextern/wcslib/C/wcsutrn.l create mode 100644 cextern/wcslib/CHANGES create mode 100644 cextern/wcslib/COPYING create mode 100644 cextern/wcslib/COPYING.LESSER create mode 100644 cextern/wcslib/GNUmakefile create mode 100644 cextern/wcslib/INSTALL 
create mode 100644 cextern/wcslib/README create mode 100644 cextern/wcslib/THANKS create mode 100644 cextern/wcslib/VALIDATION create mode 100755 cextern/wcslib/config/config.guess create mode 100755 cextern/wcslib/config/config.sub create mode 100755 cextern/wcslib/config/elisp-comp create mode 100755 cextern/wcslib/config/install-sh create mode 100755 cextern/wcslib/config/mdate-sh create mode 100755 cextern/wcslib/config/missing create mode 100755 cextern/wcslib/config/mkinstalldirs create mode 100755 cextern/wcslib/config/move-if-change create mode 100755 cextern/wcslib/configure create mode 100644 cextern/wcslib/configure.ac create mode 100644 cextern/wcslib/flavours create mode 100644 cextern/wcslib/makedefs.in create mode 100644 cextern/wcslib/wcsconfig.h.in create mode 100644 cextern/wcslib/wcsconfig_f77.h.in create mode 100644 cextern/wcslib/wcsconfig_tests.h.in create mode 100644 cextern/wcslib/wcsconfig_utils.h.in create mode 100644 cextern/wcslib/wcslib.pc.in create mode 100644 docs/Makefile create mode 100644 docs/_pkgtemplate.rst create mode 100644 docs/_static/astropy_logo.pdf create mode 100644 docs/_static/dev.png create mode 100644 docs/_static/mature.png create mode 100644 docs/_static/planned.png create mode 100644 docs/_static/stable.png create mode 100644 docs/_static/timer_prediction_pow10.png create mode 100644 docs/_templates/autosummary/base.rst create mode 100644 docs/_templates/autosummary/class.rst create mode 100644 docs/_templates/autosummary/module.rst create mode 100644 docs/astropy_banner_96.png create mode 100644 docs/changelog.rst create mode 100644 docs/conf.py create mode 100644 docs/config/config_0_4_transition.rst create mode 100644 docs/config/index.rst create mode 100644 docs/constants/index.rst create mode 100644 docs/convolution/images/astropy.png create mode 100644 docs/convolution/images/original.png create mode 100644 docs/convolution/images/scipy.png create mode 100644 docs/convolution/index.rst create mode 100644 
docs/convolution/kernels.rst create mode 100644 docs/convolution/using.rst create mode 100644 docs/coordinates/angles.rst create mode 100644 docs/coordinates/definitions.rst create mode 100644 docs/coordinates/formatting.rst create mode 100644 docs/coordinates/frames.rst create mode 100644 docs/coordinates/index.rst create mode 100644 docs/coordinates/matchsep.rst create mode 100644 docs/coordinates/references.txt create mode 100644 docs/coordinates/representations.rst create mode 100644 docs/coordinates/sgr-example.py create mode 100644 docs/coordinates/sgr-example.rst create mode 100644 docs/coordinates/skycoord.rst create mode 100644 docs/coordinates/transforming.rst create mode 100644 docs/cosmology/index.rst create mode 100644 docs/credits.rst create mode 100644 docs/development/affiliated-packages.rst create mode 100644 docs/development/building.rst create mode 100644 docs/development/ccython.rst create mode 100644 docs/development/codeguide.rst create mode 100644 docs/development/codeguide_emacs.rst create mode 100644 docs/development/docguide.rst create mode 100644 docs/development/docrules.rst create mode 100644 docs/development/releasing.rst create mode 100644 docs/development/scripts.rst create mode 100644 docs/development/testguide.rst create mode 100644 docs/development/vision.rst create mode 100644 docs/development/workflow/additional_git_topics.rst create mode 100644 docs/development/workflow/branch_dropdown.png create mode 100644 docs/development/workflow/command_history.rst create mode 100644 docs/development/workflow/command_history.sh create mode 100644 docs/development/workflow/command_history_with_output.sh create mode 100644 docs/development/workflow/development_workflow.rst create mode 100644 docs/development/workflow/forking_button.png create mode 100644 docs/development/workflow/get_devel_version.rst create mode 100644 docs/development/workflow/git_edit_workflow_examples.rst create mode 100644 docs/development/workflow/git_install.rst 
create mode 100644 docs/development/workflow/git_links.inc create mode 100644 docs/development/workflow/git_resources.rst create mode 100644 docs/development/workflow/known_projects.inc create mode 100644 docs/development/workflow/links.inc create mode 100644 docs/development/workflow/maintainer_workflow.rst create mode 100644 docs/development/workflow/milestone.png create mode 100644 docs/development/workflow/patches.rst create mode 100644 docs/development/workflow/pull_button.png create mode 100644 docs/development/workflow/terminal_cast.rst create mode 100644 docs/development/workflow/this_project.inc create mode 100644 docs/development/workflow/virtual_pythons.rst create mode 100644 docs/development/workflow/virtualenv_detail.rst create mode 100644 docs/development/workflow/worked_example_switch_branch.png create mode 100644 docs/getting_started.rst create mode 100644 docs/index.rst create mode 100644 docs/install.rst create mode 100644 docs/io/ascii/base_classes.rst create mode 100644 docs/io/ascii/extension_classes.rst create mode 100644 docs/io/ascii/fixed_width_gallery.rst create mode 100644 docs/io/ascii/index.rst create mode 100644 docs/io/ascii/read.rst create mode 100644 docs/io/ascii/references.txt create mode 100644 docs/io/ascii/toc.txt create mode 100644 docs/io/ascii/write.rst create mode 100644 docs/io/fits/api/cards.rst create mode 100644 docs/io/fits/api/diff.rst create mode 100644 docs/io/fits/api/files.rst create mode 100644 docs/io/fits/api/hdulists.rst create mode 100644 docs/io/fits/api/hdus.rst create mode 100644 docs/io/fits/api/headers.rst create mode 100644 docs/io/fits/api/images.rst create mode 100644 docs/io/fits/api/tables.rst create mode 100644 docs/io/fits/api/verification.rst create mode 100644 docs/io/fits/appendix/faq.rst create mode 100644 docs/io/fits/appendix/header_transition.rst create mode 100644 docs/io/fits/appendix/history.rst create mode 100644 docs/io/fits/images/Blue.jpg create mode 100644 
docs/io/fits/images/Green.jpg create mode 100644 docs/io/fits/images/Hs-2009-14-a-web.jpg create mode 100644 docs/io/fits/images/Red.jpg create mode 100644 docs/io/fits/index.rst create mode 100644 docs/io/fits/usage/examples.rst create mode 100644 docs/io/fits/usage/headers.rst create mode 100644 docs/io/fits/usage/image.rst create mode 100644 docs/io/fits/usage/misc.rst create mode 100644 docs/io/fits/usage/scripts.rst create mode 100644 docs/io/fits/usage/table.rst create mode 100644 docs/io/fits/usage/unfamiliar.rst create mode 100644 docs/io/fits/usage/verification.rst create mode 100644 docs/io/misc.rst create mode 100644 docs/io/registry.rst create mode 100644 docs/io/unified.rst create mode 100644 docs/io/votable/.gitignore create mode 100644 docs/io/votable/api_exceptions.rst create mode 100644 docs/io/votable/index.rst create mode 100644 docs/io/votable/references.txt create mode 100644 docs/known_issues.rst create mode 100644 docs/license.rst create mode 100644 docs/logging.rst create mode 100644 docs/make.bat create mode 100644 docs/modeling/algorithms.rst create mode 100644 docs/modeling/design.rst create mode 100644 docs/modeling/fitting.rst create mode 100644 docs/modeling/index.rst create mode 100644 docs/modeling/links.inc create mode 100644 docs/modeling/models.rst create mode 100644 docs/modeling/new.rst create mode 100644 docs/modeling/parameters.rst create mode 100644 docs/nddata/index.rst create mode 100644 docs/nddata/nddata.rst create mode 100644 docs/nddata/subclassing.rst create mode 100644 docs/nitpick-exceptions create mode 100644 docs/overview.rst create mode 100644 docs/rtd-pip-requirements create mode 100644 docs/stability.rst create mode 100644 docs/stats/index.rst create mode 100644 docs/table/access_table.rst create mode 100644 docs/table/construct_table.rst create mode 100644 docs/table/index.rst create mode 100644 docs/table/io.rst create mode 100644 docs/table/masking.rst create mode 100644 docs/table/modify_table.rst create 
mode 100644 docs/table/operations.rst create mode 100644 docs/table/references.txt create mode 100644 docs/table/table_repr_html.png create mode 100644 docs/time/index.rst create mode 100644 docs/time/references.txt create mode 100644 docs/time/time_scale_conversion.odg create mode 100644 docs/time/time_scale_conversion.png create mode 100644 docs/units/combining_and_defining.rst create mode 100644 docs/units/conversion.rst create mode 100644 docs/units/decomposing_and_composing.rst create mode 100644 docs/units/equivalencies.rst create mode 100644 docs/units/format.rst create mode 100644 docs/units/index.rst create mode 100644 docs/units/quantity.rst create mode 100644 docs/units/standard_units.rst create mode 100644 docs/utils/index.rst create mode 100644 docs/vo/conesearch/client.rst create mode 100644 docs/vo/conesearch/images/astropy_vo_flowchart.png create mode 100644 docs/vo/conesearch/images/client_predict_search_n.png create mode 100644 docs/vo/conesearch/images/client_predict_search_t.png create mode 100644 docs/vo/conesearch/images/validator_html_1.png create mode 100644 docs/vo/conesearch/images/validator_html_2.png create mode 100644 docs/vo/conesearch/images/validator_html_3.png create mode 100644 docs/vo/conesearch/images/validator_html_4.png create mode 100644 docs/vo/conesearch/index.rst create mode 100644 docs/vo/conesearch/validator.rst create mode 100644 docs/vo/index.rst create mode 100644 docs/vo/samp/advanced_embed_samp_hub.rst create mode 100644 docs/vo/samp/example_clients.rst create mode 100644 docs/vo/samp/example_hub.rst create mode 100644 docs/vo/samp/example_table_image.rst create mode 100644 docs/vo/samp/index.rst create mode 100644 docs/vo/samp/references.txt create mode 100644 docs/warnings.rst create mode 100644 docs/wcs/examples/from_file.py create mode 100644 docs/wcs/examples/programmatic.py create mode 100644 docs/wcs/history.rst create mode 100644 docs/wcs/index.rst create mode 100644 docs/wcs/references.txt create mode 100644 
docs/wcs/relax.rst create mode 100644 docs/whatsnew/0.1.rst create mode 100644 docs/whatsnew/0.2.rst create mode 100644 docs/whatsnew/0.3.rst create mode 100644 docs/whatsnew/0.4.rst create mode 100644 docs/whatsnew/index.rst create mode 100644 ez_setup.py create mode 100644 licenses/CONFIGOBJ_LICENSE.rst create mode 100644 licenses/DATATABLES_LICENSE.rst create mode 100644 licenses/ERFA.rst create mode 100644 licenses/EXPAT_LICENSE.rst create mode 100644 licenses/FUTURES_LICENSE.rst create mode 100644 licenses/JQUERY_LICENSE.rst create mode 100644 licenses/LICENSE.rst create mode 100644 licenses/PLY_LICENSE.rst create mode 100644 licenses/PYFITS.rst create mode 100644 licenses/PYTEST_LICENSE.rst create mode 100644 licenses/README.rst create mode 100644 licenses/SIX_LICENSE.rst create mode 100644 licenses/SPHINXEXT_LICENSES.rst create mode 100644 licenses/SYMPY.rst create mode 100644 licenses/WCSLIB_LICENSE.rst create mode 100644 scripts/README.rst create mode 100755 scripts/fitscheck create mode 100755 scripts/fitsdiff create mode 100755 scripts/fitsheader create mode 100755 scripts/samp_hub create mode 100755 scripts/volint create mode 100755 scripts/wcslint create mode 100644 setup.cfg create mode 100755 setup.py create mode 100644 static/wininst_background.bmp diff --git a/CHANGES.rst b/CHANGES.rst new file mode 100644 index 0000000..4cdbc74 --- /dev/null +++ b/CHANGES.rst @@ -0,0 +1,2947 @@ +0.4.2 (2014-09-23) +------------------ + +Bug Fixes +^^^^^^^^^ + +- ``astropy.coordinates`` + + - ``Angle`` accepts hours:mins or deg:mins initializers (without + seconds). In these cases float minutes are also accepted. + + - The ``repr`` for coordinate frames now displayes the frame attributes + (ex: ra, dec) in a consistent order. It should be noted that as part of + this fix, the ``BaseCoordinateFrame.get_frame_attr_names()`` method now + returns an ``OrderedDict`` instead of just a ``dict``. 
[#2845] + +- ``astropy.io.fits`` + + - Fixed a crash when reading scaled float data out of a FITS file that was + loaded from a string (using ``HDUList.fromfile``) rather than from a file. + [#2710] + + - Fixed a crash when reading data from an HDU whose header contained in + invalid value for the BLANK keyword (eg. a string value instead of an + integer as required by the FITS Standard). Invalid BLANK keywords are now + warned about, but are otherwise ignored. [#2711] + + - Fixed a crash when reading the header of a tile-compressed HDU if that + header contained invalid duplicate keywords resulting in a ``KeyError`` + [#2750] + + - Fixed crash when reading gzip-compressed FITS tables through the Astropy + ``Table`` interface. [#2783] + + - Fixed corruption when writing new FITS files through to gzipped files. + [#2794] + + - Fixed crash when writing HDUs made with non-contiguous data arrays to + file-like objects. [#2794] + + - It is now possible to create ``astropy.io.fits.BinTableHDU`` + objects with a table with zero rows. [#2916] + +- ``astropy.io.misc`` + + - Fixed a bug that prevented h5py ``Dataset`` objects from being + automatically recognized by ``Table.read``. [#2831] + +- ``astropy.modeling`` + + - Make ``LevMarLSQFitter`` work with ``weights`` keyword. [#2900] + +- ``astropy.table`` + + - Fixed reference cycle in tables that could prevent ``Table`` objects + from being freed from memory. [#2879] + + - Fixed an issue where ``Table.pprint()`` did not print the header to + ``stdout`` when ``stdout`` is redirected (say, to a file). [#2878] + + - Fixed printing of masked values when a format is specified. [#1026] + + - Ensured that numpy ufuncs that return booleans return plain ``ndarray`` + instances, just like the comparison operators. [#2963] + +- ``astropy.time`` + + - Ensure bigendian input to Time works on a little-endian machine + (and vice versa). [#2942] + +- ``astropy.units`` + + - Ensure unit is kept when adding 0 to quantities. 
[#2968] + +- ``astropy.utils`` + + - Fixed color printing on Windows with IPython 2.0. [#2878] + +- ``astropy.vo`` + + - Improved error message on Cone Search time out. [#2687] + +- ``astropy.wcs`` + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Fixed a couple issues with files being inappropriately included and/or + excluded from the source archive distributions of Astropy. [#2843, #2854] + +- As part of fixing the fact that masked elements of table columns could not be + printed when a format was specified, the column format string options were + expanded to allow simple specifiers such as ``'5.2f'``. [#2898] + +- Ensure numpy 1.9 is supported. [#2917] + +- Ensure numpy master is supported, by making ``np.cbrt`` work with quantities. + [#2937] + + +0.4.1 (2014-08-08) +------------------ + +Bug Fixes +^^^^^^^^^ + +- ``astropy.config`` + + - Fixed a bug where an unedited configuration file from astropy + 0.3.2 would not be correctly identified as unedited. [#2772] This + resulted in the warning:: + + WARNING: ConfigurationChangedWarning: The configuration options + in astropy 0.4 may have changed, your configuration file was not + updated in order to preserve local changes. A new configuration + template has been saved to + '~/.astropy/config/astropy.0.4.cfg'. [astropy.config.configuration] + + - Fixed the error message that is displayed when an old + configuration item has moved. Before, the destination + section was wrong. [#2772] + + - Added configuration settings for ``io.fits``, ``io.votable`` and + ``table.jsviewer`` that were missing from the configuration file + template. [#2772] + + - The configuration template is no longer rewritten on every import + of astropy, causing race conditions. [#2805] + +- ``astropy.convolution`` + + - Fixed the multiplication of ``Kernel`` with numpy floats. [#2174] + +- ``astropy.coordinates`` + + - ``Distance`` can now take a list of quantities. 
[#2261] + + - For in-place operations for ``Angle`` instances in which the result unit + is not an angle, an exception is raised before the instance is corrupted. + [#2718] + + - ``CartesianPoints`` are now deprecated in favor of + ``CartesianRepresentation``. [#2727] + +- ``astropy.io.misc`` + + - An existing table within an HDF5 file can be overwritten without affecting + other datasets in the same HDF5 file by simultaneously using + ``overwrite=True`` and ``append=True`` arguments to the ``Table.write`` + method. [#2624] + +- ``astropy.logger`` + + - Fixed a crash that could occur in rare cases when (such as in bundled + apps) where submodules of the ``email`` package are not importable. [#2671] + +- ``astropy.nddata`` + + - ``astropy.nddata.NDData()`` no longer raises a ``ValueError`` when passed + a numpy masked array which has no masked entries. [#2784] + +- ``astropy.table`` + + - When saving a table to a FITS file containing a unit that is not + supported by the FITS standard, a warning rather than an exception + is raised. [#2797] + +- ``astropy.units`` + + - By default, ``Quantity`` and its subclasses will now convert to float also + numerical types such as ``decimal.Decimal``, which are stored as objects + by numpy. [#1419] + + - The units ``count``, ``pixel``, ``voxel`` and ``dbyte`` now output + to FITS, OGIP and VOUnit formats correctly. [#2798] + +- ``astropy.utils`` + + - Restored missing information from deprecation warning messages + from the ``deprecated`` decorator. [#2811] + + - Fixed support for ``staticmethod`` deprecation in the ``deprecated`` + decorator. [#2811] + +- ``astropy.wcs`` + + - Fixed a memory leak when ``astropy.wcs.WCS`` objects are copied + [#2754] + + - Fixed a crash when passing ``ra_dec_order=True`` to any of the + ``*2world`` methods. [#2791] + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Bundled copy of astropy-helpers upgraded to v0.4.1. 
[#2825] + +- General improvements to documentation and docstrings [#2722, #2728, #2742] + +- Made it easier for third-party packagers to have Astropy use their own + version of the ``six`` module (so long as it meets the minimum version + requirement) and remove the copy bundled with Astropy. See the + astropy/extern/README file in the source tree. [#2623] + + +0.4 (2014-07-16) +---------------- + +New Features +^^^^^^^^^^^^ + +- ``astropy.constants`` + + - Added ``b_wien`` to represent Wien wavelength displacement law constant. + [#2194] + +- ``astropy.convolution`` + + - Changed the input parameter in ``Gaussian1DKernel`` and + ``Gaussian2DKernel`` from ``width`` to ``stddev`` [#2085]. + +- ``astropy.coordinates`` + + - The coordinates package has undergone major changes to implement + `APE5 `_ . + These include backwards-incompatible changes, as the underlying framework + has changed substantially. See the APE5 text and the package documentation + for more details. [#2422] + + - A ``position_angle`` method has been added to the new ``SkyCoord``. [#2487] + + - Updated ``Angle.dms`` and ``Angle.hms`` to return ``namedtuple`` -s instead + of regular tuples, and added ``Angle.signed_dms`` attribute that gives the + absolute value of the ``d``, ``m``, and ``s`` along with the sign. [#1988] + + - By default, ``Distance`` objects are now required to be positive. To + allow negative values, set ``allow_negative=True`` in the ``Distance`` + constructor when creating a ``Distance`` instance. + + - ``Longitude`` (resp. ``Latitude``) objects cannot be used any more to + initialize or set ``Latitude`` (resp. ``Longitude``) objects. An explicit + conversion to ``Angle`` is now required. [#2461] + + - The deprecated functions for pre-0.3 coordinate object names like + ``ICRSCoordinates`` have been removed. 
[#2422] + + - The ``rotation_matrix`` and ``angle_axis`` functions in + ``astropy.coordinates.angles`` were made more numerically consistent and + are now tested explicitly [#2619] + +- ``astropy.cosmology`` + + - Added ``z_at_value`` function to find the redshift at which a cosmology + function matches a desired value. [#1909] + + - Added ``FLRW.differential_comoving_volume`` method to give the differential + comoving volume at redshift z. [#2103] + + - The functional interface is now deprecated in favor of the more-explicit + use of methods on cosmology objects. [#2343] + + - Updated documentation to reflect the removal of the functional + interface. [#2507] + +- ``astropy.io.ascii`` + + - The ``astropy.io.ascii`` output formats ``latex`` and ``aastex`` accept a + dictionary called ``latex_dict`` to specify options for LaTeX output. It is + now possible to specify the table alignment within the text via the + ``tablealign`` keyword. [#1838] + + - If ``header_start`` is specified in a call to ``ascii.get_reader`` or any + method that calls ``get_reader`` (e.g. ``ascii.read``) but ``data_start`` + is not specified at the same time, then ``data_start`` is calculated so + that the data starts after the header. Before this, the default was + that the header line was read again as the first data line + [#855 and #1844]. + + - A new ``csv`` format was added as a convenience for handling CSV (comma- + separated values) data. [#1935] + This format also recognises rows with an inconsistent number of elements. + [#1562] + + - An option was added to guess the start of data for CDS format files when + they do not strictly conform to the format standard. [#2241] + + - Added an HTML reader and writer to the ``astropy.io.ascii`` package. + Parsing requires the installation of BeautifulSoup and is therefore + an optional feature. [#2160] + + - Added support for inputting column descriptions and column units + with the ``io.ascii.SExtractor`` reader. 
[#2372] + + - Allow the use of non-local ReadMe files in the CDS reader. [#2329] + + - Provide a mechanism to select how masked values are printed. [#2424] + + - Added support for reading multi-aperture daophot file. [#2656] + +- ``astropy.io.fits`` + + - Included a new command-line script called ``fitsheader`` to display the + header(s) of a FITS file from the command line. [#2092] + + - Added new verification options ``fix+ignore``, ``fix+warn``, + ``fix+exception``, ``silentfix+ignore``, ``silentfix+warn``, and + ``silentfix+exception`` which give more control over how to report fixable + errors as opposed to unfixable errors. + +- ``astropy.modeling`` + + - Prototype implementation of fitters that treat optimization algorithms + separately from fit statistics, allowing new fitters to be created by + mixing and matching optimizers and statistic functions. [#1914] + + - Slight overhaul to how inputs to and outputs from models are handled with + respect to array-valued parameters and variables, as well as sets of + multiple models. See the associated PR and the modeling section of the + v0.4 documentation for more details. [#2634] + + - Added a new ``SimplexLSQFitter`` which uses a downhill simplex optimizer + with a least squares statistic. [#1914] + + - Changed ``Gaussian2D`` model such that ``theta`` now increases + counterclockwise. [#2199] + + - Replaced the ``MatrixRotation2D`` model with a new model called simply + ``Rotation2D`` which requires only an angle to specify the rotation. + The new ``Rotation2D`` rotates in a counter-clockwise sense whereas + the old ``MatrixRotation2D`` increased the angle clockwise. + [#2266, #2269] + + - Added a new ``AffineTransformation2D`` model which serves as a + replacement for the capability of ``MatrixRotation2D`` to accept an + arbitrary matrix, while also adding a translation capability. [#2269] + + - Added ``GaussianAbsorption1D`` model. [#2215] + + - New ``Redshift`` model [#2176]. 
+ +- ``astropy.nddata`` + + - Allow initialization ``NDData`` or ``StdDevUncertainty`` with a + ``Quantity``. [#2380] + +- ``astropy.stats`` + + - Added flat prior to binom_conf_interval and binned_binom_proportion + + - Change default in ``sigma_clip`` from ``np.median`` to ``np.ma.median``. + [#2582] + +- ``astropy.sphinx`` + + - Note, the following new features are included in astropy-helpers as well: + + - The ``automodapi`` and ``automodsumm`` extensions now include sphinx + configuration options to write out what ``automodapi`` and ``automodsumm`` + generate, mainly for debugging purposes. [#1975, #2022] + + - Reference documentation now shows functions/class docstrings at the + inteded user-facing API location rather than the actual file where + the implementation is found. [#1826] + + - The ``automodsumm`` extension configuration was changed to generate + documentation of class ``__call__`` member functions. [#1817, #2135] + + - ``automodapi`` and ``automodsumm`` now have an ``:allowed-package-names:`` + option that make it possible to document functions and classes that + are in a different namespace. [#2370] + +- ``astropy.table`` + + - Improved grouped table aggregation by using the numpy ``reduceat()`` method + when possible. This can speed up the operation by a factor of at least 10 + to 100 for large unmasked tables and columns with relatively small + group sizes. [#2625] + + - Allow row-oriented data input using a new ``rows`` keyword argument. + [#850] + + - Allow subclassing of ``Table`` and the component classes ``Row``, ``Column``, + ``MaskedColumn``, ``TableColumns``, and ``TableFormatter``. [#2287] + + - Fix to allow numpy integer types as valid indices into tables in + Python 3.x [#2477] + + - Remove transition code related to the order change in ``Column`` and + ``MaskedColumn`` arguments ``name`` and ``data`` from Astropy 0.2 + to 0.3. 
[#2511] + + - Change HTML table representation in IPython notebook to show all + table columns instead of restricting to 80 column width. [#2651] + +- ``astropy.time`` + + - Mean and apparent sidereal time can now be calculated using the + ``sidereal_time`` method [#1418]. + + - The time scale now defaults to UTC if no scale is provided. [#2091] + + - ``TimeDelta`` objects can have all scales but UTC, as well as, for + consistency with time-like quantities, undefined scale (where the + scale is taken from the object one adds to or subtracts from). + This allows, e.g., to work consistently in TDB. [#1932] + + - ``Time`` now supports ISO format strings that end in "Z". [#2211, #2203] + +- ``astropy.units`` + + - Support for the unit format `Office of Guest Investigator Programs (OGIP) + FITS files + `__ + has been added. [#377] + + - The ``spectral`` equivalency can now handle angular wave number. [#1306 and + #1899] + + - Added ``one`` as a shorthand for ``dimensionless_unscaled``. [#1980] + + - Added ``dex`` and ``dB`` units. [#1628] + + - Added ``temperature()`` equivalencies to support conversion between + Kelvin, Celsius, and Fahrenheit. [#2209] + + - Added ``temperature_energy()`` equivalencies to support conversion + between electron-volt and Kelvin. [#2637] + + - The runtime of ``astropy.units.Unit.compose`` is greatly improved + (by a factor of 2 in most cases) [#2544] + + - Added ``electron`` unit. [#2599] + +- ``astropy.utils`` + + - ``timer.RunTimePredictor`` now uses ``astropy.modeling`` in its + ``do_fit()`` method. [#1896] + +- ``astropy.vo`` + + - A new sub-package, ``astropy.vo.samp``, is now available (this was + previously the SAMPy package, which has been refactored for use in + Astropy). [#1907] + + - Enhanced functionalities for ``VOSCatalog`` and ``VOSDatabase``. [#1206] + +- ``astropy.wcs`` + + - astropy now requires wcslib version 4.23 or later. The version of + wcslib included with astropy has been updated to version 4.23. 
+ + - Bounds checking is now performed on native spherical + coordinates. Any out-of-bounds values will be returned as + ``NaN``, and marked in the ``stat`` array, if using the + low-level ``wcslib`` interface such as + ``astropy.wcs.Wcsprm.p2s``. [#2107] + + - A new method, ``astropy.wcs.WCS.compare()``, compares two wcsprm + structs for equality with varying degrees of strictness. [#2361] + + - New ``astropy.wcs.utils`` module, with a handful of tools for manipulating + WCS objects, including dropping, swapping, and adding axes. + +- Misc + + - Includes the new astropy-helpers package which separates some of Astropy's + build, installation, and documentation infrastructure out into an + independent package, making it easier for Affiliated Packages to depend on + these features. astropy-helpers replaces/deprecates some of the submodules + in the ``astropy`` package (see API Changes below). See also + `APE 4 `_ + for more details on the motivation behind and implementation of + astropy-helpers. [#1563] + + +API Changes +^^^^^^^^^^^ + +- ``astropy.config`` + + - The configuration system received a major overhaul, as part of APE3. It is + no longer possible to save configuration items from Python, but instead + users must edit the configuration file directly. The locations of + configuration items have moved, and some have been changed to science state + values. The old locations should continue to work until astropy 0.5, but + deprecation warnings will be displayed. See the `Configuration transition + `_ + docs for a detailed description of the changes and how to update existing + code. [#2094] + +- ``astropy.io.fits`` + + - The ``astropy.io.fits.new_table`` function is now fully deprecated (though + will not be removed for a long time, considering how widely it is used). + + Instead please use the more explicit ``BinTableHDU.from_columns`` to create + a new binary table HDU, and the similar ``TableHDU.from_columns`` to create + a new ASCII table. 
These otherwise accept the same arguments as + ``new_table`` which is now just a wrapper for these. + + - The ``.fromstring`` classmethod of each HDU type has been simplified such + that, true to its namesake, it only initializes an HDU from a string + containing its header *and* data. + + - Fixed an issue where header wildcard matching (for example + ``header['DATE*']``) can be used to match *any* characters that might + appear in a keyword. Previously this only matched keywords containing + characters in the set ``[0-9A-Za-z_]``. Now this can also match a hyphen + ``-`` and any other characters, as some conventions like ``HIERARCH`` and + record-valued keyword cards allow a wider range of valid characters than + standard FITS keywords. + + - This will be the *last* release to support the following APIs that have + been marked deprecated since Astropy v0.1/PyFITS v3.1: + + - The ``CardList`` class, which was part of the old header implementation. + + - The ``Card.key`` attribute. Use ``Card.keyword`` instead. + + - The ``Card.cardimage`` and ``Card.ascardimage`` attributes. Use simply + ``Card.image`` or ``str(card)`` instead. + + - The ``create_card`` factory function. Simply use the normal ``Card`` + constructor instead. + + - The ``create_card_from_string`` factory function. Use ``Card.fromstring`` + instead. + + - The ``upper_key`` function. Use ``Card.normalize_keyword`` method + instead (this is not unlikely to be used outside of PyFITS itself, but it + was technically public API). + + - The usage of ``Header.update`` with ``Header.update(keyword, value, + comment)`` arguments. ``Header.update`` should only be used analogously + to ``dict.update``. Use ``Header.set`` instead. + + - The ``Header.ascard`` attribute. Use ``Header.cards`` instead for a list + of all the ``Card`` objects in the header. + + - The ``Header.rename_key`` method. Use ``Header.rename_keyword`` instead. + + - The ``Header.get_history`` method. 
Use ``header['HISTORY']`` instead + (normal keyword lookup). + + - The ``Header.get_comment`` method. Use ``header['COMMENT']`` instead. + + - The ``Header.toTxtFile`` method. Use ``header.totextfile`` instead. + + - The ``Header.fromTxtFile`` method. Use ``Header.fromtextfile`` instead. + + - The ``tdump`` and ``tcreate`` functions. Use ``tabledump`` and + ``tableload`` respectively. + + - The ``BinTableHDU.tdump`` and ``tcreate`` methods. Use + ``BinTableHDU.dump`` and ``BinTableHDU.load`` respectively. + + - The ``txtfile`` argument to the ``Header`` constructor. Use + ``Header.fromfile`` instead. + + - The ``startColumn`` and ``endColumn`` arguments to the ``FITS_record`` + constructor. These are unlikely to be used by any user code. + + These deprecated interfaces will be removed from the development version of + Astropy following the v0.4 release (they will still be available in any + v0.4.x bugfix releases, however). + +- ``astropy.modeling`` + + - The method computing the derivative of the model with respect + to parameters was renamed from ``deriv`` to ``fit_deriv``. [#1739] + + - ``ParametricModel`` and the associated ``Parametric1DModel`` and + ``Parametric2DModel`` classes have been renamed ``FittableModel``, + ``Fittable1DModel``, and ``Fittable2DModel`` respectively. The base + ``Model`` class has subsumed the functionality of the old + + ``ParametricModel`` class so that all models support parameter constraints. + The only distinction of ``FittableModel`` is that anything which subclasses + it is assumed "safe" to use with Astropy fitters. [#2276] + + - ``NonLinearLSQFitter`` has been renamed ``LevMarLSQFitter`` to emphasise + that it uses the Levenberg-Marquardt optimization algorithm with a + least squares statistic function. [#1914] + + - The ``SLSQPFitter`` class has been renamed ``SLSQPLSQFitter`` to emphasize + that it uses the Sequential Least Squares Programming optimization + algorithm with a least squares statistic function. 
[#1914] + + - The ``Fitter.errorfunc`` method has been renamed to the more general + ``Fitter.objective_function``. [#1914] + +- ``astropy.nddata`` + + - Issue warning if unit is changed from a non-trivial value by directly + setting ``NDData.unit``. [#2411] + + - The ``mask`` and ``flag`` attributes of ``astropy.nddata.NDData`` can now + be set with any array-like object instead of requiring that they be set + with a ``numpy.ndarray``. [#2419] + +- ``astropy.sphinx`` + + - Use of the ``astropy.sphinx`` module is deprecated; all new development of + this module is in ``astropy_helpers.sphinx`` which should be used instead + (therefore documentation builds that made use of any of the utilities in + ``astropy.sphinx`` now have ``astropy_helpers`` as a documentation + dependency). + +- ``astropy.table`` + + - The default table printing function now shows a table header row for units + if any columns have the unit attribute set. [#1282] + + - Before, an unmasked ``Table`` was automatically converted to a masked + table if generated from a masked Table or a ``MaskedColumn``. + Now, this conversion is only done if explicitly requested or if any + of the input values is actually masked. [#1185] + + - The repr() function of ``astropy.table.Table`` now shows the units + if any columns have the unit attribute set. [#2180] + + - The semantics of the config options ``table.max_lines`` and + ``table.max_width`` has changed slightly. If these values are not + set in the config file, astropy will try to determine the size + automatically from the terminal. [#2683] + +- ``astropy.time`` + + - Correct use of UT in TDB calculation [#1938, #1939]. + + - ``TimeDelta`` objects can have scales other than TAI [#1932]. + + - Location information should now be passed on via an ``EarthLocation`` + instance or anything that initialises it, e.g., a tuple containing + either geocentric or geodetic coordinates. 
[#1928] + +- ``astropy.units`` + + - ``Quantity`` now converts input to float by default, as this is physically + most sensible for nearly all units [#1776]. + + - ``Quantity`` comparisons with ``==`` or ``!=`` now always return ``True`` + or ``False``, even if units do not match (for which case a ``UnitsError`` + used to be raised). [#2328] + + - Applying ``float`` or ``int`` to a ``Quantity`` now works for all + dimensionless quantities; they are automatically converted to unscaled + dimensionless. [#2249] + + - The exception ``astropy.units.UnitException``, which was + deprecated in astropy 0.2, has been removed. Use + ``astropy.units.UnitError`` instead [#2386] + + - Initializing a ``Quantity`` with a valid number/array with a ``unit`` + attribute now interprets that attribute as the units of the input value. + This makes it possible to initialize a ``Quantity`` from an Astropy + ``Table`` column and have it correctly pick up the units from the column. + [#2486] + +- ``astropy.wcs`` + + - ``calcFootprint`` was deprecated. It is replaced by + ``calc_footprint``. An optional boolean keyword ``center`` was + added to ``calc_footprint``. It controls whether the centers or + the corners of the pixels are used in the computation. [#2384] + + - ``astropy.wcs.WCS.sip_pix2foc`` and + ``astropy.wcs.WCS.sip_foc2pix`` formerly did not conform to the + ``SIP`` standard: ``CRPIX`` was added to the ``foc`` result so + that it could be used as input to "core FITS WCS". As of astropy + 0.4, ``CRPIX`` is no longer added to the result, so the ``foc`` + space is correct as defined in the `SIP convention + `__. [#2360] + + - ``astropy.wcs.UnitConverter``, which was deprecated in astropy + 0.2, has been removed. Use the ``astropy.units`` module + instead. 
[#2386] + + - The following methods on ``astropy.wcs.WCS``, which were + deprecated in astropy 0.1, have been removed [#2386]: + + - ``all_pix2sky`` -> ``all_pix2world`` + - ``wcs_pix2sky`` -> ``wcs_pix2world`` + - ``wcs_sky2pix`` -> ``wcs_world2pix`` + + - The ``naxis1`` and ``naxis2`` attributes and the ``get_naxis`` + method of ``astropy.wcs.WCS``, which were deprecated in astropy + 0.2, have been removed. Use the shape of the underlying FITS data + array instead. [#2386] + +- Misc + + - The ``astropy.setup_helpers`` and ``astropy.version_helpers`` modules are + deprecated; any non-critical fixes and development to those modules should + be in ``astropy_helpers`` instead. Packages that use these modules in + their ``setup.py`` should depend on ``astropy_helpers`` following the same + pattern as in the Astropy package template. + + +Bug Fixes +^^^^^^^^^ + +- ``astropy.constants`` + + - ``astropy.constants.Contant`` objects can now be deep + copied. [#2601] + +- ``astropy.cosmology`` + + - The distance modulus function in ``astropy.cosmology`` can now handle + negative distances, which can occur in certain closed cosmologies. [#2008] + + - Removed accidental imports of some extraneous variables in + ``astropy.cosmology`` [#2025] + +- ``astropy.io.ascii`` + + - ``astropy.io.ascii.read`` would fail to read lists of strings where some of + the strings consisted of just a newline ("\n"). [#2648] + +- ``astropy.io.fits`` + + - Use NaN for missing values in FITS when using Table.write for float + columns. Earlier the default fill value was close to 1e20.[#2186] + + - Fixes for checksums on 32-bit platforms. Results may be different + if writing or checking checksums in "nonstandard" mode. [#2484] + + - Additional minor bug fixes ported from PyFITS. 
[#2575] + +- ``astropy.io.votable`` + + - It is now possible to save an ``astropy.table.Table`` object as a + VOTable with any of the supported data formats, ``tabledata``, + ``binary`` and ``binary2``, by using the ``tabledata_format`` + kwarg. [#2138] + + - Fixed a crash writing out variable length arrays. [#2577] + +- ``astropy.nddata`` + + - Indexing ``NDData`` in a way that results in a single element returns that + element. [#2170] + + - Change construction of result of arithmetic and unit conversion to allow + subclasses to require the presence of attribute like unit. [#2300] + + - Scale uncertainties to correct units in arithmetic operations and unit + conversion. [#2393] + + - Ensure uncertainty and mask members are copied in arithmetic and + convert_unit_to. [#2394] + + - Mask result of arithmetic if either of the operands is masked. [#2403] + + - Copy all attributes of input object if ``astropy.nddata.NDData`` is + initialized with an ``NDData`` object. [#2406] + + - Copy ``flags`` to new object in ``convert_unit_to``. [#2409] + + - Result of ``NDData`` arithmetic makes a copy of any WCS instead of using + a reference. [#2410] + + - Fix unit handling for multiplication/division and use + ``astropy.units.Quantity`` for units arithmetic. [#2413] + + - A masked ``NDData`` is now converted to a masked array when used in an + operation or ufunc with a numpy array. [#2414] + + - An unmasked ``NDData`` now uses an internal representation of its mask + state that ``numpy.ma`` expects so that an ``NDData`` behaves as an + unmasked array. [#2417] + +- ``astropy.sphinx`` + + - Fix crash in smart resolver when the resolution doesn't work. [#2591] + +- ``astropy.table`` + + - The ``astropy.table.Column`` object can now use both functions and callable + objects as formats. [#2313] + + - Fixed a problem on 64 bit windows that caused errors + "expected 'DTYPE_t' but got 'long long'" [#2490] + + - Fix initialisation of ``TableColumns`` with lists or tuples. 
[#2647] + + - Fix removal of single column using ``remove_columns``. [#2699] + + - Fix a problem that setting a row element within a masked table did not + update the corresponding table element. [#2734] + +- ``astropy.time`` + + - Correct UT1->UTC->UT1 round-trip being off by 1 second if UT1 is + on a leap second. [#2077] + +- ``astropy.units`` + + - ``Quantity.copy`` now behaves identically to ``ndarray.copy``, and thus + supports the ``order`` argument (for numpy >=1.6). [#2284] + + - Composing base units into identical composite units now works. [#2382] + + - Creating and composing/decomposing units is now substantially faster [#2544] + + - ``Quantity`` objects now are able to be assigned NaN [#2695] + +- ``astropy.wcs`` + + - Astropy now requires wcslib version 4.23 or later. The version of + wcslib included with astropy has been updated to version 4.23. + + - Bug fixes in the projection routines: in ``hpxx2s`` [the + cartesian-to-spherical operation of the ``HPX`` projection] + relating to bounds checking, bug introduced at wcslib 4.20; in + ``parx2s`` and molx2s`` [the cartesion-to-spherical operation of + the ``PAR`` and ``MOL`` projections respectively] relating to + setting the stat vector; in ``hpxx2s`` relating to implementation + of the vector API; and in ``xphx2s`` relating to setting an + out-of-bounds value of *phi*. + + - In the ``PCO`` projection, use alternative projection equations + for greater numerical precision near theta == 0. In the ``COP`` + projection, return an exact result for theta at the poles. + Relaxed the tolerance for bounds checking a little in ``SFL`` + projection. + + - Fix a bug allocating insufficient memory in + ``astropy.wcs.WCS.sub`` [#2468] + + - A new method, ``Wcsprm.bounds_check`` (corresponding to wcslib's + ``wcsbchk``) has been added to control what bounds checking is performed by + wcslib. 
+ + - ``WCS.to_header`` will now raise a more meaningful exception when the WCS + information is invalid or inconsistent in some way. [#1854] + + - In ``WCS.to_header``, ``RESTFRQ`` and ``RESTWAV`` are no longer + rewritten if zero. [#2468] + + - In ``WCS.to_header``, floating point values will now always be written + with an exponent or fractional part, i.e. ``.0`` being appended if necessary + to acheive this. [#2468] + + - If the C extension for ``astropy.wcs`` was not built or fails to import for + any reason, ``import astropy.wcs`` will result in an ``ImportError``, + rather than getting obscure errors once the ``astropy.wcs`` is used. + [#2061] + + - When the C extension for ``astropy.wcs`` is built using a version of + ``wscslib`` already present in the system, the package does not try + to install ``wcslib`` headers under ``astropy/wcs/include``. [#2536] + + - Fixes an unresolved external symbol error in the + `astropy.wcs._wcs` C extension on Microsoft Windows when built + with a Microsoft compiler. [#2478] + +- Misc + + - Running the test suite with ``python setup.py test`` now works if + the path to the source contains spaces. [#2488] + + - The version of ERFA included with Astropy is now v1.1.0 [#2497] + + - Removed deprecated option from travis configuration and force use of + wheels rather than allowing build from source. [#2576] + + - The short option ``-n`` to run tests in parallel was broken + (conflicts with the distutils built-in option of "dry-run"). + Changed to ``-j``. [#2566] + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- ``python setup.py test --coverage`` will now give more accurate + results, because the coverage analysis will include early imports of + astropy. There doesn't seem to be a way to get this to work when + doing ``import astropy; astropy.test()``, so the ``coverage`` + keyword to ``astropy.test`` has been removed. Coverage testing now + depends only on `coverage.py + `__, not + ``pytest-cov``. 
[#2112] + +- The included version of py.test has been upgraded to 2.5.1. [#1970] + +- The included version of six.py has been upgraded to 1.5.2. [#2006] + +- Where appropriate, tests are now run both with and without the + ``unicode_literals`` option to ensure that we support both cases. [#1962] + +- Running the Astropy test suite from within the IPython REPL is disabled for + now due to bad interaction between the test runner and IPython's logging + and I/O handler. For now, run the Astropy tests should be run in the basic + Python interpreter. [#2684] + +- Added support for numerical comparison of floating point values appearing in + the output of doctests using a ``+FLOAT_CMP`` doctest flag. [#2087] + +- A monkey patch is performed to fix a bug in Numpy version 1.7 and + earlier where unicode fill values on masked arrays are not + supported. This may cause unintended side effects if your + application also monkey patches ``numpy.ma`` or relies on the broken + behavior. If unicode support of masked arrays is important to your + application, upgrade to Numpy 1.8 or later for best results. [#2059] + +- The developer documentation has been extensively rearranged and + rewritten. [#1712] + +- The ``human_time`` function in ``astropy.utils`` now returns strings + without zero padding. [#2420] + +- The ``bdist_dmg`` command for ``setup.py`` has now been removed. [#2553] + +- Many broken API links have been fixed in the documentation, and the + ``nitpick`` Sphinx option is now used to avoid broken links in future. 
+ [#1221, #2019, #2109, #2161, #2162, #2192, #2200, #2296, #2448, #2456, + #2460, #2467, #2476, #2508, #2509] + + +0.3.2 (2014-05-13) +------------------ + +Bug Fixes +^^^^^^^^^ + +- ``astropy.coordinates`` + + - if ``sep`` argument is specified to be a single character in + ``sexagisimal_to_string``, it now includes seperators only between + items [#2183] + + - Ensure comparisons involving ``Distance`` objects do not raise exceptions; + also ensure operations that lead to units other than length return + ``Quantity``. [#2206, #2250] + + - Multiplication and division of ``Angle`` objects is now + supported. [#2273] + + - Fixed ``Angle.to_string`` functionality so that negative angles have the + correct amount of padding when ``pad=True``. [#2337] + + - Mixing strings and quantities in the ``Angle`` constructor now + works. For example: ``Angle(['1d', 1. * u.d])``. [#2398] + + - If ``Longitude`` is given a ``Longitude`` as input, use its ``wrap_angle`` + by default [#2705] + +- ``astropy.cosmology`` + + - Fixed ``format()`` compatibility with Python 2.6. [#2129] + + - Be more careful about converting to floating point internally [#1815, #1818] + +- ``astropy.io.ascii`` + + - The CDS reader in ``astropy.io.ascii`` can now handle multiple + description lines in ReadMe files. [#2225] + + - When reading a table with values that generate an overflow error during + type conversion (e.g. overflowing the native C long type), fall through to + using string. Previously this generated an exception [#2234]. + + - Some CDS files mark missing values with ``"---"``, others with ``"--"``. + Recognize any string with one to four dashes as null value. [#1335] + +- ``astropy.io.fits`` + + - Allow pickling of ``FITS_rec`` objects. [#1597] + + - Improved behavior when writing large compressed images on OSX by removing + an unncessary check for platform architecture. 
[#2345] + + - Fixed an issue where Astropy ``Table`` objects containing boolean columns + were not correctly written out to FITS files. [#1953] + + - Several other bug fixes ported from PyFITS v3.2.3 [#2368] + + - Fixed a crash on Python 2.x when writing a FITS file directly to a + ``StringIO.StringIO`` object. [#2463] + +- ``astropy.io.registry`` + + - Allow readers/writers with the same name to be attached to different + classes. [#2312] + +- ``astropy.io.votable`` + + - By default, floating point values are now written out using + ``repr`` rather than ``str`` to preserve precision [#2137] + +- ``astropy.modeling`` + + - Fixed the ``SIP`` and ``InverseSIP`` models both so that they work in the + first place, and so that they return results consistent with the SIP + functions in ``astropy.wcs``. [#2177] + +- ``astropy.stats`` + + - Ensure the ``axis`` keyword in ``astropy.stats.funcs`` can now be used for + all axes. [#2173] + +- ``astropy.table`` + + - Ensure nameless columns can be printed, using 'None' for the header. [#2213] + +- ``astropy.time`` + + - Fixed pickling of ``Time`` objects. [#2123] + +- ``astropy.units`` + + - ``Quantity._repr_latex_()`` returns ``NotImplementedError`` for quantity + arrays instead of an uninformative formatting exception. [#2258] + + - Ensure ``Quantity.flat`` always returns ``Quantity``. [#2251] + + - Angstrom unit renders better in MathJax [#2286] + +- ``astropy.utils`` + + - Progress bars will now be displayed inside the IPython + qtconsole. [#2230] + + - ``data.download_file()`` now evaluates ``REMOTE_TIMEOUT()`` at runtime + rather than import time. Previously, setting ``REMOTE_TIMEOUT`` after + import had no effect on the function's behavior. [#2302] + + - Progressbar will be limited to 100% so that the bar does not exceed the + terminal width. The numerical display can still exceed 100%, however. + +- ``astropy.vo`` + + - Fixed ``format()`` compatibility with Python 2.6. 
[#2129] + + - Cone Search validation no longer raises ``ConeSearchError`` for positive RA. + [#2240, #2242] + +- ``astropy.wcs`` + + - Fixed a bug where calling ``astropy.wcs.Wcsprm.sub`` with + ``WCSSUB_CELESTIAL`` may cause memory corruption due to + underallocation of a temporary buffer. [#2350] + + - Fixed a memory allocation bug in ``astropy.wcs.Wcsprm.sub`` and + ``astropy.wcs.Wcsprm.copy``. [#2439] + +- Misc + + - Fixes for compatibility with Python 3.4. [#1945] + + - ``import astropy; astropy.test()`` now correctly uses the same test + configuration as ``python setup.py test`` [#1811] + + +0.3.1 (2014-03-04) +------------------ + +Bug Fixes +^^^^^^^^^ + +- ``astropy.config`` + + - Fixed a bug where ``ConfigurationItem.set_temp()`` does not reset to + default value when exception is raised within ``with`` block. [#2117] + +- ``astropy.convolution`` + + - Fixed a bug where ``_truncation`` was left undefined for ``CustomKernel``. + [#2016] + + - Fixed a bug with ``_normalization`` when ``CustomKernel`` input array + sums to zero. [#2016] + +- ``astropy.coordinates`` + + - Fixed a bug where using ``==`` on two array coordinates wouldn't + work. [#1832] + + - Fixed bug which caused ``len()`` not to work for coordinate objects and + added a ``.shape`` property to get appropriately array-like behavior. + [#1761, #2014] + + - Fixed a bug where sexagesimal notation would sometimes include + exponential notation in the last field. [#1908, #1913] + + - ``CompositeStaticMatrixTransform`` no longer attempts to reference the + undefined variable ``self.matrix`` during instantiation. [#1944] + + - Fixed pickling of ``Longitude``, ensuring ``wrap_angle`` is preserved + [#1961] + + - Allow ``sep`` argument in ``Angle.to_string`` to be empty (resulting in no + separators) [#1989] + +- ``astropy.io.ascii`` + + - Allow passing unicode delimiters when reading or writing tables. The + delimiter must be convertible to pure ASCII. 
[#1949] + + - Fix a problem when reading a table and renaming the columns to names that + already exist. [#1991] + +- ``astropy.io.fits`` + + - Ported all bug fixes from PyFITS 3.2.1. See the PyFITS changelog at + http://pyfits.readthedocs.org/en/v3.2.1/ [#2056] + +- ``astropy.io.misc`` + + - Fixed issues in the HDF5 Table reader/writer functions that occurred on + Windows. [#2099] + +- ``astropy.io.votable`` + + - The ``write_null_values`` kwarg to ``VOTable.to_xml``, when set to `False` + (the default) would produce non-standard VOTable files. Therefore, this + functionality has been replaced by a better understanding that knows which + fields in a VOTable may be left empty (only ``char``, ``float`` and + ``double`` in VOTable 1.1 and 1.2, and all fields in VOTable 1.3). The + kwarg is still accepted but it will be ignored, and a warning is emitted. + [#1809] + + - Printing out a ``astropy.io.votable.tree.Table`` object using `repr` or + `str` now uses the pretty formatting in ``astropy.table``, so it's possible + to easily preview the contents of a ``VOTable``. [#1766] + +- ``astropy.modeling`` + + - Fixed bug in computation of model derivatives in ``LinearLSQFitter``. + [#1903] + + - Raise a ``NotImplementedError`` when fitting composite models. [#1915] + + - Fixed bug in the computation of the ``Gaussian2D`` model. [#2038] + + - Fixed bug in the computation of the ``AiryDisk2D`` model. [#2093] + +- ``astropy.sphinx`` + + - Added slightly more useful debug info for AstropyAutosummary. [#2024] + +- ``astropy.table`` + + - The column string representation for n-dimensional cells with only + one element has been fixed. [#1522] + + - Fix a problem that caused ``MaskedColumn.__getitem__`` to not preserve + column metadata. [#1471, #1872] + + - With Numpy prior to version 1.6.2, tables with Unicode columns now + sort correctly. [#1867] + + - ``astropy.table`` can now print out tables with Unicode columns containing + non-ascii characters. 
[#1864] + + - Columns can now be named with Unicode strings, as long as they contain only + ascii characters. This makes using ``astropy.table`` easier on Python 2 + when ``from __future__ import unicode_literals`` is used. [#1864] + + - Allow pickling of ``Table``, ``Column``, and ``MaskedColumn`` objects. [#792] + + - Fix a problem where it was not possible to rename columns after sorting or + adding a row. [#2039] + +- ``astropy.time`` + + - Fix a problem where scale conversion problem in TimeFromEpoch + was not showing a useful error [#2046] + + - Fix a problem when converting to one of the formats ``unix``, ``cxcsec``, + ``gps`` or ``plot_date`` when the time scale is ``UT1``, ``TDB`` or ``TCB`` + [#1732] + + - Ensure that ``delta_ut1_utc`` gets calculated when accessed directly, + instead of failing and giving a rather obscure error message [#1925] + + - Fix a bug when computing the TDB to TT offset. The transform routine was + using meters instead of kilometers for the Earth vector. [#1929] + + - Increase ``__array_priority__`` so that ``TimeDelta`` can convert itself + to a ``Quantity`` also in reverse operations [#1940] + + - Correct hop list from TCG to TDB to ensure that conversion is + possible [#2074] + +- ``astropy.units`` + + - ``Quantity`` initialisation rewritten for speed [#1775] + + - Fixed minor string formatting issue for dimensionless quantities. [#1772] + + - Fix error for inplace operations on non-contiguous quantities [#1834]. + + - The definition of the unit ``bar`` has been corrected to "1e5 + Pascal" from "100 Pascal" [#1910] + + - For units that are close to known units, but not quite, for + example due to differences in case, the exception will now include + recommendations. [#1870] + + - The generic and FITS unit parsers now accept multiple slashes in + the unit string. There are multiple ways to interpret them, but + the approach taken here is to convert "m/s/kg" to "m s-1 kg-1". 
+ Multiple slashes are accepted, but discouraged, by the FITS + standard, due to the ambiguity of parsing, so a warning is raised + when it is encountered. [#1911] + + - The use of "angstrom" (with a lower case "a") is now accepted in FITS unit + strings, since it is in common usage. However, since it is not officially + part of the FITS standard, a warning will be issued when it is encountered. + [#1911] + + - Pickling unrecognized units will not raise a ``AttributeError``. [#2047] + + - ``astropy.units`` now correctly preserves the precision of + fractional powers. [#2070] + + - If a ``Unit`` or ``Quantity`` is raised to a floating point power + that is very close to a rational number with a denominator less + than or equal to 10, it is converted to a ``Fraction`` object to + preserve its precision through complex unit conversion operations. + [#2070] + +- ``astropy.utils`` + + - Fixed crash in ``timer.RunTimePredictor.do_fit``. [#1905] + + - Fixed ``astropy.utils.compat.argparse`` for Python 3.1. [#2017] + +- ``astropy.wcs`` + + - ``astropy.wcs.WCS``, ``astropy.wcs.WCS.fix`` and + ``astropy.wcs.find_all_wcs`` now have a ``translate_units`` keyword + argument that is passed down to ``astropy.wcs.Wcsprm.fix``. This can be + used to specify any unsafe translations of units from rarely used ones to + more commonly used ones. + + Although ``"S"`` is commonly used to represent seconds, its translation to + ``"s"`` is potentially unsafe since the standard recognizes ``"S"`` + formally as Siemens, however rarely that may be used. The same applies to + ``"H"`` for hours (Henry), and ``"D"`` for days (Debye). + + When these sorts of changes are performed, a warning is emitted. + [#1854] + + - When a unit is "fixed" by ``astropy.wcs.WCS.fix`` or + ``astropy.wcs.Wcsprm.unitfix``, it now correctly reports the ``CUNIT`` + field that was changed. 
[#1854] + + - ``astropy.wcs.Wcs.printwcs`` will no longer warn that ``cdelt`` is being + ignored when none was present in the FITS file. [#1845] + + - ``astropy.wcs.Wcsprm.set`` is called from within the ``astropy.wcs.WCS`` + constructor, therefore any invalid information in the keywords will be + raised from the constructor, rather than on a subsequent call to a + transformation method. [#1918] + + - Fix a memory corruption bug when using ``astropy.wcs.Wcs.sub`` with + ``astropy.wcs.WCSSUB_CELESTIAL``. [#1960] + + - Fixed the ``AttributeError`` exception that was raised when using + ``astropy.wcs.WCS.footprint_to_file``. [#1912] + + - Fixed a ``NameError`` exception that was raised when using + ``astropy.wcs.validate`` or the ``wcslint`` script. [#2053] + + - Fixed a bug where named WCSes may be erroneously reported as ``' '`` when + using ``astropy.wcs.validate`` or the ``wcslint`` script. [#2053] + + - Fixed a bug where error messages about incorrect header keywords + may not be propagated correctly, resulting in a "NULL error object + in wcslib" message. [#2106] + +- Misc + + - There are a number of improvements to make Astropy work better on big + endian platforms, such as MIPS, PPC, s390x and SPARC. [#1849] + + - The test suite will now raise exceptions when a deprecated feature of + Python or Numpy is used. [#1948] + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- A new function, ``astropy.wcs.get_include``, has been added to get the + location of the ``astropy.wcs`` C header files. [#1755] + +- The doctests in the ``.rst`` files in the ``docs`` folder are now + tested along with the other unit tests. This is in addition to the + testing of doctests in docstrings that was already being performed. + See ``docs/development/testguide.rst`` for more information. [#1771] + +- Fix a problem where import fails on Python 3 if setup.py exists + in current directory. 
[#1877] + + +0.3 (2013-11-20) +---------------- + +New Features +^^^^^^^^^^^^ + +- General + + - A top-level configuration item, ``unicode_output`` has been added to + control whether the Unicode string representation of certain + objects will contain Unicode characters. For example, when + ``use_unicode`` is `False` (default):: + + >>> from astropy import units as u + >>> print(unicode(u.degree)) + deg + + When ``use_unicode`` is `True`:: + + >>> from astropy import units as u + >>> print(unicode(u.degree)) + ° + + See `handling-unicode + `_ + for more information. [#1441] + + - ``astropy.utils.misc.find_api_page`` is now imported into the top-level. + This allows usage like ``astropy.find_api_page(astropy.units.Quantity)``. + [#1779] + +- ``astropy.convolution`` + + - New class-based system for generating kernels, replacing ``make_kernel``. + [#1255] The ``astropy.nddata.convolution`` sub-package has now been moved + to ``astropy.convolution``. [#1451] + +- ``astropy.coordinates`` + + - Two classes ``astropy.coordinates.Longitude`` and + ``astropy.coordinates.Latitude`` have been added. These are derived from + the new ``Angle`` class and used for all longitude-like (RA, azimuth, + galactic L) and latitude-like coordinates (Dec, elevation, galactic B) + respectively. The ``Longitude`` class provides auto-wrapping capability + and ``Latitude`` performs bounds checking. + + - ``astropy.coordinates.Distance`` supports conversion to and from distance + modulii. [#1472] + + - ``astropy.coordinates.SphericalCoordinateBase`` and derived classes now + support arrays of coordinates, enabling large speed-ups for some operations + on multiple coordinates at the same time. These coordinates can also be + indexed using standard slicing or any Numpy-compatible indexing. 
[#1535, + #1615] + + - Array coordinates can be matched to other array coordinates, finding the + closest matches between the two sets of coordinates (see the + ``astropy.coordinates.matching.match_coordinates_3d`` and + ``astropy.coordinates.matching.match_coordinates_sky`` functions). [#1535] + +- ``astropy.cosmology`` + + - Added support for including massive Neutrinos in the cosmology classes. The + Planck (2013) cosmology has been updated to use this. [#1364] + + - Calculations now use and return ``Quantity`` objects where appropriate. + [#1237] + +- ``astropy.io.ascii`` + + - Added support for writing IPAC format tables [#1152]. + +- ``astropy.io.fits`` + + - Added initial support for table columns containing pseudo-unsigned + integers. This is currently enabled by using the ``uint=True`` option when + opening files; any table columns with the correct BZERO value will be + interpreted and returned as arrays of unsigned integers. [#906] + + - Upgraded vendored copy of CFITSIO to v3.35, though backwards compatibility + back to version v3.28 is maintained. + + - Added support for reading and writing tables using the Q format for columns. + The Q format is identical to the P format (variable-length arrays) except + that it uses 64-bit integers for the data descriptors, allowing more than + 4 GB of variable-length array data in a single table. + + - Some refactoring of the table and ``FITS_rec`` modules in order to better + separate the details of the FITS binary and ASCII table data structures from + the HDU data structures that encapsulate them. Most of these changes should + not be apparent to users (but see API Changes below). + +- ``astropy.io.votable`` + + - Updated to support the VOTable 1.3 draft. [#433] + + - Added the ability to look up and group elements by their utype attribute. + [#622] + + - The format of the units of a VOTable file can be specified using the + ``unit_format`` parameter. 
Note that units are still always written out + using the CDS format, to ensure compatibility with the standard. + +- ``astropy.modeling`` + + - Added a new framework for representing and evaluating mathematical models + and for fitting data to models. See "What's New in Astropy 0.3" in the + documentation for further details. [#493] + +- ``astropy.stats`` + + - Added robust statistics functions + ``astropy.stats.funcs.median_absolute_deviation``, + ``astropy.stats.funcs.biweight_location``, and + ``astropy.stats.funcs.biweight_midvariance``. [#621] + + - Added ``astropy.stats.funcs.signal_to_noise_oir_ccd`` for computing the + signal to noise ratio for source being observed in the optical/IR using a + CCD. [#870] + + - Add ``axis=int`` option to ``stropy.stats.funcs.sigma_clip`` to allow + clipping along a given axis for multidimensional data. [#1083] + +- ``astropy.table`` + + - New columns can be added to a table via assignment to a non-existing + column by name. [#726] + + - Added ``join`` function to perform a database-like join on two tables. This + includes support for inner, left, right, and outer joins as well as + metadata merging. [#903] + + - Added ``hstack`` and ``vstack`` functions to stack two or more tables. + [#937] + + - Tables now have a ``.copy`` method and include support for ``copy`` and + ``deepcopy``. [#1208] + + - Added support for selecting and manipulating groups within a table with + a database style ``group_by`` method. [#1424] + + - Table ``read`` and ``write`` functions now include rudimentary support + reading and writing of FITS tables via the unified reading/writing + interface. [#591] + + - The ``units`` and ``dtypes`` attributes and keyword arguments in Column, + MaskedColumn, Row, and Table are now deprecated in favor of the + single-tense ``unit`` and ``dtype``. [#1174] + + - Setting a column from a Quantity now correctly sets the unit on the Column + object. 
[#732] + + - Add ``remove_row`` and ``remove_rows`` to remove table rows. [#1230] + + - Added a new ``Table.show_in_browser`` method that opens a web browser + and displays the table rendered as HTML. [#1342] + + - New tables can now be instantiated using a single row from an existing + table. [#1417] + +- ``astropy.time`` + + - New ``Time`` objects can be instantiated from existing ``Time`` objects + (but with different format, scale, etc.) [#889] + + - Added a ``Time.now`` classmethod that returns the current UTC time, + similarly to Python's ``datetime.now``. [#1061] + + - Update internal time manipulations so that arithmetic with Time and + TimeDelta objects maintains sub-nanosecond precision over a time span + longer than the age of the universe. [#1189] + + - Use ``astropy.utils.iers`` to provide ``delta_ut1_utc``, so that + automatic calculation of UT1 becomes possible. [#1145] + + - Add ``datetime`` format which allows converting to and from standard + library ``datetime.datetime`` objects. [#860] + + - Add ``plot_date`` format which allows converting to and from the date + representation used when plotting dates with matplotlib via the + ``matplotlib.pyplot.plot_date`` function. [#860] + + - Add ``gps`` format (seconds since 1980-01-01 00:00:00 UTC, + including leap seconds) [#1164] + + - Add array indexing to Time objects [#1132] + + - Allow for arithmetic of multi-element and single-element Time and TimeDelta + objects. [#1081] + + - Allow multiplication and division of TimeDelta objects by + constants and arrays, as well as changing sign (negation) and + taking the absolute value of TimeDelta objects. [#1082] + + - Allow comparisons of Time and TimeDelta objects. [#1171] + + - Support interaction of Time and Quantity objects that represent a time + interval. [#1431] + +- ``astropy.units`` + + - Added parallax equivalency for length-angle. [#985] + + - Added mass-energy equivalency. 
[#1333] + + - Added a new-style format method which will use format specifiers + (like ``0.03f``) in new-style format strings for the Quantity's value. + Specifiers which can't be applied to the value will fall back to the + entire string representation of the quantity. [#1383] + + - Added support for complex number values in quantities. [#1384] + + - Added new spectroscopic equivalencies for velocity conversions + (relativistic, optical, and radio conventions are supported) [#1200] + + - The ``spectral`` equivalency now also handles wave number. + + - The ``spectral_density`` equivalency now also accepts a Quantity for the + frequency or wavelength. It also handles additional flux units. + + - Added Brightness Temperature (antenna gain) equivalency for conversion + between :math:`T_B` and flux density. [#1327] + + - Added percent unit, and allowed any string containing just a number to be + interpreted as a scaled dimensionless unit. [#1409] + + - New-style format strings can be used to set the unit output format. For + example, ``"{0:latex}".format(u.km)`` will print with the latex formatter. + [#1462] + + - The ``Unit.is_equivalent`` method can now take a tuple. In this case, the + method returns ``True`` if the unit is equivalent to any of the units + listed in the tuple. [#1521] + + - ``def_unit`` can now take a 2-tuple of names of the form (short, long), + where each entry is a list. This allows for handling strange units that + might have multiple short names. [#1543] + + - Added ``dimensionless_angles`` equivalency, which allows conversion of any + power of radian to dimensionless. [#1161] + + - Added the ability to enable set of units, or equivalencies that are used by + default. Also provided context managers for these cases. [#1268] + + - Imperial units are disabled by default. 
[#1593, #1662] + + - Added an ``astropy.units.add_enabled_units`` context manager, which allows + creating a temporary context with additional units temporarily enabled in + the global units namespace. [#1662] + + - ``Unit`` instances now have ``.si`` and ``.cgs`` properties a la + ``Quantity``. These serve as shortcuts for ``Unit.to_system(cgs)[0]`` + etc. [#1610] + +- ``astropy.vo`` + + - New package added to support Virtual Observatory Simple Cone Search query + and service validation. [#552] + +- ``astropy.wcs`` + + - Fixed attribute error in ``astropy.wcs.Wcsprm`` (lattype->lattyp) [#1463] + + - Included a new command-line script called ``wcslint`` and accompanying API + for validating the WCS in a given FITS file or header. [#580] + + - Upgraded included version of WCSLIB to 4.19. + +- ``astropy.utils`` + + - Added a new set of utilities in ``astropy.utils.timer`` for analyzing the + runtime of functions and making runtime predections for larger inputs. + [#743] + + - ``ProgressBar`` and ``Spinner`` classes can now be used directly to return + generator expressions. [#771] + + - Added ``astropy.utils.iers`` which allows reading in of IERS A or IERS B + bulletins and interpolation in UT1-UTC. + + - Added a function ``astropy.utils.find_api_page``--given a class or object + from the ``astropy`` package, this will open that class's API documentation + in a web browser. [#663] + + - Data download functions such as ``download_file`` now accept a + ``show_progress`` argument to suppress console output, and a ``timeout`` + argument. [#865, #1258] + +- ``astropy.extern.six`` + + - Added `six `_ for python2/python3 + compatibility + +- Astropy now uses the ERFA library instead of the IAU SOFA library for + fundamental time transformation routines. The ERFA library is derived, with + permission, from the IAU SOFA library but is distributed under a BSD license. + See ``license/ERFA.rst`` for details. 
[#1293] + +- ``astropy.logger`` + + - The Astropy logger now no longer catches exceptions by default, and also + only captures warnings emitted by Astropy itself (prior to this change, + following an import of Astropy, any warning got re-directed through the + Astropy logger). Logging to the Astropy log file has also been disabled by + default. However, users of Astropy 0.2 will likely still see the previous + behavior with Astropy 0.3 for exceptions and logging to file since the + default configuration file installed by 0.2 set the exception logging to be + on by default. To get the new behavior, set the ``log_exceptions`` and + ``log_to_file`` configuration items to ``False`` in the ``astropy.cfg`` + file. [#1331] + +API Changes +^^^^^^^^^^^ + +- General + + - The configuration option ``utils.console.use_unicode`` has been + moved to the top level and renamed to ``unicode_output``. It now + not only affects console widgets, such as progress bars, but also + controls whether calling `unicode` on certain classes will return a + string containing unicode characters. + +- ``astropy.coordinates`` + + - The ``astropy.coordinates.Angle`` class is now a subclass of + ``astropy.units.Quantity``. This means it has all of the methods of a + `numpy.ndarray`. [#1006] + + - The ``astropy.coordinates.Distance`` class is now a subclass of + ``astropy.units.Quantity``. This means it has all of the methods of a + `numpy.ndarray`. [#1472] + + - All angular units are now supported, not just ``radian``, ``degree`` and + ``hour``, but now ``arcsecond`` and ``arcminute`` as well. The object + will retain its native unit, so when printing out a value initially + provided in hours, its ``to_string()`` will, by default, also be + expressed in hours. + + - The ``Angle`` class now supports arrays of angles. + + - To be consistent with ``units.Unit``, ``Angle.format`` has been + deprecated and renamed to ``Angle.to_string``. 
+ + - To be consistent with ``astropy.units``, all plural forms of unit names + have been removed. Therefore, the following properties of + ``astropy.coordinates.Angle`` should be renamed: + + - ``radians`` -> ``radian`` + - ``degrees`` -> ``degree`` + - ``hours`` -> ``hour`` + + - Multiplication and division of two ``Angle`` objects used to raise + ``NotImplementedError``. Now they raise ``TypeError``. + + - The ``astropy.coordinates.Angle`` class no longer has a ``bounds`` + attribute so there is no bounds-checking or auto-wrapping at this level. + This allows ``Angle`` objects to be used in arbitrary arithmetic + expressions (e.g. coordinate distance computation). + + - The ``astropy.coordinates.RA`` and ``astropy.coordinates.Dec`` classes have + been removed and replaced with ``astropy.coordinates.Longitude`` and + ``astropy.coordinates.Latitude`` respectively. These are now used for the + components of Galactic and Horizontal (Alt-Az) coordinates as well instead + of plain ``Angle`` objects. + + - ``astropy.coordinates.angles.rotation_matrix`` and + ``astropy.coordinates.angles.angle_axis`` now take a ``unit`` kwarg instead + of ``degrees`` kwarg to specify the units of the angles. + ``rotation_matrix`` will also take the unit from the given ``Angle`` object + if no unit is provided. + + - The ``AngularSeparation`` class has been removed. The output of the + coordinates ``separation()`` method is now an + ``astropy.coordinates.Angle``. [#1007] + + - The coordinate classes have been renamed in a way that remove the + ``Coordinates`` at the end of the class names. E.g., ``ICRSCoordinates`` + from previous versions is now called ``ICRS``. [#1614] + + - ``HorizontalCoordinates`` are now named ``AltAz``, to reflect more common + terminology. + +- ``astropy.cosmology`` + + - The Planck (2013) cosmology will likely give slightly different (and more + accurate) results due to the inclusion of Neutrino masses. 
[#1364] + + - Cosmology class properties now return ``Quantity`` objects instead of + simple floating-point values. [#1237] + + - The names of cosmology instances are now truly optional, and are set to + ``None`` rather than the name of the class if the user does not provide + them. [#1705] + +- ``astropy.io.ascii`` + + - In the ``read`` method of ``astropy.io.ascii``, empty column values in an + ASCII table are now treated as missing values instead of the previous + treatment as a zero-length string "". This now corresponds to the behavior + of other table readers like ``numpy.genfromtxt``. To restore the previous + behavior set ``fill_values=None`` in the call to ``ascii.read()``. [#919] + + - The ``read`` and ``write`` methods of ``astropy.io.ascii`` now have a + ``format`` argument for specifying the file format. This is the preferred + way to choose the format instead of the ``Reader`` and ``Writer`` + arguments. [#961] + + - The ``include_names`` and ``exclude_names`` arguments were removed from + the ``BaseHeader`` initializer, and now instead handled by the reader and + writer classes directly. [#1350] + + - Allow numeric and otherwise unusual column names when reading a table + where the ``format`` argument is specified, but other format details such + as the delimiter or quote character are being guessed. [#1692] + + - When reading an ASCII table using the ``Table.read()`` method, the default + has changed from ``guess=False`` to ``guess=True`` to allow auto-detection + of file format. This matches the default behavior of ``ascii.read()``. + +- ``astropy.io.fits`` + + - The ``astropy.io.fits.new_table`` function is marked "pending deprecation". + This does not mean it will be removed outright or that its functionality + has changed. It will likely be replaced in the future for a function with + similar, if not subtly different functionality. 
A better, if not slightly + more verbose approach is to use ``pyfits.FITS_rec.from_columns`` to create + a new ``FITS_rec`` table--this has the same interface as + ``pyfits.new_table``. The difference is that it returns a plan + ``FITS_rec`` array, and not an HDU instance. This ``FITS_rec`` object can + then be used as the data argument in the constructors for ``BinTableHDU`` + (for binary tables) or ``TableHDU`` (for ASCII tables). This is analogous + to creating an ``ImageHDU`` by passing in an image array. + ``pyfits.FITS_rec.from_columns`` is just a simpler way of creating a + FITS-compatible recarray from a FITS column specification. + + - The ``updateHeader``, ``updateHeaderData``, and ``updateCompressedData`` + methods of the ``CompDataHDU`` class are pending deprecation and moved to + internal methods. The operation of these methods depended too much on + internal state to be used safely by users; instead they are invoked + automatically in the appropriate places when reading/writing compressed + image HDUs. + + - The ``CompDataHDU.compData`` attribute is pending deprecation in favor of + the clearer and more PEP-8 compatible ``CompDataHDU.compressed_data``. + + - The constructor for ``CompDataHDU`` has been changed to accept new keyword + arguments. The new keyword arguments are essentially the same, but are in + underscore_separated format rather than camelCase format. The old + arguments are still pending deprecation. + + - The internal attributes of HDU classes ``_hdrLoc``, ``_datLoc``, and + ``_datSpan`` have been replaced with ``_header_offset``, ``_data_offset``, + and ``_data_size`` respectively. The old attribute names are still pending + deprecation. This should only be of interest to advanced users who have + created their own HDU subclasses. 
+ + - The following previously deprecated functions and methods have been removed + entirely: ``createCard``, ``createCardFromString``, ``upperKey``, + ``ColDefs.data``, ``setExtensionNameCaseSensitive``, ``_File.getfile``, + ``_TableBaseHDU.get_coldefs``, ``Header.has_key``, ``Header.ascardlist``. + + - Interfaces that were pending deprecation are now fully deprecated. These + include: ``create_card``, ``create_card_from_string``, ``upper_key``, + ``Header.get_history``, and ``Header.get_comment``. + + - The ``.name`` attribute on HDUs is now directly tied to the HDU's header, so + that if ``.header['EXTNAME']`` changes so does ``.name`` and vice-versa. + +- ``astropy.io.registry`` + + - Identifier functions for reading/writing Table and NDData objects should + now accept ``(origin, *args, **kwargs)`` instead of ``(origin, args, + kwargs)``. [#591] + + - Added a new ``astropy.io.registry.get_formats`` function for listing + registered I/O formats and details about the their readers/writers. [#1669] + +- ``astropy.io.votable`` + + - Added a new option ``use_names_over_ids`` option to use when converting + from VOTable objects to Astropy Tables. This can prevent a situation where + column names are not preserved when converting from a VOTable. [#609] + +- ``astropy.nddata`` + + - The ``astropy.nddata.convolution`` sub-package has now been moved to + ``astropy.convolution``, and the ``make_kernel`` function has been removed. + (the kernel classes should be used instead) [#1451] + +- ``astropy.stats.funcs`` + + - For ``sigma_clip``, the ``maout`` optional parameter has been removed, and + the function now always returns a masked array. A new boolean parameter + ``copy`` can be used to indicated whether the input data should be copied + (``copy=True``, default) or used by reference (``copy=False``) in the + output masked array. 
[#1083] + +- ``astropy.table`` + + - The first argument to the ``Column`` and ``MaskedColumn`` classes is now + the data array--the ``name`` argument has been changed to an optional + keyword argument. [#840] + + - Added support for instantiating a ``Table`` from a list of dict, each one + representing a single row with the keys mapping to column names. [#901] + + - The plural 'units' and 'dtypes' have been switched to 'unit' and 'dtype' + where appropriate. The original attributes are still present in this + version as deprecated attributes, but will be removed in the next version. + [#1174] + + - The ``copy`` methods of ``Column`` and ``MaskedColumn`` were changed so + that the first argument is now ``order='C'``. This is required for + compatibility with Numpy 1.8 which is currently in development. [#1250] + + - Comparing a column (with == or !=) to a scalar, an array, or another column + now always returns a boolean Numpy array (which is a masked array if either + of the arguments in the comparison was masked). This is in contrast to the + previous behavior, which in some cases returned a boolean Numpy array, and + in some cases returned a boolean Column object. [#1446] + +- ``astropy.time`` + + - For consistency with ``Quantity``, the attributes ``val`` and + ``is_scalar`` have been renamed to ``value`` and ``isscalar``, + respectively, and the attribute ``vals`` has been dropped. [#767] + + - The double-float64 internal representation of time is used more + efficiently to enable better accuracy. [#366] + + - Format and scale arguments are now allowed to be case-insensitive. [#1128] + +- ``astropy.units`` + + - The ``Quantity`` class now inherits from the Numpy array class, and + includes the following API changes [#929]: + + - Using ``float(...)``, ``int(...)``, and ``long(...)`` on a quantity will + now only work if the quantity is dimensionless and unscaled. 
+ + - All Numpy ufuncs should now treat units correctly (or raise an exception + if not supported), rather than extract the value of quantities and + operate on this, emitting a warning about the implicit loss of units. + + - When using relevant Numpy ufuncs on dimensionless quantities (e.g. + ``np.exp(h * nu / (k_B * T))``), or combining dimensionless quantities + with Python scalars or plain Numpy arrays ``1 + v / c``, the + dimensionless Quantity will automatically be converted to an unscaled + dimensionless Quantity. + + - When initializing a quantity from a value with no unit, it is now set to + be dimensionless and unscaled by default. When initializing a Quantity + from another Quantity and with no unit specified in the initializer, the + unit is now taken from the unit of the Quantity being initialized from. + + - Strings are no longer allowed as the values for Quantities. [#1005] + + - Quantities are always comparable with zero regardless of their units. + [#1254] + + - The exception ``astropy.units.UnitsException`` has been renamed to + ``astropy.units.UnitsError`` to be more consistent with the naming + of built-in Python exceptions. [#1406] + + - Multiplication with and division by a string now always returns a Unit + (rather than a Quantity when the string was first) [#1408] + + - Imperial units are disabled by default. + +- ``astropy.wcs`` + + - For those including the ``astropy.wcs`` C headers in their project, they + should now include it as: + + #include "astropy_wcs/astropy_wcs_api.h" + + instead of: + + #include "astropy_wcs_api.h" + + [#1631] + +- The ``--enable-legacy`` option for ``setup.py`` has been removed. [#1493] + +Bug Fixes +^^^^^^^^^^ + +- ``astropy.io.ascii`` + + - The ``write()`` function was ignoring the ``fill_values`` argument. [#910] + + - Fixed an issue in ``DefaultSplitter.join`` where the delimiter attribute + was ignored when writing the CSV. [#1020] + + - Fixed writing of IPAC tables containing null values. 
[#1366] + + - When a table with no header row was read without specifying the format and + using the ``names`` argument, then the first row could be dropped. [#1692] + +- ``astropy.io.fits`` + + - Binary tables containing compressed images may, optionally, contain other + columns unrelated to the tile compression convention. Although this is an + uncommon use case, it is permitted by the standard. + + - Reworked some of the file I/O routines to allow simpler, more consistent + mapping between OS-level file modes ('rb', 'wb', 'ab', etc.) and the more + "PyFITS-specific" modes used by PyFITS like "readonly" and "update". That + is, if reading a FITS file from an open file object, it doesn't matter as + much what "mode" it was opened in so long as it has the right capabilities + (read/write/etc.) Also works around bugs in the Python io module in 2.6+ + with regard to file modes. + + - Fixed a long-standing issue where writing binary tables did not correctly + write the TFORMn keywords for variable-length array columns (they omitted + the max array length parameter of the format). This was thought fixed in + an earlier version, but it was only fixed for compressed image HDUs and + not for binary tables in general. + +- ``astropy.nddata`` + + - Fixed crash when trying to multiple or divide ``NDData`` objects with + uncertainties. [#1547] + +- ``astropy.table`` + + - Using a list of strings to index a table now correctly returns a new table + with the columns named in the list. [#1454] + + - Inequality operators now work properly with ``Column`` objects. [#1685] + +- ``astropy.time`` + + - ``Time`` scale and format attributes are now shown when calling ``dir()`` + on a ``Time`` object. [#1130] + +- ``astropy.wcs`` + + - Fixed assignment to string-like WCS attributes on Python 3. [#956] + +- ``astropy.units`` + + - Fixed a bug that caused the order of multiplication/division of plain + Numpy arrays with Quantities to matter (i.e. 
if the plain array comes + first the units were not preserved in the output). [#899] + + - Directly instantiated ``CompositeUnits`` were made printable without + crashing. [#1576] + +- Misc + + - Fixed various modules that hard-coded ``sys.stdout`` as default arguments + to functions at import time, rather than using the runtime value of + ``sys.stdout``. [#1648] + + - Minor documentation fixes and enhancements [#922, #1034, #1210, #1217, + #1491, #1492, #1498, #1582, #1608, #1621, #1646, #1670, #1756] + + - Fixed a crash that could sometimes occur when running the test suite on + systems with platform names containing non-ASCII characters. [#1698] + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- General + + - Astropy now follows the PSF Code of Conduct. [#1216] + + - Astropy's test suite now tests all doctests in inline docstrings. Support + for running doctests in the reST documentation is planned to follow in + v0.3.1. + + - Astropy's test suite can be run on multiple CPUs in parallel, often + greatly improving runtime, using the ``--parallel`` option. [#1040] + + - A warning is now issued when using Astropy with Numpy < 1.5--much of + Astropy may still work in this case but it shouldn't be expected to + either. [#1479] + + - Added automatic download/build/installation of Numpy during Astropy + installation if not already found. [#1483] + + - Handling of metadata for the ``NDData`` and ``Table`` classes has been + unified by way of a common ``MetaData`` descriptor--it allows instantiating + an object with metadata of any mapping type, and subsequently prevents + replacing the mapping stored in the ``.meta`` attribute (only direct + updates to that object are allowed). [#1686] + +- ``astropy.coordinates`` + + - Angles containing out of bounds minutes or seconds (eg. 60) can be + parsed--the value modulo 60 is used with carry to the hours/minutes, and a + warning is issued rather than raising an exception. 
[#990] + +- ``astropy.io.fits`` + + - The new compression code also adds support for the ZQUANTIZ and ZDITHER0 + keywords added in more recent versions of this FITS Tile Compression spec. + This includes support for lossless compression with GZIP. (#198) By default + no dithering is used, but the ``SUBTRACTIVE_DITHER_1`` and + ``SUBTRACTIVE_DITHER_2`` methods can be enabled by passing the correct + constants to the ``quantize_method`` argument to the ``CompImageHDU`` + constructor. A seed can be manually specified, or automatically generated + using either the system clock or checksum-based methods via the + ``dither_seed`` argument. See the documentation for ``CompImageHDU`` for + more details. + + - Images compressed with the Tile Compression standard can now be larger than + 4 GB through support of the Q format. + + - All HDUs now have a ``.ver`` ``.level`` attribute that returns the value of + the EXTVAL and EXTLEVEL keywords from that HDU's header, if the exist. + This was added for consistency with the ``.name`` attribute which returns + the EXTNAME value from the header. + + - Then ``Column`` and ``ColDefs`` classes have new ``.dtype`` attributes + which give the Numpy dtype for the column data in the first case, and the + full Numpy compound dtype for each table row in the latter case. + + - There was an issue where new tables created defaulted the values in all + string columns to '0.0'. Now string columns are filled with empty strings + by default--this seems a less surprising default, but it may cause + differences with tables created with older versions of PyFITS or Astropy. + +- ``astropy.io.misc`` + + - The HDF5 reader can now refer to groups in the path as well as datasets; + if given a group, the first dataset in that group is read. [#1159] + +- ``astropy.nddata`` + + - ``NDData`` objects have more helpful, though still rudimentary ``__str__` + and ``__repr__`` displays. [#1313] + +- ``astropy.units`` + + - Added 'cycle' unit. 
[#1160] + + - Extended units supported by the CDS formatter/parser. [#1468] + + - Added unicode an LaTeX symbols for liter. [#1618] + +- ``astropy.wcs`` + + - Redundant SCAMP distortion parameters are removed with SIP distortions are + also present. [#1278] + + - Added iterative implementation of ``all_world2pix`` that can be reliably + inverted. [#1281] + + +0.2.5 (2013-10-25) +------------------ + +Bug Fixes +^^^^^^^^^ + +- ``astropy.coordinates`` + + - Fixed incorrect string formatting of Angles using ``precision=0``. [#1319] + + - Fixed string formatting of Angles using ``decimal=True`` which ignored the + ``precision`` argument. [#1323] + + - Fixed parsing of format strings using appropriate unicode characters + instead of the ASCII ``-`` for minus signs. [#1429] + +- ``astropy.io.ascii`` + + - Fixed a crash in the IPAC table reader when the ``include/exclude_names`` + option is set. [#1348] + + - Fixed writing AASTex tables to honor the ``tabletype`` option. [#1372] + +- ``astropy.io.fits`` + + - Improved round-tripping and preservation of manually assigned column + attributes (``TNULLn``, ``TSCALn``, etc.) in table HDU headers. (Note: This + issue was previously reported as fixed in Astropy v0.2.2 by mistake; it is + not fixed until v0.3.) [#996] + + - Fixed a bug that could cause a segfault when trying to decompress an + compressed HDU whose contents are truncated (due to a corrupt file, for + example). This still causes a Python traceback but better that than a + segfault. [#1332] + + - Newly created ``CompImageHDU`` HDUs use the correct value of the + ``DEFAULT_COMPRESSION_TYPE`` module-level constant instead of hard-coding + "RICE_1" in the header. + + - Fixed a corner case where when extra memory is allocated to compress an + image, it could lead to unnecessary in-memory copying of the compressed + image data and a possible memory leak through Numpy. 
+ + - Fixed a bug where assigning from an mmap'd array in one FITS file over + the old (also mmap'd) array in another FITS file failed to update the + destination file. Corresponds to PyFITS issue 25. + + - Some miscellaneous documentation fixes. + +- ``astropy.io.votable`` + + - Added a warning for when a VOTable 1.2 file contains no ``RESOURCES`` + elements (at least one should be present). [#1337] + + - Fixed a test failure specific to MIPS architecture caused by an errant + floating point warning. [#1179] + +- ``astropy.nddata.convolution`` + + - Prevented in-place modification of the input arrays to ``convolve()``. + [#1153] + +- ``astropy.table`` + + - Added HTML escaping for string values in tables when outputting the table + as HTML. [#1347] + + - Added a workaround in a bug in Numpy that could cause a crash when + accessing a table row in a masked table containing ``dtype=object`` + columns. [#1229] + + - Fixed an issue similar to the one in #1229, but specific to unmasked + tables. [#1403] + +- ``astropy.units`` + + - Improved error handling for unparseable units and fixed parsing CDS units + without mantissas in the exponent. [#1288] + + - Added a physical type for spectral flux density. [#1410] + + - Normalized conversions that should result in a scale of exactly 1.0 to + round off slight floating point imprecisions. [#1407] + + - Added support in the CDS unit parser/formatter for unusual unit prefixes + that are nonetheless required to be supported by that convention. [#1426] + + - Fixed the parsing of ``sqrt()`` in unit format strings which was returning + ``unit ** 2`` instead of ``unit ** 0.5``. [#1458] + +- ``astropy.wcs`` + + - When passing a single array to the wcs transformation functions, + (``astropy.wcs.Wcs.all_pix2world``, etc.), its second dimension must now + exactly match the number of dimensions in the transformation. [#1395] + + - Improved error message when incorrect arguments are passed to + ``WCS.wcs_world2pix``. 
[#1394] + + - Fixed a crash when trying to read WCS from FITS headers on Python 3.3 + in Windows. [#1363] + + - Only headers that are required as part of the WCSLIB C API are installed + by the package, per request of system packagers. [#1666] + +- Misc + + - Fixed crash when the ``COLUMNS`` environment variable is set to a + non-integer value. [#1291] + + - Fixed a bug in ``ProgressBar.map`` where ``multiprocess=True`` could cause + it to hang on waiting for the process pool to be destroyed. [#1381] + + - Fixed a crash on Python 3.2 when affiliated packages try to use the + ``astropy.utils.data.get_pkg_data_*`` functions. [#1256] + + - Fixed a minor path normalization issue that could occur on Windows in + ``astropy.utils.data.get_pkg_data_filename``. [#1444] + + - Fixed an annoyance where configuration items intended only for testing + showed up in users' astropy.cfg files. [#1477] + + - Prevented crashes in exception logging in unusual cases where no traceback + is associated with the exception. [#1518] + + - Fixed a crash when running the tests in unusual environments where + ``sys.stdout.encoding`` is ``None``. [#1530] + + - Miscellaneous documentation fixes and improvements [#1308, #1317, #1377, + #1393, #1362, #1516] + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Astropy installation now requests setuptools >= 0.7 during build/installation + if neither distribute or setuptools >= 0.7 is already installed. In other + words, if ``import setuptools`` fails, ``ez_setup.py`` is used to bootstrap + the latest setuptools (rather than using ``distribute_setup.py`` to bootstrap + the now obsolete distribute package). [#1197] + +- When importing Astropy from a source checkout without having built the + extension modules first an ``ImportError`` is raised rather than a + ``SystemExit`` exception. 
[#1269] + + +0.2.4 (2013-07-24) +------------------ + +Bug Fixes +^^^^^^^^^ + +- ``astropy.coordinates`` + + - Fixed the angle parser to support parsing the string "1 degree". [#1168] + +- ``astropy.cosmology`` + + - Fixed a crash in the ``comoving_volume`` method on non-flat cosmologies + when passing it an array of redshifts. + +- ``astropy.io.ascii`` + + - Fixed a bug that prevented saving changes to the comment symbol when + writing changes to a table. [#1167] + +- ``astropy.io.fits`` + + - Added a workaround for a bug in 64-bit OSX that could cause truncation when + writing files greater than 2^32 bytes in size. [#839] + +- ``astropy.io.votable`` + + - Fixed incorrect reading of tables containing multiple ```` + elements. [#1223] + +- ``astropy.table`` + + - Fixed a bug where ``Table.remove_column`` and ``Table.rename_column`` + could cause a masked table to lose its masking. [#1120] + + - Fixed bugs where subclasses of ``Table`` did not preserver their class in + certain operations. [#1142] + + - Fixed a bug where slicing a masked table did not preserve the mask. [#1187] + +- ``astropy.units`` + + - Fixed a bug where the ``.si`` and ``.cgs`` properties of dimensionless + ``Quantity`` objects raised a ``ZeroDivisionError``. [#1150] + + - Fixed a bug where multiple subsequent calls to the ``.decompose()`` method + on array quantities applied a scale factor each time. [#1163] + +- Misc + + - Fixed an installation crash that could occur sometimes on Debian/Ubuntu + and other \*NIX systems where ``pkg_resources`` can be installed without + installing ``setuptools``. [#1150] + + - Updated the ``distribute_setup.py`` bootstrapper to use setuptools >= 0.7 + when installing on systems that don't already have an up to date version + of distribute/setuptools. [#1180] + + - Changed the ``version.py`` template so that Astropy affiliated packages can + (and they should) use their own ``cython_version.py`` and + ``utils._compiler`` modules where appropriate. 
This issue only pertains to + affiliated package maintainers. [#1198] + + - Fixed a corner case where the default config file generation could crash + if building with matplotlib but *not* Sphinx installed in a virtualenv. + [#1225] + + - Fixed a crash that could occur in the logging module on systems that + don't have a default preferred encoding (in particular this happened + in some versions of PyCharm). [#1244] + + - The Astropy log now supports passing non-string objects (and calling + ``str()`` on them by default) to the logging methods, in line with Python's + standard logging API. [#1267] + + - Minor documentation fixes [#582, #696, #1154, #1194, #1212, #1213, #1246, + #1252] + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- ``astropy.cosmology`` + + - Added a new ``Plank13`` object representing the Plank 2013 results. [#895] + +- ``astropy.units`` + + - Performance improvements in initialization of ``Quantity`` objects with + a large number of elements. [#1231] + + +0.2.3 (2013-05-30) +------------------ + +Bug Fixes +^^^^^^^^^ + +- ``astropy.time`` + + - Fixed inaccurate handling of leap seconds when converting from UTC to UNIX + timestamps. [#1118] + + - Tightened required accuracy in many of the time conversion tests. [#1121] + +- Misc + + - Fixed a regression that was introduced in v0.2.2 by the fix to issue #992 + that was preventing installation of Astropy affiliated packages that use + Astropy's setup framework. [#1124] + + +0.2.2 (2013-05-21) +------------------ + +Bug Fixes +^^^^^^^^^ + +- ``astropy.io`` + + - Fixed issues in both the ``fits`` and ``votable`` sub-packages where array + byte order was not being handled consistently, leading to possible crashes + especially on big-endian systems. [#1003] + +- ``astropy.io.fits`` + + - When an error occurs opening a file in fitsdiff the exception message will + now at least mention which file had the error. 
+ + - Fixed a couple cases where creating a new table using TDIMn in some of the + columns could cause a crash. + + - Slightly refactored how tables containing variable-length array columns are + handled to add two improvements: Fixes an issue where accessing the data + after a call to the ``astropy.io.fits.getdata`` convenience function caused + an exception, and allows the VLA data to be read from an existing mmap of + the FITS file. + + - Fixed a bug on Python 3 where attempting to open a non-existent file on + Python 3 caused a seemingly unrelated traceback. + + - Fixed an issue in the tests that caused some tests to fail if Astropy is + installed with read-only permissions. + + - Fixed a bug where instantiating a ``BinTableHDU`` from a numpy array + containing boolean fields converted all the values to ``False``. + + - Fixed an issue where passing an array of integers into the constructor of + ``Column()`` when the column type is floats of the same byte width caused + the column array to become garbled. + + - Fixed inconsistent behavior in creating CONTINUE cards from byte strings + versus unicode strings in Python 2--CONTINUE cards can now be created + properly from unicode strings (so long as they are convertable to ASCII). + + - Fixed a bug in parsing HIERARCH keywords that do not have a space after the + first equals sign (before the value). + + - Prevented extra leading whitespace on HIERARCH keywords from being treated + as part of the keyword. + + - Fixed a bug where HIERARCH keywords containing lower-case letters was + mistakenly marked as invalid during header validation along with an + ancillary issue where the ``Header.index()`` method id not work correctly + with HIERARCH keywords containing lower-case letters. + + - Disallowed assigning NaN and Inf floating point values as header values, + since the FITS standard does not define a way to represent them in. 
Because + this is undefined, the previous behavior did not make sense and produced + invalid FITS files. [#954] + + - Fixed an obscure issue that can occur on systems that don't have flush to + memory-mapped files implemented (namely GNU Hurd). [#968] + +- ``astropy.io.votable`` + + - Stopped deprecation warnings from the ``astropy.io.votable`` package that + could occur during setup. [#970] + + - Fixed an issue where INFO elements were being incorrectly dropped when + occurring inside a TABLE element. [#1000] + + - Fixed obscure test failures on MIPS platforms. [#1010] + +- ``astropy.nddata.convolution`` + + - Fixed an issue in ``make_kernel()`` when using an Airy function kernel. + Also removed the superfluous 'brickwall' option. [#939] + +- ``astropy.table`` + + - Fixed a crash that could occur when adding a row to an empty (rowless) + table with masked columns. [#973] + + - Made it possible to assign to one table row from the value of another row, + effectively making it easier to copy rows, for example. [#1019] + +- ``astropy.time`` + + - Added appropriate ``__copy__`` and ``__deepcopy__`` behavior; this + omission caused a seemingly unrelated error in FK5 coordinate separation. + [#891] + +- ``astropy.units`` + + - Fixed an issue where the ``isiterable()`` utility returned ``True`` for + quantities with scalar values. Added an ``__iter__`` method for the + ``Quantity`` class and fixed ``isiterable()`` to catch false positives. + [#878] + + - Fixed previously undefined behavior when multiplying a unit by a string. + [#949] + + - Added 'time' as a physical type--this was a simple omission. [#959] + + - Fixed issues with pickling unit objects so as to play nicer with the + multiprocessing module. [#974] + + - Made it more difficult to accidentally override existing units with a new + unit of the same name. 
[#1070] + + - Added several more physical types and units that were previously omitted, + including 'mass density', 'specific volume', 'molar volume', 'momentum', + 'angular momentum', 'angular speed', 'angular acceleration', 'electric + current', 'electric current density', 'electric field strength', 'electric + flux density', 'electric charge density', 'permittivity', 'electromagnetic + field strength', 'radiant intensity', 'data quantity', 'bandwidth'; and + 'knots', 'nautical miles', 'becquerels', and 'curies' respectively. [#1072] + +- Misc + + - Fixed a permission error that could occur when running ``astropy.test()`` + on Python 3 when Astropy is installed as root. [#811] + + - Made it easier to filter warnings from the ``convolve()`` function and + from ``Quantity`` objects. [#853] + + - Fixed a crash that could occur in Python 3 when generation of the default + config file fails during setup. [#952] + + - Fixed an unrelated error message that could occur when trying to import + astropy from a source checkout without having build the extension modules + first. This issue was claimed to be fixed in v0.2.1, but the fix itself had + a bug. [#971] + + - Fixed a crash that could occur when running the ``build_sphinx`` setup + command in Python 3. [#977] + + - Added a more helpful error message when trying to run the + ``setup.py build_sphinx`` command when Sphinx is not installed. [#1027] + + - Minor documentation fixes and restructuring. + [#935, #967, #978, #1004, #1028, #1047] + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Some performance improvements to the ``astropy.units`` package, in particular + improving the time it takes to import the sub-package. [#1015] + + +0.2.1 (2013-04-03) +------------------ + +Bug Fixes +^^^^^^^^^ + +- ``astropy.coordinates`` + + - Fixed encoding errors that could occur when formatting coordinate objects + in code using ``from __future__ import unicode_literals``. 
[#817] + + - Fixed a bug where the minus sign was dropped when string formatting dms + coordinates with -0 degrees. [#875] + +- ``astropy.io.fits`` + + - Properly supports the ZQUANTIZ keyword used to support quantization + level--this includes working support for lossless GZIP compression of + images. + + - Fixed support for opening gzipped FITS files in a writeable mode. [#256] + + - Added a more helpful exception message when trying to read invalid values + from a table when the required ``TNULLn`` keyword is missing. [#309] + + - More refactoring of the tile compression handling to work around a + potential memory access violation that was particularly prevalent on + Windows. [#507] + + - Fixed an integer size mismatch in the compression module that could affect + 32-bit systems. [#786] + + - Fixed malformatting of the ``TFORMn`` keywords when writing compressed + image tables (they omitted the max array length parameter from the + variable-length array format). + + - Fixed a crash that could occur when writing a table containing multi- + dimensional array columns from an existing file into a new file. + + - Fixed a bug in fitsdiff that reported two header keywords containing NaN + as having different values. + +- ``astropy.io.votable`` + + - Fixed links to the ``astropy.io.votable`` documentation in the VOTable + validator output. [#806] + + - When reading VOTables containing integers that are out of range for their + column type, display a warning rather than raising an exception. [#825] + + - Changed the default string format for floating point values for better + round-tripping. [#856] + + - Fixed opening VOTables through the ``Table.read()`` interface for tables + that have no names. [#927] + + - Fixed creation of VOTables from an Astropy table that does not have a data + mask. [#928] + + - Minor documentation fixes. [#932] + +- ``astropy.nddata.convolution`` + + - Added better handling of ``inf`` values to the ``convolve_fft`` family of + functions. 
[#893] + +- ``astropy.table`` + + - Fixed silent failure to assign values to a row on multiple columns. [#764] + + - Fixed various buggy behavior when viewing a table after sorting by one of + its columns. [#829] + + - Fixed using ``numpy.where()`` with table indexing. [#838] + + - Fixed a bug where opening a remote table with ``Table.read()`` could cause + the entire table to be downloaded twice. [#845] + + - Fixed a bug where ``MaskedColumn`` no longer worked if the column being + masked is renamed. [#916] + +- ``astropy.units`` + + - Added missing capability for array ``Quantity``\s to be initializable by + a list of ``Quantity``\s. [#835] + + - Fixed the definition of year and lightyear to be in terms of Julian year + per the IAU definition. [#861] + + - "degree" was removed from the list of SI base units. [#863] + +- ``astropy.wcs`` + + - Fixed ``TypeError`` when calling ``WCS.to_header_string()``. [#822] + + - Added new method ``WCS.all_world2pix`` for converting from world + coordinates to pixel space, including inversion of the astrometric + distortion correction. [#1066, #1281] + + +- Misc + + - Fixed a minor issue when installing with ``./setup.py develop`` on a fresh + git clone. This is likely only of interest to developers on Astropy. + [#725] + + - Fixes a crash with ``ImportError: No module named 'astropy.version'`` when + running setup.py from a source checkout for the first time on OSX with + Python 3.3. [#820] + + - Fixed an installation issue where running ``./setup.py install`` or when + installing with pip the ``.astropy`` directory gets created in the home + directory of the user running the command. The user's ``.astropy`` + directory should only be created when they use Astropy, not when they + install it. [#867] + + - Fixed an exception when creating a ``ProgressBar`` with a "total" of 0. 
+ [#752] + + - Added better documentation of behavior that can occur when trying to import + the astropy package from within a source checkout without first building + the extension modules. [#795, #864] + + - Added link to the installation instructions in the README. [#797] + + - Catches segfaults in xmllint which can occur sometimes and is otherwise out + of our control. [#803] + + - Minor changes to the documentation template. [#805] + + - Fixed a minor exception handling bug in ``download_file()``. [#808] + + - Added cleanup of any temporary files if an error occurs in + ``download_file()``. [#857] + + - Filesystem free space is checked for before attempting to download a file + with ``download_file()``. [#858] + + - Fixed package data locating to work across symlinks--required to work with + some OS packaging layouts. [#827] + + - Fixed a bug when building Cython extensions where hidden files containing + ``.pyx`` extensions could cause the build to crash. This can be an issue + with software and filesystems that autogenerate hidden files. [#834] + + - Fixed bug that could cause a "script" called README.rst to be installed + in a bin directory. [#852] + + - Fixed some miscellaneous and mostly rare reference leaks caught by + cpychecker. [#914] + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Added logo and branding for Windows binary installers. [#741] + +- Upgraded included version libexpat to 2.1.0. [#781] + +- ~25% performance improvement in unit composition/decomposition. [#836] + +- Added previously missing LaTeX formatting for ``L_sun`` and ``R_sun``. [#841] + +- ``ConfigurationItem``\s now have a more useful and informative ``__repr__`` + and improved documentation for how to use them. [#855] + +- Added a friendlier error message when trying to import astropy from a source + checkout without first building the extension modules inplace. [#864] + +- ``py.test`` now outputs more system information for help in debugging issues + from users. 
[#869] + +- Added unit definitions "mas" and "uas" for "milliarcsecond" and + "microarcsecond" respectively. [#892] + + +0.2 (2013-02-19) +---------------- + +New Features +^^^^^^^^^^^^ + +This is a brief overview of the new features included in Astropy 0.2--please +see the "What's New" section of the documentation for more details. + +- ``astropy.coordinates`` + + - This new subpackage contains a representation of celestial coordinates, + and provides a wide range of related functionality. While + fully-functional, it is a work in progress and parts of the API may + change in subsequent releases. + +- ``astropy.cosmology`` + + - Update to include cosmologies with variable dark energy equations of state. + (This introduces some API incompatibilities with the older Cosmology + objects). + + - Added parameters for relativistic species (photons, neutrinos) to the + astropy.cosmology classes. The current treatment assumes that neutrinos are + massless. [#365] + + - Add a WMAP9 object using the final (9-year) WMAP parameters from + Hinshaw et al. 2013. It has also been made the default cosmology. + [#629, #724] + +- ``astropy.table`` I/O infrastructure for custom readers/writers + implemented. [#305] + + - Added support for reading/writing HDF5 files [#461] + + - Added support for masked tables with missing or invalid data [#451] + +- New ``astropy.time`` sub-package. [#332] + +- New ``astropy.units`` sub-package that includes a class for units + (``astropy.units.Unit``) and scalar quantities that have units + (``astropy.units.Quantity``). [#370, #445] + + This has the following effects on other sub-packages: + + - In ``astropy.wcs``, the ``wcs.cunit`` list now takes and returns + ``astropy.units.Unit`` objects. [#379] + + - In ``astropy.nddata``, units are now stored as ``astropy.units.Unit`` + objects. [#382] + + - In ``astropy.table``, units on columns are now stored as + ``astropy.units.Unit`` objects. 
[#380] + + - In ``astropy.constants``, constants are now stored as + ``astropy.units.Quantity`` objects. [#529] + +- ``astropy.io.ascii`` + + - Improved integration with the ``astropy.table`` Table class so that + table and column metadata (e.g. keywords, units, description, + formatting) are directly available in the output table object. The + CDS, DAOphot, and IPAC format readers now provide this type of + integrated metadata. + + - Changed to using `astropy.table` masked tables instead of NumPy + masked arrays for tables with missing values. + + - Added SExtractor table reader to ``astropy.io.ascii`` [#420] + + - Removed the Memory reader class which was used to convert data input + passed to the ``write`` function into an internal table. Instead + ``write`` instantiates an astropy Table object using the data + input to ``write``. + + - Removed the NumpyOutputter as the output of reading a table is now + always a ``Table`` object. + + - Removed the option of supplying a function as a column output + formatter. + + - Added a new ``strip_whitespace`` keyword argument to the ``write`` + function. This controls whether whitespace is stripped from + the left and right sides of table elements before writing. + Default is True. + + - Fixed a bug in reading IPAC tables with null values. + +- Generalized I/O infrastructure so that ``astropy.nddata`` can also have + custom readers/writers [#659] + +- ``astropy.wcs`` + + - From updating the the underlying wcslib 4.16: + + - When ``astropy.wcs.WCS`` constructs a default coordinate representation + it will give it the special name "DEFAULTS", and will not report "Found + one coordinate representation". + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- A configuration file with all options set to their defaults is now generated + when astropy is installed. This file will be pulled in as the users' + astropy configuration file the first time they ``import astropy``. 
[#498] + +- Astropy doc themes moved into ``astropy.sphinx`` to allow affiliated packages + to access them. + +- Added expanded documentation for the ``astropy.cosmology`` sub-package. + [#272] + +- Added option to disable building of "legacy" packages (pyfits, vo, etc.). + +- The value of the astronomical unit (au) has been updated to that adopted by + IAU 2012 Resolution B2, and the values of the pc and kpc constants have been + updated to reflect this. [#368] + +- Added links to the documentation pages to directly edit the documentation on + GitHub. [#347] + +- Several updates merged from ``pywcs`` into ``astropy.wcs`` [#384]: + + - Improved the reading of distortion images. + + - Added a new option to choose whether or not to write SIP coefficients. + + - Uses the ``relax`` option by default so that non-standard keywords are + allowed. [#585] + + +- Added HTML representation of tables in IPython notebook [#409] + +- Rewrote CFITSIO-based backend for handling tile compression of FITS files. + It now uses a standard CFITSIO instead of heavily modified pieces of CFITSIO + as before. Astropy ships with its own copy of CFITSIO v3.30, but system + packagers may choose instead to strip this out in favor of a + system-installed version of CFITSIO. This corresponds to PyFITS ticket 169. + [#318] + +- Moved ``astropy.config.data`` to ``astropy.utils.data`` and re-factored the + I/O routines to separate out the generic I/O code that can be used to open + any file or resource from the code used to access Astropy-related data. The + 'core' I/O routine is now ``get_readable_fileobj``, which can be used to + access any local as well as remote data, supports caching, and can decompress + gzip and bzip2 files on-the-fly. [#425] + +- Added a classmethod to + ``astropy.coordinates.coordsystems.SphericalCoordinatesBase`` that performs a + name resolve query using Sesame to retrieve coordinates for the requested + object. 
This works for any subclass of ``SphericalCoordinatesBase``, but + requires an internet connection. [#556] + +- ``astropy.nddata.convolution`` removed requirement of PyFFTW3; uses Numpy's + FFT by default instead with the added ability to specify an FFT + implementation to use. [#660] + + +Bug Fixes +^^^^^^^^^ + +- ``astropy.io.ascii`` + + - Fixed crash when pprinting a row with INDEF values. [#511] + + - Fixed failure when reading DAOphot files with empty keyword values. [#666] + +- ``astropy.io.fits`` + + - Improved handling of scaled images and pseudo-unsigned integer images in + compressed image HDUs. They now work more transparently like normal image + HDUs with support for the ``do_not_scale_image_data`` and ``uint`` options, + as well as ``scale_back`` and ``save_backup``. The ``.scale()`` method + works better too. Corresponds to PyFITS ticket 88. + + - Permits non-string values for the EXTNAME keyword when reading in a file, + rather than throwing an exception due to the malformatting. Added + verification for the format of the EXTNAME keyword when writing. + Corresponds to PyFITS ticket 96. + + - Added support for EXTNAME and EXTVER in PRIMARY HDUs. That is, if EXTNAME + is specified in the header, it will also be reflected in the ``.name`` + attribute and in ``fits.info()``. These keywords used to be verboten in + PRIMARY HDUs, but the latest version of the FITS standard allows them. + Corresponds to PyFITS ticket 151. + + - HCOMPRESS can again be used to compress data cubes (and higher-dimensional + arrays) so long as the tile size is effectively 2-dimensional. In fact, + compatible tile sizes will automatically be used even if they're not + explicitly specified. Corresponds to PyFITS ticket 171. + + - Fixed a bug that could cause a deadlock in the filesystem on OSX when + reading the data from certain types of FITS files. This only occurred + when used in conjunction with Numpy 1.7. 
[#369] + + - Added support for the optional ``endcard`` parameter in the + ``Header.fromtextfile()`` and ``Header.totextfile()`` methods. Although + ``endcard=False`` was a reasonable default assumption, there are still text + dumps of FITS headers that include the END card, so this should have been + more flexible. Corresponds to PyFITS ticket 176. + + - Fixed a crash when running fitsdiff on two empty (that is, zero row) tables. + Corresponds to PyFITS ticket 178. + + - Fixed an issue where opening a FITS file containing a random group HDU in + update mode could result in an unnecessary rewriting of the file even if + no changes were made. This corresponds to PyFITS ticket 179. + + - Fixed a crash when generating diff reports from diffs using the + ``ignore_comments`` options. Corresponds to PyFITS ticket 181. + + - Fixed some bugs with WCS Paper IV record-valued keyword cards: + + - Cards that looked kind of like RVKCs but were not intended to be were + over-permissively treated as such--commentary keywords like COMMENT and + HISTORY were particularly affected. Corresponds to PyFITS ticket 183. + + - Looking up a card in a header by its standard FITS keyword only should + always return the raw value of that card. That way cards containing + values that happen to valid RVKCs but were not intended to be will still + be treated like normal cards. Corresponds to PyFITS ticket 184. + + - Looking up a RVKC in a header with only part of the field-specifier (for + example "DP1.AXIS" instead of "DP1.AXIS.1") was implicitly treated as a + wildcard lookup. Corresponds to PyFITS ticket 184. + + - Fixed a crash when diffing two FITS files where at least one contains a + compressed image HDU which was not recognized as an image instead of a + table. Corresponds to PyFITS ticket 187. 
+ + - Fixed a bug where opening a file containing compressed image HDUs in + 'update' mode and then immediately closing it without making any changes + caused the file to be rewritten unnecessarily. + + - Fixed two memory leaks that could occur when writing compressed image data, + or in some cases when opening files containing compressed image HDUs in + 'update' mode. + + - Fixed a bug where ``ImageHDU.scale(option='old')`` wasn't working at + all--it was not restoring the image to its original BSCALE and BZERO + values. + + - Fixed a bug when writing out files containing zero-width table columns, + where the TFIELDS keyword would be updated incorrectly, leaving the table + largely unreadable. + + - Fixed a minor string formatting issue. + + - Fixed bugs in the backwards compatibility layer for the ``CardList.index`` + and ``CardList.count`` methods. Corresponds to PyFITS ticket 190. + + - Improved ``__repr__`` and text file representation of cards with long + values that are split into CONTINUE cards. Corresponds to PyFITS ticket + 193. + + - Fixed a crash when trying to assign a long (> 72 character) value to blank + ('') keywords. This also changed how blank keywords are represented--there + are still exactly 8 spaces before any commentary content can begin; this + *may* affect the exact display of header cards that assumed there could be + fewer spaces in a blank keyword card before the content begins. However, + the current approach is more in line with the requirements of the FITS + standard. Corresponds to PyFITS ticket 194. + +- ``astropy.io.votable`` + + - The ``Table`` class now maintains a single array object which is a + Numpy masked array. For variable-length columns, the object that + is stored there is also a Numpy masked array. + + - Changed the ``pedantic`` configuration option to be ``False`` by default + due to the vast proliferation of non-compliant VO Tables. [#296] + + - Renamed ``astropy.io.vo`` to ``astropy.io.votable``. 
+ +- ``astropy.table`` + + - Added a workaround for an upstream bug in Numpy 1.6.2 that could cause + a maximum recursion depth RuntimeError when printing table rows. [#341] + +- ``astropy.wcs`` + + - Updated to wcslib 4.15 [#418] + + - Fixed a problem with handling FITS headers on locales that do not use + dot as a decimal separator. This required an upstream fix to wcslib which + is included in wcslib 4.14. [#313] + +- Fixed some tests that could fail due to missing/incorrect logging + configuration--ensures that tests don't have any impact on the default log + location or contents. [#291] + +- Various minor documentation fixes [#293 and others] + +- Fixed a bug where running the tests with the ``py.test`` command still tried + to replace the system-installed pytest with the one bundled with Astropy. + [#454] + +- Improved multiprocessing compatibility for file downloads. [#615] + +- Fixed handling of Cython modules when building from a source checkout of a + tagged release version. [#594] + +- Added a workaround for a bug in Sphinx that could occur when using the + ``:tocdepth:`` directive. [#595] + +- Minor VOTable fixes [#596] + +- Fixed how ``setup.py`` uses ``distribute_setup.py`` to prevent possible + ``VersionConflict`` errors when an older version of distribute is already + installed on the user's system. [#616][#640] + +- Changed use of ``log.warn`` in the logging module to ``log.warning`` since + the former is deprecated. [#624] + + +0.1 (2012-06-19) +---------------- + +- Initial release. 
diff --git a/PKG-INFO b/PKG-INFO new file mode 100644 index 0000000..569d6eb --- /dev/null +++ b/PKG-INFO @@ -0,0 +1,29 @@ +Metadata-Version: 1.1 +Name: astropy +Version: 0.4.2 +Summary: Community-developed python astronomy tools +Home-page: http://astropy.org +Author: The Astropy Developers +Author-email: astropy.team@gmail.com +License: BSD +Download-URL: http://pypi.python.org/packages/source/a/astropy/astropy-0.4.2.tar.gz +Description: + Astropy is a package intended to contain core functionality and some + common tools needed for performing astronomy and astrophysics research with + Python. It also provides an index for other astronomy packages and tools for + managing them. + +Platform: UNKNOWN +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: C +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Topic :: Scientific/Engineering :: Astronomy +Classifier: Topic :: Scientific/Engineering :: Physics +Requires: numpy +Provides: astropy diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..3740958 --- /dev/null +++ b/README.rst @@ -0,0 +1,45 @@ +======= +Astropy +======= + +.. image:: https://pypip.in/v/astropy/badge.png + :target: https://pypi.python.org/pypi/astropy + +.. image:: https://pypip.in/d/astropy/badge.png + :target: https://pypi.python.org/pypi/astropy + +Astropy (http://astropy.org/) is a package intended to contain much of +the core functionality and some common tools needed for performing +astronomy and astrophysics with Python. + +Releases are `registered on PyPI `_, +and development is occuring at the +`project's github page `_. 
+ +For installation instructions, see the `online documentation `_ +or ``docs/install.rst`` in this source distribution. + +For system packagers: Please install Astropy with the command:: + + $ python setup.py --offline install + +This will prevent the astropy_helpers bootstrap script from attempting to +reach out to PyPI. + + +Travis Build Status +------------------- +.. image:: https://travis-ci.org/astropy/astropy.png + :target: https://travis-ci.org/astropy/astropy + + +Test Coverage Status +-------------------- + +.. image:: https://coveralls.io/repos/astropy/astropy/badge.png + :target: https://coveralls.io/r/astropy/astropy + +License +------- +Astropy is licensed under a 3-clause BSD style license - see the +``licenses/LICENSE.rst`` file. \ No newline at end of file diff --git a/ah_bootstrap.py b/ah_bootstrap.py new file mode 100644 index 0000000..fdf64fb --- /dev/null +++ b/ah_bootstrap.py @@ -0,0 +1,748 @@ +""" +This bootstrap module contains code for ensuring that the astropy_helpers +package will be importable by the time the setup.py script runs. It also +includes some workarounds to ensure that a recent-enough version of setuptools +is being used for the installation. + +This module should be the first thing imported in the setup.py of distributions +that make use of the utilities in astropy_helpers. If the distribution ships +with its own copy of astropy_helpers, this module will first attempt to import +from the shipped copy. However, it will also check PyPI to see if there are +any bug-fix releases on top of the current version that may be useful to get +past platform-specific bugs that have been fixed. When running setup.py, use +the ``--offline`` command-line option to disable the auto-upgrade checks. 
+ +When this module is imported or otherwise executed it automatically calls a +main function that attempts to read the project's setup.cfg file, which it +checks for a configuration section called ``[ah_bootstrap]``; the presence of +that section, and options therein, determine the next step taken: If it +contains an option called ``auto_use`` with a value of ``True``, it will +automatically call the main function of this module called +`use_astropy_helpers` (see that function's docstring for full details). +Otherwise no further action is taken (however, +``ah_bootstrap.use_astropy_helpers`` may be called manually from within the +setup.py script). + +Additional options in the ``[ah_bootstrap]`` section of setup.cfg have the same +names as the arguments to `use_astropy_helpers`, and can be used to configure +the bootstrap script when ``auto_use = True``. + +See https://github.com/astropy/astropy-helpers for more details, and for the +latest version of this module. +""" + +import contextlib +import errno +import imp +import io +import locale +import os +import re +import subprocess as sp +import sys + +try: + from ConfigParser import ConfigParser, RawConfigParser +except ImportError: + from configparser import ConfigParser, RawConfigParser + + +if sys.version_info[0] < 3: + _str_types = (str, unicode) + _text_type = unicode + PY3 = False +else: + _str_types = (str, bytes) + _text_type = str + PY3 = True + +# Some pre-setuptools checks to ensure that either distribute or setuptools >= +# 0.7 is used (over pre-distribute setuptools) if it is available on the path; +# otherwise the latest setuptools will be downloaded and bootstrapped with +# ``ez_setup.py``. 
This used to be included in a separate file called +# setuptools_bootstrap.py; but it was combined into ah_bootstrap.py +try: + import pkg_resources + _setuptools_req = pkg_resources.Requirement.parse('setuptools>=0.7') + # This may raise a DistributionNotFound in which case no version of + # setuptools or distribute is properly installed + _setuptools = pkg_resources.get_distribution('setuptools') + if _setuptools not in _setuptools_req: + # Older version of setuptools; check if we have distribute; again if + # this results in DistributionNotFound we want to give up + _distribute = pkg_resources.get_distribution('distribute') + if _setuptools != _distribute: + # It's possible on some pathological systems to have an old version + # of setuptools and distribute on sys.path simultaneously; make + # sure distribute is the one that's used + sys.path.insert(1, _distribute.location) + _distribute.activate() + imp.reload(pkg_resources) +except: + # There are several types of exceptions that can occur here; if all else + # fails bootstrap and use the bootstrapped version + from ez_setup import use_setuptools + use_setuptools() + +from distutils import log +from distutils.debug import DEBUG + +# In case it didn't successfully import before the ez_setup checks +import pkg_resources + +from setuptools import Distribution +from setuptools.package_index import PackageIndex +from setuptools.sandbox import run_setup + +# TODO: Maybe enable checking for a specific version of astropy_helpers? +DIST_NAME = 'astropy-helpers' +PACKAGE_NAME = 'astropy_helpers' + +# Defaults for other options +DOWNLOAD_IF_NEEDED = True +INDEX_URL = 'https://pypi.python.org/simple' +USE_GIT = True +AUTO_UPGRADE = True + + +def use_astropy_helpers(path=None, download_if_needed=None, index_url=None, + use_git=None, auto_upgrade=None): + """ + Ensure that the `astropy_helpers` module is available and is importable. 
+ This supports automatic submodule initialization if astropy_helpers is + included in a project as a git submodule, or will download it from PyPI if + necessary. + + Parameters + ---------- + + path : str or None, optional + A filesystem path relative to the root of the project's source code + that should be added to `sys.path` so that `astropy_helpers` can be + imported from that path. + + If the path is a git submodule it will automatically be initialized + and/or updated. + + The path may also be to a ``.tar.gz`` archive of the astropy_helpers + source distribution. In this case the archive is automatically + unpacked and made temporarily available on `sys.path` as a ``.egg`` + archive. + + If `None` skip straight to downloading. + + download_if_needed : bool, optional + If the provided filesystem path is not found an attempt will be made to + download astropy_helpers from PyPI. It will then be made temporarily + available on `sys.path` as a ``.egg`` archive (using the + ``setup_requires`` feature of setuptools). If the ``--offline`` option + is given at the command line the value of this argument is overridden + to `False`. + + index_url : str, optional + If provided, use a different URL for the Python package index than the + main PyPI server. + + use_git : bool, optional + If `False` no git commands will be used--this effectively disables + support for git submodules. If the ``--no-git`` option is given at the + command line the value of this argument is overridden to `False`. + + auto_upgrade : bool, optional + By default, when installing a package from a non-development source + distribution ah_bootstrap will try to automatically check for patch + releases to astropy-helpers on PyPI and use the patched version over + any bundled versions. Setting this to `False` will disable that + functionality. If the ``--offline`` option is given at the command line + the value of this argument is overridden to `False`. 
+ """ + + # True by default, unless the --offline option was provided on the command + # line + if '--offline' in sys.argv: + download_if_needed = False + auto_upgrade = False + offline = True + sys.argv.remove('--offline') + else: + offline = False + + if '--no-git' in sys.argv: + use_git = False + sys.argv.remove('--no-git') + + if path is None: + path = PACKAGE_NAME + + if download_if_needed is None: + download_if_needed = DOWNLOAD_IF_NEEDED + + if index_url is None: + index_url = INDEX_URL + + if use_git is None: + use_git = USE_GIT + + if auto_upgrade is None: + auto_upgrade = AUTO_UPGRADE + + # Declared as False by default--later we check if astropy-helpers can be + # upgraded from PyPI, but only if not using a source distribution (as in + # the case of import from a git submodule) + is_submodule = False + + if not isinstance(path, _str_types): + if path is not None: + raise TypeError('path must be a string or None') + + if not download_if_needed: + log.debug('a path was not given and download from PyPI was not ' + 'allowed so this is effectively a no-op') + return + elif not os.path.exists(path) or os.path.isdir(path): + # Even if the given path does not exist on the filesystem, if it *is* a + # submodule, `git submodule init` will create it + is_submodule = _check_submodule(path, use_git=use_git, + offline=offline) + + if is_submodule or os.path.isdir(path): + log.info( + 'Attempting to import astropy_helpers from {0} {1!r}'.format( + 'submodule' if is_submodule else 'directory', path)) + dist = _directory_import(path) + else: + dist = None + + if dist is None: + msg = ( + 'The requested path {0!r} for importing {1} does not ' + 'exist, or does not contain a copy of the {1} package. 
' + 'Attempting download instead.'.format(path, PACKAGE_NAME)) + if download_if_needed: + log.warn(msg) + else: + raise _AHBootstrapSystemExit(msg) + elif os.path.isfile(path): + # Handle importing from a source archive; this also uses setup_requires + # but points easy_install directly to the source archive + try: + dist = _do_download(find_links=[path]) + except Exception as e: + if download_if_needed: + log.warn('{0}\nWill attempt to download astropy_helpers from ' + 'PyPI instead.'.format(str(e))) + dist = None + else: + raise _AHBootstrapSystemExit(e.args[0]) + else: + msg = ('{0!r} is not a valid file or directory (it could be a ' + 'symlink?)'.format(path)) + if download_if_needed: + log.warn(msg) + dist = None + else: + raise _AHBootstrapSystemExit(msg) + + if dist is not None and auto_upgrade and not is_submodule: + # A version of astropy-helpers was found on the available path, but + # check to see if a bugfix release is available on PyPI + upgrade = _do_upgrade(dist, index_url) + if upgrade is not None: + dist = upgrade + elif dist is None: + # Last resort--go ahead and try to download the latest version from + # PyPI + try: + if download_if_needed: + log.warn( + "Downloading astropy_helpers; run setup.py with the " + "--offline option to force offline installation.") + dist = _do_download(index_url=index_url) + else: + raise _AHBootstrapSystemExit( + "No source for the astropy_helpers package; " + "astropy_helpers must be available as a prerequisite to " + "installing this package.") + except Exception as e: + if DEBUG: + raise + else: + raise _AHBootstrapSystemExit(e.args[0]) + + if dist is not None: + # Otherwise we found a version of astropy-helpers so we're done + # Just activate the found distribution on sys.path--if we did a + # download this usually happens automatically but do it again just to + # be sure + # Note: Adding the dist to the global working set also activates it by + # default + pkg_resources.working_set.add(dist) + + +def 
_do_download(version='', find_links=None, index_url=None): + try: + if find_links: + allow_hosts = '' + index_url = None + else: + allow_hosts = None + # Annoyingly, setuptools will not handle other arguments to + # Distribution (such as options) before handling setup_requires, so it + # is not straightfoward to programmatically augment the arguments which + # are passed to easy_install + class _Distribution(Distribution): + def get_option_dict(self, command_name): + opts = Distribution.get_option_dict(self, command_name) + if command_name == 'easy_install': + if find_links is not None: + opts['find_links'] = ('setup script', find_links) + if index_url is not None: + opts['index_url'] = ('setup script', index_url) + if allow_hosts is not None: + opts['allow_hosts'] = ('setup script', allow_hosts) + return opts + + if version: + req = '{0}=={1}'.format(DIST_NAME, version) + else: + req = DIST_NAME + + attrs = {'setup_requires': [req]} + if DEBUG: + dist = _Distribution(attrs=attrs) + else: + with _silence(): + dist = _Distribution(attrs=attrs) + + # If the setup_requires succeeded it will have added the new dist to + # the main working_set + return pkg_resources.working_set.by_key.get(DIST_NAME) + except Exception as e: + if DEBUG: + raise + + msg = 'Error retrieving astropy helpers from {0}:\n{1}' + if find_links: + source = find_links[0] + elif index_url: + source = index_url + else: + source = 'PyPI' + + raise Exception(msg.format(source, repr(e))) + + +def _do_upgrade(dist, index_url): + # Build up a requirement for a higher bugfix release but a lower minor + # release (so API compatibility is guaranteed) + # sketchy version parsing--maybe come up with something a bit more + # robust for this + major, minor = (int(part) for part in dist.parsed_version[:2]) + next_minor = '.'.join([str(major), str(minor + 1), '0']) + req = pkg_resources.Requirement.parse( + '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_minor)) + + package_index = 
PackageIndex(index_url=index_url) + + upgrade = package_index.obtain(req) + + if upgrade is not None: + return _do_download(version=upgrade.version, index_url=index_url) + + +def _directory_import(path): + """ + Import astropy_helpers from the given path, which will be added to + sys.path. + + Must return True if the import succeeded, and False otherwise. + """ + + # Return True on success, False on failure but download is allowed, and + # otherwise raise SystemExit + path = os.path.abspath(path) + + # Use an empty WorkingSet rather than the man pkg_resources.working_set, + # since on older versions of setuptools this will invoke a VersionConflict + # when trying to install an upgrade + ws = pkg_resources.WorkingSet([]) + ws.add_entry(path) + dist = ws.by_key.get(DIST_NAME) + + if dist is None: + # We didn't find an egg-info/dist-info in the given path, but if a + # setup.py exists we can generate it + setup_py = os.path.join(path, 'setup.py') + if os.path.isfile(setup_py): + with _silence(): + run_setup(os.path.join(path, 'setup.py'), ['egg_info']) + + for dist in pkg_resources.find_distributions(path, True): + # There should be only one... + return dist + + return dist + + +def _check_submodule(path, use_git=True, offline=False): + """ + Check if the given path is a git submodule. + + See the docstrings for ``_check_submodule_using_git`` and + ``_check_submodule_no_git`` for futher details. + """ + + if use_git: + return _check_submodule_using_git(path, offline) + else: + return _check_submodule_no_git(path) + + +def _check_submodule_using_git(path, offline): + """ + Check if the given path is a git submodule. If so, attempt to initialize + and/or update the submodule if needed. + + This function makes calls to the ``git`` command in subprocesses. The + ``_check_submodule_no_git`` option uses pure Python to check if the given + path looks like a git submodule, but it cannot perform updates. 
+ """ + + if PY3 and not isinstance(path, _text_type): + fs_encoding = sys.getfilesystemencoding() + path = path.decode(fs_encoding) + + try: + p = sp.Popen(['git', 'submodule', 'status', '--', path], + stdout=sp.PIPE, stderr=sp.PIPE) + stdout, stderr = p.communicate() + except OSError as e: + if DEBUG: + raise + + if e.errno == errno.ENOENT: + # The git command simply wasn't found; this is most likely the + # case on user systems that don't have git and are simply + # trying to install the package from PyPI or a source + # distribution. Silently ignore this case and simply don't try + # to use submodules + return False + else: + raise _AHBoostrapSystemExit( + 'An unexpected error occurred when running the ' + '`git submodule status` command:\n{0}'.format(str(e))) + + + # Can fail of the default locale is not configured properly. See + # https://github.com/astropy/astropy/issues/2749. For the purposes under + # consideration 'latin1' is an acceptable fallback. + try: + stdio_encoding = locale.getdefaultlocale()[1] or 'latin1' + except ValueError: + # Due to an OSX oddity locale.getdefaultlocale() can also crash + # depending on the user's locale/language settings. See: + # http://bugs.python.org/issue18378 + stdio_encoding = 'latin1' + + if p.returncode != 0 or stderr: + # Unfortunately the return code alone cannot be relied on, as + # earlier versions of git returned 0 even if the requested submodule + # does not exist + stderr = stderr.decode(stdio_encoding) + + # This is a warning that occurs in perl (from running git submodule) + # which only occurs with a malformatted locale setting which can + # happen sometimes on OSX. 
See again + # https://github.com/astropy/astropy/issues/2749 + perl_warning = ('perl: warning: Falling back to the standard locale ' + '("C").') + if not stderr.strip().endswith(perl_warning): + # Some other uknown error condition occurred + log.warn('git submodule command failed ' + 'unexpectedly:\n{0}'.format(stderr)) + return False + + stdout = stdout.decode(stdio_encoding) + # The stdout should only contain one line--the status of the + # requested submodule + m = _git_submodule_status_re.match(stdout) + if m: + # Yes, the path *is* a git submodule + _update_submodule(m.group('submodule'), m.group('status'), offline) + return True + else: + log.warn( + 'Unexpected output from `git submodule status`:\n{0}\n' + 'Will attempt import from {1!r} regardless.'.format( + stdout, path)) + return False + + +def _check_submodule_no_git(path): + """ + Like ``_check_submodule_using_git``, but simply parses the .gitmodules file + to determine if the supplied path is a git submodule, and does not exec any + subprocesses. + + This can only determine if a path is a submodule--it does not perform + updates, etc. This function may need to be updated if the format of the + .gitmodules file is changed between git versions. + """ + + gitmodules_path = os.path.abspath('.gitmodules') + + if not os.path.isfile(gitmodules_path): + return False + + # This is a minimal reader for gitconfig-style files. It handles a few of + # the quirks that make gitconfig files incompatible with ConfigParser-style + # files, but does not support the full gitconfig syntaix (just enough + # needed to read a .gitmodules file). 
+ gitmodules_fileobj = io.StringIO() + + # Must use io.open for cross-Python-compatible behavior wrt unicode + with io.open(gitmodules_path) as f: + for line in f: + # gitconfig files are more flexible with leading whitespace; just + # go ahead and remove it + line = line.lstrip() + + # comments can start with either # or ; + if line and line[0] in (':', ';'): + continue + + gitmodules_fileobj.write(line) + + gitmodules_fileobj.seek(0) + + cfg = RawConfigParser() + + try: + cfg.readfp(gitmodules_fileobj) + except Exception as exc: + log.warn('Malformatted .gitmodules file: {0}\n' + '{1} cannot be assumed to be a git submodule.'.format( + exc, path)) + return False + + for section in cfg.sections(): + if not cfg.has_option(section, 'path'): + continue + + submodule_path = cfg.get(section, 'path').rstrip(os.sep) + + if submodule_path == path.rstrip(os.sep): + return True + + return False + + +def _update_submodule(submodule, status, offline): + if status == ' ': + # The submodule is up to date; no action necessary + return + elif status == '-': + if offline: + raise _AHBootstrapSystemExit( + "Cannot initialize the {0} submodule in --offline mode; this " + "requires being able to clone the submodule from an online " + "repository.".format(submodule)) + cmd = ['update', '--init'] + action = 'Initializing' + elif status == '+': + cmd = ['update'] + action = 'Updating' + if offline: + cmd.append('--no-fetch') + elif status == 'U': + raise _AHBoostrapSystemExit( + 'Error: Submodule {0} contains unresolved merge conflicts. ' + 'Please complete or abandon any changes in the submodule so that ' + 'it is in a usable state, then try again.'.format(submodule)) + else: + log.warn('Unknown status {0!r} for git submodule {1!r}. 
Will ' + 'attempt to use the submodule as-is, but try to ensure ' + 'that the submodule is in a clean state and contains no ' + 'conflicts or errors.\n{2}'.format(status, submodule, + _err_help_msg)) + return + + err_msg = None + + cmd = ['git', 'submodule'] + cmd + ['--', submodule] + log.warn('{0} {1} submodule with: `{2}`'.format( + action, submodule, ' '.join(cmd))) + + try: + p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) + stdout, stderr = p.communicate() + except OSError as e: + err_msg = str(e) + else: + if p.returncode != 0: + stderr_encoding = locale.getdefaultlocale()[1] + err_msg = stderr.decode(stderr_encoding) + + if err_msg: + log.warn('An unexpected error occurred updating the git submodule ' + '{0!r}:\n{1}\n{2}'.format(submodule, err_msg, _err_help_msg)) + + +class _DummyFile(object): + """A noop writeable object.""" + + errors = '' # Required for Python 3.x + encoding = 'utf-8' + + def write(self, s): + pass + + def flush(self): + pass + + +@contextlib.contextmanager +def _silence(): + """A context manager that silences sys.stdout and sys.stderr.""" + + old_stdout = sys.stdout + old_stderr = sys.stderr + sys.stdout = _DummyFile() + sys.stderr = _DummyFile() + exception_occurred = False + try: + yield + except: + exception_occurred = True + # Go ahead and clean up so that exception handling can work normally + sys.stdout = old_stdout + sys.stderr = old_stderr + raise + + if not exception_occurred: + sys.stdout = old_stdout + sys.stderr = old_stderr + + +_err_help_msg = """ +If the problem persists consider installing astropy_helpers manually using pip +(`pip install astropy_helpers`) or by manually downloading the source archive, +extracting it, and installing by running `python setup.py install` from the +root of the extracted source code. +""" + + +class _AHBootstrapSystemExit(SystemExit): + def __init__(self, *args): + if not args: + msg = 'An unknown problem occurred bootstrapping astropy_helpers.' 
+ else: + msg = args[0] + + msg += '\n' + _err_help_msg + + super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:]) + + +if sys.version_info[:2] < (2, 7): + # In Python 2.6 the distutils log does not log warnings, errors, etc. to + # stderr so we have to wrap it to ensure consistency at least in this + # module + import distutils + + class log(object): + def __getattr__(self, attr): + return getattr(distutils.log, attr) + + def warn(self, msg, *args): + self._log_to_stderr(distutils.log.WARN, msg, *args) + + def error(self, msg): + self._log_to_stderr(distutils.log.ERROR, msg, *args) + + def fatal(self, msg): + self._log_to_stderr(distutils.log.FATAL, msg, *args) + + def log(self, level, msg, *args): + if level in (distutils.log.WARN, distutils.log.ERROR, + distutils.log.FATAL): + self._log_to_stderr(level, msg, *args) + else: + distutils.log.log(level, msg, *args) + + def _log_to_stderr(self, level, msg, *args): + # This is the only truly 'public' way to get the current threshold + # of the log + current_threshold = distutils.log.set_threshold(distutils.log.WARN) + distutils.log.set_threshold(current_threshold) + if level >= current_threshold: + if args: + msg = msg % args + sys.stderr.write('%s\n' % msg) + sys.stderr.flush() + + log = log() + +# Output of `git submodule status` is as follows: +# +# 1: Status indicator: '-' for submodule is uninitialized, '+' if submodule is +# initialized but is not at the commit currently indicated in .gitmodules (and +# thus needs to be updated), or 'U' if the submodule is in an unstable state +# (i.e. has merge conflicts) +# +# 2. SHA-1 hash of the current commit of the submodule (we don't really need +# this information but it's useful for checking that the output is correct) +# +# 3. The output of `git describe` for the submodule's current commit hash (this +# includes for example what branches the commit is on) but only if the +# submodule is initialized. 
We ignore this information for now +_git_submodule_status_re = re.compile( + '^(?P[+-U ])(?P[0-9a-f]{40}) (?P\S+)( .*)?$') + + +# Implement the auto-use feature; this allows use_astropy_helpers() to be used +# at import-time automatically so long as the correct options are specified in +# setup.cfg +_CFG_OPTIONS = [('auto_use', bool), ('path', str), + ('download_if_needed', bool), ('index_url', str), + ('use_git', bool), ('auto_upgrade', bool)] + +def _main(): + if not os.path.exists('setup.cfg'): + return + + cfg = ConfigParser() + + try: + cfg.read('setup.cfg') + except Exception as e: + if DEBUG: + raise + + log.error( + "Error reading setup.cfg: {0!r}\nastropy_helpers will not be " + "automatically bootstrapped and package installation may fail." + "\n{1}".format(e, _err_help_msg)) + return + + if not cfg.has_section('ah_bootstrap'): + return + + kwargs = {} + + for option, type_ in _CFG_OPTIONS: + if not cfg.has_option('ah_bootstrap', option): + continue + + if type_ is bool: + value = cfg.getboolean('ah_bootstrap', option) + else: + value = cfg.get('ah_bootstrap', option) + + kwargs[option] = value + + if kwargs.pop('auto_use', False): + use_astropy_helpers(**kwargs) + + +_main() diff --git a/astropy/__init__.py b/astropy/__init__.py new file mode 100644 index 0000000..3cac206 --- /dev/null +++ b/astropy/__init__.py @@ -0,0 +1,270 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" +Astropy is a package intended to contain core functionality and some +common tools needed for performing astronomy and astrophysics research with +Python. It also provides an index for other astronomy packages and tools for +managing them. 
+""" + +# this indicates whether or not we are in astropy's setup.py +try: + _ASTROPY_SETUP_ +except NameError: + from sys import version_info + if version_info[0] >= 3: + import builtins + else: + import __builtin__ as builtins + builtins._ASTROPY_SETUP_ = False + del version_info + del builtins + +try: + from .version import version as __version__ +except ImportError: + # TODO: Issue a warning using the logging framework + __version__ = '' +try: + from .version import githash as __githash__ +except ImportError: + # TODO: Issue a warning using the logging framework + __githash__ = '' + + +__minimum_numpy_version__ = '1.5.1' + + +# The location of the online documentation for astropy +# This location will normally point to the current released version of astropy +if 'dev' in __version__: + online_docs_root = 'http://docs.astropy.org/en/latest/' +else: + online_docs_root = 'http://docs.astropy.org/en/{0}/'.format(__version__) + + +def _check_numpy(): + """ + Check that Numpy is installed and it is of the minimum version we + require. + """ + # Note: We could have used distutils.version for this comparison, + # but it seems like overkill to import distutils at runtime. + requirement_met = False + + try: + import numpy + except ImportError: + pass + else: + major, minor, rest = numpy.__version__.split(".", 2) + rmajor, rminor, rest = __minimum_numpy_version__.split(".", 2) + requirement_met = ((int(major), int(minor)) >= + (int(rmajor), int(rminor))) + + if not requirement_met: + msg = ("numpy version {0} or later must be installed to use " + "astropy".format( + __minimum_numpy_version__)) + raise ImportError(msg) + + return numpy + + +if not _ASTROPY_SETUP_: + _check_numpy() + + +from . import config as _config +import sys + + +class Conf(_config.ConfigNamespace): + """ + Configuration parameters for `astropy`. 
+ """ + + unicode_output = _config.ConfigItem( + False, + 'When True, use Unicode characters when outputting values, and ' + 'displaying widgets at the console.') + use_color = _config.ConfigItem( + sys.platform != 'win32', + 'When True, use ANSI color escape sequences when writing to the console.', + aliases=['astropy.utils.console.USE_COLOR', 'astropy.logger.USE_COLOR']) + max_lines = _config.ConfigItem( + None, + description='Maximum number of lines in the display of pretty-printed ' + 'objects. If not provided, try to determine automatically from the ' + 'terminal size. Negative numbers mean no limit.', + cfgtype='integer(default=None)', + aliases=['astropy.table.pprint.max_lines']) + max_width = _config.ConfigItem( + None, + description='Maximum number of characters per line in the display of ' + 'pretty-printed objects. If not provided, try to determine ' + 'automatically from the terminal size. Negative numbers mean no ' + 'limit.', + cfgtype='integer(default=None)', + aliases=['astropy.table.pprint.max_width']) + +conf = Conf() + + +UNICODE_OUTPUT = _config.ConfigAlias( + '0.4', 'UNICODE_OUTPUT', 'unicode_output') + + +del sys + + +# set up the test command +def _get_test_runner(): + from .tests.helper import TestRunner + return TestRunner(__path__[0]) + + +def test(package=None, test_path=None, args=None, plugins=None, + verbose=False, pastebin=None, remote_data=False, pep8=False, + pdb=False, open_files=False, parallel=0, docs_path=None, + skip_docs=False): + """ + Run Astropy tests using py.test. A proper set of arguments is + constructed and passed to `pytest.main`. + + Parameters + ---------- + package : str, optional + The name of a specific package to test, e.g. 'io.fits' or 'utils'. + If nothing is specified all default Astropy tests are run. + + test_path : str, optional + Specify location to test by path. May be a single file or + directory. Must be specified absolutely or relative to the + calling directory. 
+ + args : str, optional + Additional arguments to be passed to `pytest.main` in the `args` + keyword argument. + + plugins : list, optional + Plugins to be passed to `pytest.main` in the `plugins` keyword + argument. + + verbose : bool, optional + Convenience option to turn on verbose output from py.test. Passing + True is the same as specifying `-v` in `args`. + + pastebin : {'failed','all',None}, optional + Convenience option for turning on py.test pastebin output. Set to + 'failed' to upload info for failed tests, or 'all' to upload info + for all tests. + + remote_data : bool, optional + Controls whether to run tests marked with @remote_data. These + tests use online data and are not run by default. Set to True to + run these tests. + + pep8 : bool, optional + Turn on PEP8 checking via the pytest-pep8 plugin and disable normal + tests. Same as specifying `--pep8 -k pep8` in `args`. + + pdb : bool, optional + Turn on PDB post-mortem analysis for failing tests. Same as + specifying `--pdb` in `args`. + + open_files : bool, optional + Fail when any tests leave files open. Off by default, because + this adds extra run time to the test suite. Works only on + platforms with a working `lsof` command. + + parallel : int, optional + When provided, run the tests in parallel on the specified + number of CPUs. If parallel is negative, it will use the all + the cores on the machine. Requires the `pytest-xdist` plugin + is installed. + + docs_path : str, optional + The path to the documentation .rst files. + + skip_docs : bool, optional + When `True`, skips running the doctests in the .rst files. + + See Also + -------- + pytest.main : py.test function wrapped by `run_tests`. 
+ + """ + test_runner = _get_test_runner() + return test_runner.run_tests( + package=package, test_path=test_path, args=args, + plugins=plugins, verbose=verbose, pastebin=pastebin, + remote_data=remote_data, pep8=pep8, pdb=pdb, + open_files=open_files, parallel=parallel, docs_path=docs_path, + skip_docs=skip_docs) + + +# if we are *not* in setup mode, import the logger and possibly populate the +# configuration file with the defaults +def _initialize_astropy(): + from . import config + + import os + import sys + from warnings import warn + + # If this __init__.py file is in ./astropy/ then import is within a source dir + is_astropy_source_dir = (os.path.abspath(os.path.dirname(__file__)) == + os.path.abspath('astropy') and os.path.exists('setup.py')) + + def _rollback_import(message): + log.error(message) + # Now disable exception logging to avoid an annoying error in the + # exception logger before we raise the import error: + _teardown_log() + + # Roll back any astropy sub-modules that have been imported thus + # far + + for key in list(sys.modules): + if key.startswith('astropy.'): + del sys.modules[key] + raise ImportError('astropy') + + try: + from .utils import _compiler + except ImportError: + if is_astropy_source_dir: + _rollback_import( + 'You appear to be trying to import astropy from within a ' + 'source checkout; please run `./setup.py develop` or ' + '`./setup.py build_ext --inplace` first so that extension ' + 'modules can be compiled and made importable.') + else: + # Outright broken installation; don't be nice. + raise + + # add these here so we only need to cleanup the namespace at the end + config_dir = os.path.dirname(__file__) + + try: + config.configuration.update_default_config(__package__, config_dir) + except config.configuration.ConfigurationDefaultMissingError as e: + wmsg = (e.args[0] + " Cannot install default profile. 
If you are " + "importing from source, this is expected.") + warn(config.configuration.ConfigurationDefaultMissingWarning(wmsg)) + + +import logging + +# Use the root logger as a dummy log before initilizing Astropy's logger +log = logging.getLogger() + + +if not _ASTROPY_SETUP_: + from .logger import _init_log, _teardown_log + + log = _init_log() + + _initialize_astropy() + + from .utils.misc import find_api_page diff --git a/astropy/astropy.cfg b/astropy/astropy.cfg new file mode 100644 index 0000000..e067635 --- /dev/null +++ b/astropy/astropy.cfg @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- + +### CONSOLE SETTINGS + +## Use Unicode characters when outputting values, and writing widgets to the +## console. +# unicode_output = False + +## When True, use ANSI color escape sequences when writing to the console. +# use_color = True + +## Maximum number of lines for the pretty-printer. If not provided, +## determine automatically from the size of the terminal. -1 means no +## limit. +# max_lines = + +## Maximum number of characters-per-line for the pretty-printer. If +## not provided, determine automatically from the size of the +## terminal, if possible. -1 means no limit. +# max_width = + + +### CORE DATA STRUCTURES AND TRANSFORMATIONS + +[nddata] + +## Whether to issue a warning if NDData arithmetic is performed with +## uncertainties and the uncertainties do not support the propagation of +## correlated uncertainties. +# warn_unsupported_correlated = True + +## Whether to issue a warning when the `~astropy.nddata.NDData` unit +## attribute is changed from a non-``None`` value to another value +## that data values/uncertainties are not scaled with the unit change. +# warn_setting_unit_directly = True + +[table] + +## The template that determines the name of a column if it cannot be +## determined. Uses new-style (format method) string formatting +# auto_colname = col{0} + +[table.jsviewer] + +## The URL to the jQuery library to use. 
If not provided, uses the +## internal copy installed with astropy. +# jquery_url = + +## The URL to the jQuery datatables library to use. If not provided, +## uses the internal copy installed with astropy. +# datatables_url = + +### ASTRONOMY COMPUTATIONS AND UTILITIES + +[vo] + +## URL where VO Service database file is stored. +# vos_baseurl = http://stsdas.stsci.edu/astrolib/vo_databases/ + +## Conesearch database name. +# conesearch_dbname = conesearch_good + +[vo.samp] + +## Whether to allow astropy.vo.samp to use the internet, if available +# use_internet = True + +[vo.validator] + +## Cone Search services master list for validation. +# conesearch_master_list = http://vao.stsci.edu/directory/NVORegInt.asmx/VOTCapabilityPredOpt?predicate=1%3D1&capability=conesearch&VOTStyleOption=2 + +## Only check these Cone Search URLs. +# conesearch_urls = + +## VO Table warning codes that are considered non-critical +# noncritical_warnings = W03, W06, W07, W09, W10, W15, W17, W20, W21, W22, W27, W28, W29, W41, W42, W48, W50 + + +### INPUT/OUTPUT + +[io.fits] + +## If True, enable support for record-valued keywords as described by FITS WCS +## Paper IV. Otherwise they are treated as normal keywords. +# enable_record_valued_keyword_cards = True + +## If True, extension names (i.e. the EXTNAME keyword) should be treated as +## case-sensitive. +# extension_name_case_sensitive = False + +## If True, automatically remove trailing whitespace for string values in +## headers. Otherwise the values are returned verbatim, with all whitespace +## intact. +# strip_header_whitespace = True + +## If True, use memory-mapped file access to read/write the data in FITS files. +## This generally provides better performance, especially for large files, but +## may affect performance in I/O-heavy applications. +# use_memmap = True + +[io.votable] + +## When True, treat fixable violations of the VOTable spec as exceptions. 
+# pedantic = True + + +### NUTS AND BOLTS OF ASTROPY + + +[logger] + +## Threshold for the logging messages. Logging messages that are less severe +## than this level will be ignored. The levels are 'DEBUG', 'INFO', 'WARNING', +## 'ERROR' +# log_level = INFO + +## Whether to log warnings.warn calls +# log_warnings = True + +## Whether to log exceptions before raising them +# log_exceptions = False + +## Whether to always log messages to a log file +# log_to_file = False + +## The file to log messages to. When '', it defaults to a file 'astropy.log' in +## the astropy config directory. +# log_file_path = "" + +## Threshold for logging messages to log_file_path +# log_file_level = INFO + +## Format for log file entries +# log_file_format = "%(asctime)r, %(origin)r, %(levelname)r, %(message)r" + +[utils.data] + +## URL for astropy remote data site. +# dataurl = http://data.astropy.org/ + +## Time to wait for remote data query (in seconds). +# remote_timeout = 3.0 + +## Block size for computing MD5 file hashes. +# hash_block_size = 65536 + +## Number of bytes of remote data to download per step. +# download_block_size = 65536 + +## Number of times to try to get the lock while accessing the data cache before +## giving up. +# download_cache_lock_attempts = 5 + +## If True, temporary download files created when the cache is inacessible will +## be deleted at the end of the python session. +# delete_temporary_downloads_at_exit = True diff --git a/astropy/config/__init__.py b/astropy/config/__init__.py new file mode 100644 index 0000000..0643f7c --- /dev/null +++ b/astropy/config/__init__.py @@ -0,0 +1,13 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +This module contains configuration and setup utilities for the +Astropy project. This includes all functionality related to the +affiliated package index. 
+""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from .paths import * +from .configuration import * +from .affiliated import * diff --git a/astropy/config/affiliated.py b/astropy/config/affiliated.py new file mode 100644 index 0000000..305dfed --- /dev/null +++ b/astropy/config/affiliated.py @@ -0,0 +1,9 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +"""This module contains functions and classes for finding information about +affiliated packages and installing them. +""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +__all__ = [] diff --git a/astropy/config/configuration.py b/astropy/config/configuration.py new file mode 100644 index 0000000..1c8cc6c --- /dev/null +++ b/astropy/config/configuration.py @@ -0,0 +1,1067 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +"""This module contains classes and functions to standardize access to +configuration files for Astropy and affiliated packages. + +.. note:: + The configuration system makes use of the 'configobj' package, which stores + configuration in a text format like that used in the standard library + `ConfigParser`. More information and documentation for configobj can be + found at http://www.voidspace.org.uk/python/configobj.html. 
+""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) +from ..extern import six + +from contextlib import contextmanager +import hashlib +import inspect +import io +from os import path +import pkgutil +import re +import sys +import types +from warnings import warn + +from ..extern.configobj import configobj, validate +from ..utils.exceptions import AstropyWarning, AstropyDeprecationWarning +from ..utils import find_current_module +from ..utils.misc import InheritDocstrings +from .paths import get_config_dir + + +__all__ = ['ConfigurationItem', 'InvalidConfigurationItemWarning', + 'ConfigurationMissingWarning', 'get_config', 'save_config', + 'reload_config', 'ConfigNamespace', 'ConfigItem', 'ConfigAlias'] + + +class InvalidConfigurationItemWarning(AstropyWarning): + """ A Warning that is issued when the configuration value specified in the + astropy configuration file does not match the type expected for that + configuration value. + """ + + +class ConfigurationMissingWarning(AstropyWarning): + """ A Warning that is issued when the configuration directory cannot be + accessed (usually due to a permissions problem). If this warning appears, + configuration items will be set to their defaults rather than read from the + configuration file, and no configuration will persist across sessions. + """ + + +# these are not in __all__ because it's not intended that a user ever see them +class ConfigurationDefaultMissingError(ValueError): + """ An exception that is raised when the configuration defaults (which + should be generated at build-time) are missing. + """ + + +# this is used in astropy/__init__.py +class ConfigurationDefaultMissingWarning(AstropyWarning): + """ A warning that is issued when the configuration defaults (which + should be generated at build-time) are missing. + """ + + +class ConfigurationChangedWarning(AstropyWarning): + """ + A warning that the configuration options have changed. 
+ """ + + +class _ConfigNamespaceMeta(type): + def __init__(cls, name, bases, dict): + if cls.__bases__[0] is object: + return + + for key, val in six.iteritems(dict): + if isinstance(val, ConfigItem): + val.name = key + + +@six.add_metaclass(_ConfigNamespaceMeta) +class ConfigNamespace(object): + """ + A namespace of configuration items. Each subpackage with + configuration items should define a subclass of this class, + containing `ConfigItem` instances as members. + + For example:: + + class Conf(_config.ConfigNamespace): + unicode_output = _config.ConfigItem( + False, + 'Use Unicode characters when outputting values, ...') + use_color = _config.ConfigItem( + sys.platform != 'win32', + 'When True, use ANSI color escape sequences when ...', + aliases=['astropy.utils.console.USE_COLOR']) + conf = Conf() + """ + def set_temp(self, attr, value): + """ + Temporarily set a configuration value. + + Parameters + ---------- + attr : str + Configuration item name + + value : object + The value to set temporarily. + + Examples + -------- + >>> import astropy + >>> with astropy.conf.set_temp('use_color', False): + ... pass + ... # console output will not contain color + >>> # console output contains color again... + """ + if hasattr(self, attr): + return self.__class__.__dict__[attr].set_temp(value) + raise AttributeError("No configuration parameter '{0}'".format(attr)) + + def reload(self, attr=None): + """ + Reload a configuration item from the configuration file. + + Parameters + ---------- + attr : str, optional + The name of the configuration parameter to reload. If not + provided, reload all configuration parameters. 
+ """ + if attr is not None: + if hasattr(self, attr): + return self.__class__.__dict__[attr].reload() + raise AttributeError("No configuration parameter '{0}'".format(attr)) + + for item in six.itervalues(self.__class__.__dict__): + if isinstance(item, ConfigItem): + item.reload() + + def reset(self, attr=None): + """ + Reset a configuration item to its default. + + Parameters + ---------- + attr : str, optional + The name of the configuration parameter to reload. If not + provided, reset all configuration parameters. + """ + if attr is not None: + if hasattr(self, attr): + prop = self.__class__.__dict__[attr] + prop.set(prop.defaultvalue) + return + raise AttributeError("No configuration parameter '{0}'".format(attr)) + + for item in six.itervalues(self.__class__.__dict__): + if isinstance(item, ConfigItem): + item.set(item.defaultvalue) + + +@six.add_metaclass(InheritDocstrings) +class ConfigItem(object): + """ + A setting and associated value stored in a configuration file. + + These objects should be created as members of + `ConfigNamespace` subclasses, for example:: + + class _Conf(config.ConfigNamespace): + unicode_output = config.ConfigItem( + False, + 'Use Unicode characters when outputting values, and writing widgets ' + 'to the console.') + conf = _Conf() + + Parameters + ---------- + defaultvalue : object, optional + The default value for this item. If this is a list of strings, this + item will be interpreted as an 'options' value - this item must be one + of those values, and the first in the list will be taken as the default + value. + + description : str or None, optional + A description of this item (will be shown as a comment in the + configuration file) + + cfgtype : str or None, optional + A type specifier like those used as the *values* of a particular key + in a ``configspec`` file of ``configobj``. If None, the type will be + inferred from the default value. 
+ + module : str or None, optional + The full module name that this item is associated with. The first + element (e.g. 'astropy' if this is 'astropy.config.configuration') + will be used to determine the name of the configuration file, while + the remaining items determine the section. If None, the package will be + inferred from the package within whiich this object's initializer is + called. + + aliases : str, or list of str, optional + The deprecated location(s) of this configuration item. If the + config item is not found at the new location, it will be + searched for at all of the old locations. + + Raises + ------ + RuntimeError + If ``module`` is `None`, but the module this item is created from + cannot be determined. + """ + + # this is used to make validation faster so a Validator object doesn't + # have to be created every time + _validator = validate.Validator() + + def __init__(self, defaultvalue='', description=None, cfgtype=None, + module=None, aliases=None): + from ..utils import isiterable + + if module is None: + module = find_current_module(2) + if module is None: + msg1 = 'Cannot automatically determine get_config module, ' + msg2 = 'because it is not called from inside a valid module' + raise RuntimeError(msg1 + msg2) + else: + module = module.__name__ + + self.module = module + self.description = description + self.__doc__ = description + + # now determine cfgtype if it is not given + if cfgtype is None: + if (isiterable(defaultvalue) and not + isinstance(defaultvalue, six.string_types)): + # it is an options list + dvstr = [six.text_type(v) for v in defaultvalue] + cfgtype = 'option(' + ', '.join(dvstr) + ')' + defaultvalue = dvstr[0] + elif isinstance(defaultvalue, bool): + cfgtype = 'boolean' + elif isinstance(defaultvalue, int): + cfgtype = 'integer' + elif isinstance(defaultvalue, float): + cfgtype = 'float' + elif isinstance(defaultvalue, six.string_types): + cfgtype = 'string' + defaultvalue = six.text_type(defaultvalue) + + self.cfgtype 
= cfgtype + + self._validate_val(defaultvalue) + self.defaultvalue = defaultvalue + + if aliases is None: + self.aliases = [] + elif isinstance(aliases, six.string_types): + self.aliases = [aliases] + else: + self.aliases = aliases + + def __set__(self, obj, value): + return self.set(value) + + def __get__(self, obj, objtype=None): + if obj is None: + return self + return self() + + def set(self, value): + """ Sets the current value of this `ConfigItem`. + + This also updates the comments that give the description and type + information. + + Parameters + ---------- + value + The value this item should be set to. + + Raises + ------ + TypeError + If the provided ``value`` is not valid for this `ConfigItem`. + """ + try: + value = self._validate_val(value) + except validate.ValidateError as e: + msg = 'Provided value for configuration item {0} not valid: {1}' + raise TypeError(msg.format(self.name, e.args[0])) + + sec = get_config(self.module) + + sec[self.name] = value + + @contextmanager + def set_temp(self, value): + """ + Sets this item to a specified value only inside a with block. + + Use as:: + ITEM = ConfigItem('ITEM', 'default', 'description') + + with ITEM.set_temp('newval'): + ... do something that wants ITEM's value to be 'newval' ... + + # ITEM is now 'default' after the with block + + Parameters + ---------- + value + The value to set this item to inside the with block. + + """ + initval = self() + self.set(value) + try: + yield + finally: + self.set(initval) + + def reload(self): + """ Reloads the value of this `ConfigItem` from the relevant + configuration file. + + Returns + ------- + val + The new value loaded from the configuration file. 
+ """ + self.set(self.defaultvalue) + baseobj = get_config(self.module, True) + secname = baseobj.name + + cobj = baseobj + # a ConfigObj's parent is itself, so we look for the parent with that + while cobj.parent is not cobj: + cobj = cobj.parent + + newobj = configobj.ConfigObj(cobj.filename, interpolation=False) + if secname is not None: + if secname not in newobj: + return baseobj.get(self.name) + newobj = newobj[secname] + + if self.name in newobj: + baseobj[self.name] = newobj[self.name] + return baseobj.get(self.name) + + def __repr__(self): + out = '<{0}: name={1!r} value={2!r} at 0x{3:x}>'.format( + self.__class__.__name__, self.name, self(), id(self)) + return out + + def __str__(self): + out = '\n'.join(('{0}: {1}', + ' cfgtype={2!r}', + ' defaultvalue={3!r}', + ' description={4!r}', + ' module={5}', + ' value={6!r}')) + out = out.format(self.__class__.__name__, self.name, self.cfgtype, + self.defaultvalue, self.description, self.module, + self()) + return out + + def __call__(self): + """ Returns the value of this `ConfigItem` + + Returns + ------- + val + This item's value, with a type determined by the `cfgtype` + attribute. + + Raises + ------ + TypeError + If the configuration value as stored is not this item's type. + """ + def section_name(section): + if section == '': + return 'at the top-level' + else: + return 'in section [{0}]'.format(section) + + options = [] + sec = get_config(self.module) + if self.name in sec: + options.append((sec[self.name], self.module, self.name)) + + for alias in self.aliases: + module, name = alias.rsplit('.', 1) + sec = get_config(module) + if '.' in module: + filename, module = module.split('.', 1) + else: + filename = module + module = '' + if name in sec: + if '.' in self.module: + new_module = self.module.split('.', 1)[1] + else: + new_module = '' + warn( + "Config parameter '{0}' {1} of the file '{2}' " + "is deprecated. 
Use '{3}' {4} instead.".format( + name, section_name(module), get_config_filename(filename), + self.name, section_name(new_module)), + AstropyDeprecationWarning) + options.append((sec[name], module, name)) + + if len(options) == 0: + self.set(self.defaultvalue) + options.append((self.defaultvalue, None, None)) + + if len(options) > 1: + filename, sec = self.module.split('.', 1) + warn( + "Config parameter '{0}' {1} of the file '{2}' is " + "given by more than one alias ({3}). Using the first.".format( + self.name, section_name(sec), get_config_filename(filename), + ', '.join([ + '.'.join(x[1:3]) for x in options if x[1] is not None])), + AstropyDeprecationWarning) + + val = options[0][0] + + try: + return self._validate_val(val) + except validate.ValidateError as e: + raise TypeError('Configuration value not valid:' + e.args[0]) + + def _validate_val(self, val): + """ Validates the provided value based on cfgtype and returns the + type-cast value + + throws the underlying configobj exception if it fails + """ + # note that this will normally use the *class* attribute `_validator`, + # but if some arcane reason is needed for making a special one for an + # instance or sub-class, it will be used + return self._validator.check(self.cfgtype, val) + + +class ConfigurationItem(ConfigItem): + """ + A backward-compatibility layer to support the old + `ConfigurationItem` API. The only difference between this and + `ConfigItem` is that this requires an explicit name to be set as + the first argument. + """ + # REMOVE in astropy 0.5 + + def __init__(self, name, defaultvalue='', description=None, cfgtype=None, + module=None, aliases=None): + warn( + "ConfigurationItem has been deprecated in astropy 0.4. " + "Use ConfigItem objects as members of ConfigNamespace subclasses " + "instead. 
See ConfigNamespace for an example.", + AstropyDeprecationWarning) + + # We have to do the automatic module determination here, not + # just in ConfigItem, otherwise the extra stack frame will + # make it come up with the wrong answer. + if module is None: + module = find_current_module(2) + if module is None: + msg1 = 'Cannot automatically determine get_config module, ' + msg2 = 'because it is not called from inside a valid module' + raise RuntimeError(msg1 + msg2) + else: + module = module.__name__ + + super(ConfigurationItem, self).__init__( + defaultvalue=defaultvalue, + description=description, + cfgtype=cfgtype, + module=module, + aliases=aliases) + self.name = name + + def save(self, value=None): + """ + Removed in astropy 0.4. + """ + raise NotImplementedError( + "The ability to save config options was removed in astropy 0.4. " + "To change config settings, edit '{0}' directly.". + format(get_config_filename(self.module))) + + +class ConfigAlias(ConfigItem): + """ + A class that exists to support backward compatibility only. + + This is an alias for a `ConfigItem` that has been moved elsewhere. + It inherits from `ConfigItem` only because it implements the same + interface, not because any of the methods are reused. + + Parameters + ---------- + since : str + The version in which the configuration item was moved. + + old_name : str + The old name of the configuration item. This should be the + name of the variable in Python, not in the configuration file. + + new_name : str + The new name of the configuration item. This is both the name + of the item in Python and in the configuration file (since as of + astropy 0.4, those are always the same thing). + + old_module : str, optional + A fully-qualified, dot-separated path to the module in which + the configuration item used to be defined. If not provided, it + is the name of the module in which `ConfigAlias` is called. 
+ + new_module : str, optional + A fully-qualified, dot-separated path to the module in which + the configuration item is now defined. If not provided, it is + the name of the module in which `ConfigAlias` is called. This + string should not contain the ``.conf`` object. For example, if + the new configuration item is in ``astropy.conf.use_unicode``, this + value only needs to be ``'astropy'``. + """ + # REMOVE in astropy 0.5 + + def __init__(self, since, old_name, new_name, old_module=None, new_module=None): + if old_module is None: + old_module = find_current_module(2) + if old_module is None: + msg1 = 'Cannot automatically determine get_config module, ' + msg2 = 'because it is not called from inside a valid module' + raise RuntimeError(msg1 + msg2) + else: + old_module = old_module.__name__ + + if new_module is None: + new_module = old_module + + self._since = since + self._old_name = old_name + self._new_name = new_name + self._old_module = old_module + self._new_module = new_module + + def _deprecation_warning(self): + warn( + "Since {0}, config parameter '{1}.{2}' is deprecated. 
" + "Use '{3}.conf.{4}' instead.".format( + self._since, + self._old_module, self._old_name, + self._new_module, self._new_name), + AstropyDeprecationWarning) + + def _get_target(self): + if self._new_module not in sys.modules: + __import__(self._new_module) + mod = sys.modules[self._new_module] + cfg = getattr(mod, 'conf') + return cfg + + def set(self, value): + self._deprecation_warning() + setattr(self._get_target(), self._new_name, value) + + def set_temp(self, value): + self._deprecation_warning() + return self._get_target().set_temp(self._new_name, value) + + def save(self, value=None): + self._deprecation_warning() + return self._get_target().save(value) + + def reload(self): + self._deprecation_warning() + return self._get_target().reload(self._new_name) + + def __repr__(self): + return repr(getattr(self._get_target().__class__, self._new_name)) + + def __str__(self): + return str(getattr(self._get_target().__class__, self._new_name)) + + def __call__(self): + self._deprecation_warning() + return getattr(self._get_target(), self._new_name) + + +# this dictionary stores the master copy of the ConfigObj's for each +# root package +_cfgobjs = {} + + +def get_config_filename(packageormod=None): + """ + Get the filename of the config file associated with the given + package or module. + """ + cfg = get_config(packageormod) + while cfg.parent is not cfg: + cfg = cfg.parent + return cfg.filename + + +# This is used by testing to override the config file, so we can test +# with various config files that exercise different features of the +# config system. +_override_config_file = None + + +def get_config(packageormod=None, reload=False): + """ Gets the configuration object or section associated with a particular + package or module. + + Parameters + ----------- + packageormod : str or None + The package for which to retrieve the configuration object. 
If a + string, it must be a valid package name, or if `None`, the package from + which this function is called will be used. + + reload : bool, optional + Reload the file, even if we have it cached. + + Returns + ------- + cfgobj : ``configobj.ConfigObj`` or ``configobj.Section`` + If the requested package is a base package, this will be the + ``configobj.ConfigObj`` for that package, or if it is a subpackage or + module, it will return the relevant ``configobj.Section`` object. + + Raises + ------ + RuntimeError + If ``packageormod`` is `None`, but the package this item is created + from cannot be determined. + """ + if packageormod is None: + packageormod = find_current_module(2) + if packageormod is None: + msg1 = 'Cannot automatically determine get_config module, ' + msg2 = 'because it is not called from inside a valid module' + raise RuntimeError(msg1 + msg2) + else: + packageormod = packageormod.__name__ + + packageormodspl = packageormod.split('.') + rootname = packageormodspl[0] + secname = '.'.join(packageormodspl[1:]) + + cobj = _cfgobjs.get(rootname, None) + + if cobj is None or reload: + if _ASTROPY_SETUP_: + # There's no reason to use anything but the default config + cobj = configobj.ConfigObj(interpolation=False) + else: + cfgfn = None + try: + # This feature is intended only for use by the unit tests + if _override_config_file is not None: + cfgfn = _override_config_file + else: + cfgfn = path.join(get_config_dir(), rootname + '.cfg') + cobj = configobj.ConfigObj(cfgfn, interpolation=False) + except (IOError, OSError) as e: + msg = ('Configuration defaults will be used due to ') + errstr = '' if len(e.args) < 1 else (':' + str(e.args[0])) + msg += e.__class__.__name__ + errstr + msg += ' on {0}'.format(cfgfn) + warn(ConfigurationMissingWarning(msg)) + + # This caches the object, so if the file becomes accessible, this + # function won't see it unless the module is reloaded + cobj = configobj.ConfigObj(interpolation=False) + + _cfgobjs[rootname] = 
cobj + + if secname: # not the root package + if secname not in cobj: + cobj[secname] = {} + return cobj[secname] + else: + return cobj + + +def save_config(packageormod=None, filename=None): + """ + Removed in astropy 0.4. + """ + raise NotImplementedError( + "The ability to save config options was removed in astropy 0.4. " + "To change config settings, edit '{0}' directly.". + format(get_config_filename(packageormod))) + + +def reload_config(packageormod=None): + """ Reloads configuration settings from a configuration file for the root + package of the requested package/module. + + This overwrites any changes that may have been made in `ConfigItem` + objects. This applies for any items that are based on this file, which + is determined by the *root* package of ``packageormod`` + (e.g. ``'astropy.cfg'`` for the ``'astropy.config.configuration'`` + module). + + Parameters + ---------- + packageormod : str or None + The package or module name - see `get_config` for details. + """ + sec = get_config(packageormod, True) + # look for the section that is its own parent - that's the base object + while sec.parent is not sec: + sec = sec.parent + sec.reload() + + +def is_unedited_config_file(filename, template_content=None): + """ + Determines if a config file can be safely replaced because it doesn't + actually contain any meaningful content. + + To meet this criteria, the config file must be either: + + - All comments or completely empty + + - An exact match to a "legacy" version of the config file prior to + Astropy 0.4, when APE3 was implemented and the config file + contained commented-out values by default. + + If the config file is already identical to the template config + file, `False` is returned so it is not needlessly overwritten. + """ + # We want to calculate the md5sum using universal line endings, so + # that even if the files had their line endings converted to \r\n + # on Windows, this will still work. 
+ + with io.open(filename, 'rt', encoding='latin-1') as fd: + content = fd.read() + + if content == template_content: + return False + + content = content.encode('latin-1') + + # The jquery_url setting, present in 0.3.2 and later only, is + # effectively auto-generated by the build system, so we need to + # ignore it in the md5sum calculation for 0.3.2. + content = re.sub(b'\njquery_url\s*=\s*[^\n]+', b'', content) + + # First determine if the config file has any effective content + buffer = io.BytesIO(content) + buffer.seek(0) + raw_cfg = configobj.ConfigObj(buffer, interpolation=True) + for v in six.itervalues(raw_cfg): + if len(v): + break + else: + return True + + # Now determine if it matches the md5sum of a known, unedited + # config file. + known_configs = set([ + '7d4b4f1120304b286d71f205975b1286', # v0.3.2 + '5df7e409425e5bfe7ed041513fda3288', # v0.3 + '8355f99a01b3bdfd8761ef45d5d8b7e5', # v0.2 + '4ea5a84de146dc3fcea2a5b93735e634' # v0.2.1, v0.2.2, v0.2.3, v0.2.4, v0.2.5 + ]) + + md5 = hashlib.md5() + md5.update(content) + digest = md5.hexdigest() + return digest in known_configs + + +# this is not in __all__ because it's not intended that a user uses it +def update_default_config(pkg, default_cfg_dir_or_fn, version=None): + """ + Checks if the configuration file for the specified package exists, + and if not, copy over the default configuration. If the + configuration file looks like it has already been edited, we do + not write over it, but instead write a file alongside it named + ``pkg.version.cfg`` as a "template" for the user. + + Parameters + ---------- + pkg : str + The package to be updated. + default_cfg_dir_or_fn : str + The filename or directory name where the default configuration file is. + If a directory name, ``'pkg.cfg'`` will be used in that directory. + version : str, optional + The current version of the given package. If not provided, it will + be obtained from ``pkg.__version__``. 
+ + Returns + ------- + updated : bool + If the profile was updated, `True`, otherwise `False`. + + Raises + ------ + ConfigurationDefaultMissingError + If the default configuration could not be found. + + """ + + if path.isdir(default_cfg_dir_or_fn): + default_cfgfn = path.join(default_cfg_dir_or_fn, pkg + '.cfg') + else: + default_cfgfn = default_cfg_dir_or_fn + + if not path.isfile(default_cfgfn): + # There is no template configuration file, which basically + # means the affiliated package is not using the configuration + # system, so just return. + return + + cfgfn = get_config(pkg).filename + + with io.open(default_cfgfn, 'rt', encoding='latin-1') as fr: + template_content = fr.read() + + doupdate = False + if cfgfn is not None: + if path.exists(cfgfn): + doupdate = is_unedited_config_file(cfgfn, template_content) + elif path.exists(path.dirname(cfgfn)): + doupdate = True + + if version is None: + mod = __import__(pkg) + if not hasattr(mod, '__version__'): + raise ConfigurationDefaultMissingError( + 'Could not determine version of package {0}'.format(pkg)) + version = mod.__version__ + + # Don't install template files for dev versions, or we'll end up + # spamming `~/.astropy/config`. + if not 'dev' in version and cfgfn is not None: + template_path = path.join( + get_config_dir(), '{0}.{1}.cfg'.format(pkg, version)) + needs_template = not path.exists(template_path) + else: + needs_template = False + + if doupdate or needs_template: + if needs_template: + with io.open(template_path, 'wt', encoding='latin-1') as fw: + fw.write(template_content) + # If we just installed a new template file and we can't + # update the main configuration file because it has user + # changes, display a warning. + if not doupdate: + warn( + "The configuration options in {0} {1} may have changed, " + "your configuration file was not updated in order to " + "preserve local changes. 
A new configuration template " + "has been saved to '{2}'.".format( + pkg, version, template_path), + ConfigurationChangedWarning) + + if doupdate: + with io.open(cfgfn, 'wt', encoding='latin-1') as fw: + fw.write(template_content) + return True + + return False + + +# DEPRECATED FUNCTIONALITY ---------------------------------------- +# Everything below this point should be removed in astropy 0.5 + +def get_config_items(packageormod=None): + """ Returns the `ConfigurationItem` objects associated with a particular + module. + + Parameters + ---------- + packageormod : str or None + The package or module name or None to get the current module's items. + + Returns + ------- + configitems : dict + A dictionary where the keys are the name of the items as the are named + in the module, and the values are the associated `ConfigurationItem` + objects. + + """ + + from ..utils import find_current_module + + if packageormod is None: + packageormod = find_current_module(2) + if packageormod is None: + msg1 = 'Cannot automatically determine get_config module, ' + msg2 = 'because it is not called from inside a valid module' + raise RuntimeError(msg1 + msg2) + elif isinstance(packageormod, six.string_types): + __import__(packageormod) + packageormod = sys.modules[packageormod] + elif inspect.ismodule(packageormod): + pass + else: + raise TypeError('packageormod in get_config_items is invalid') + + configitems = {} + for n, obj in six.iteritems(packageormod.__dict__): + # if it's not a new-style object, it's certainly not a ConfigurationItem + if hasattr(obj, '__class__'): + fqn = obj.__class__.__module__ + '.' + obj.__class__.__name__ + if fqn == 'astropy.config.configuration.ConfigurationItem': + configitems[n] = obj + + return configitems + + +def _fix_section_blank_lines(sec, recurse=True, gotoroot=True): + """ + Adds a blank line to the comments of any sections in the requested sections, + recursing into subsections if `recurse` is True. 
If `gotoroot` is True, + this first goes to the root of the requested section, just like + `save_config` and `reload_config` - this does nothing if `sec` is a + configobj already. + """ + + if not hasattr(sec, 'sections'): + sec = get_config(sec) + + # look for the section that is its own parent - that's the base object + if gotoroot: + while sec.parent is not sec: + sec = sec.parent + + for isec, snm in enumerate(sec.sections): + comm = sec.comments[snm] + if len(comm) == 0 or comm[-1] != '': + if sec.parent is sec and isec == 0: + pass # don't do it for first section + else: + comm.append('') + if recurse: + _fix_section_blank_lines(sec[snm], True, False) + + +def _save_config(packageormod=None, filename=None): + """ Saves all configuration settings to the configuration file for the + root package of the requested package/module. + + This overwrites any configuration items that have been changed in + `ConfigurationItem` objects that are based on the configuration file + determined by the *root* package of ``packageormod`` (e.g. 'astropy.cfg' + for the 'astropy.config.configuration' module). + + .. note:: + To save only a single item, use the `ConfigurationItem.save` method - + this will save all options in the current session that may have been + changed. + + Parameters + ---------- + packageormod : str or None + The package or module name - see `get_config` for details. + + filename : str, optional + Save the config to a given filename instead of to the default location. 
+ + """ + + sec = get_config(packageormod) + # look for the section that is its own parent - that's the base object + while sec.parent is not sec: + sec = sec.parent + if filename is not None: + with io.open(filename, 'w', encoding='utf-8') as f: + sec.write(outfile=f) + else: + sec.write() + + +_unsafe_import_regex = [r'.*.setup_package'] +_unsafe_import_regex = [('(' + pat + ')') for pat in _unsafe_import_regex] +_unsafe_import_regex = re.compile('|'.join(_unsafe_import_regex)) + + +def generate_all_config_items(pkgornm=None, reset_to_default=False, + filename=None): + """ Given a root package name or package, this function walks + through all the subpackages and modules, which should populate any + ConfigurationItem objects defined at the module level. If + `reset_to_default` is True, it also sets all of the items to their default + values, regardless of what the file's value currently is. It then saves the + `ConfigObj`. + + Parameters + ---------- + pkgname : str, module, or None + The package for which to generate configuration items. If None, + the package of the function that calls this one will be used. + + reset_to_default : bool + If True, the configuration items will all be set to their defaults. + + filename : str, optional + Save the generated config items to the given filename instead of to + the default config file path. + + Returns + ------- + cfgfn : str + The filename of the generated configuration item. 
+ + """ + + from ..utils import find_current_module + + if pkgornm is None: + pkgornm = find_current_module(1).__name__.split('.')[0] + + if isinstance(pkgornm, six.string_types): + package = pkgutil.get_loader(pkgornm).load_module(pkgornm) + elif (isinstance(pkgornm, types.ModuleType) and + '__init__' in pkgornm.__file__): + package = pkgornm + else: + msg = 'generate_all_config_items was not given a package/package name' + raise TypeError(msg) + + if hasattr(package, '__path__'): + pkgpath = package.__path__ + elif hasattr(package, '__file__'): + pkgpath = path.split(package.__file__)[0] + else: + raise AttributeError('package to generate config items for does not ' + 'have __file__ or __path__') + + prefix = package.__name__ + '.' + for imper, nm, ispkg in pkgutil.walk_packages(pkgpath, prefix): + if nm == 'astropy.config.tests.test_configs': + continue + if not _unsafe_import_regex.match(nm): + imper.find_module(nm) + if reset_to_default: + for cfgitem in six.itervalues(get_config_items(nm)): + cfgitem.set(cfgitem.defaultvalue) + + _fix_section_blank_lines(package.__name__, True, True) + + _save_config(package.__name__, filename=filename) + + if filename is None: + return get_config(package.__name__).filename + else: + return filename diff --git a/astropy/config/paths.py b/astropy/config/paths.py new file mode 100644 index 0000000..cc6b2e9 --- /dev/null +++ b/astropy/config/paths.py @@ -0,0 +1,185 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" This module contains functions to determine where configuration and +data/cache files used by Astropy should be placed. +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from ..extern import six + +import os +import sys + + +__all__ = ['get_config_dir', 'get_cache_dir'] + + +def _find_home(): + """ Locates and return the home directory (or best approximation) on this + system. 
+ + Raises + ------ + OSError + If the home directory cannot be located - usually means you are running + Astropy on some obscure platform that doesn't have standard home + directories. + """ + + + # this is used below to make fix up encoding issues that sometimes crop up + # in py2.x but not in py3.x + if six.PY2: + decodepath = lambda pth: pth.decode(sys.getfilesystemencoding()) + elif six.PY3: + decodepath = lambda pth: pth + + # First find the home directory - this is inspired by the scheme ipython + # uses to identify "home" + if os.name == 'posix': + # Linux, Unix, AIX, OS X + if 'HOME' in os.environ: + homedir = decodepath(os.environ['HOME']) + else: + raise OSError('Could not find unix home directory to search for ' + 'astropy config dir') + elif os.name == 'nt': # This is for all modern Windows (NT or after) + if 'MSYSTEM' in os.environ and os.environ.get('HOME'): + # Likely using an msys shell; use whatever it is using for its + # $HOME directory + homedir = decodepath(os.environ['HOME']) + # Next try for a network home + elif 'HOMESHARE' in os.environ: + homedir = decodepath(os.environ['HOMESHARE']) + # See if there's a local home + elif 'HOMEDRIVE' in os.environ and 'HOMEPATH' in os.environ: + homedir = os.path.join(os.environ['HOMEDRIVE'], + os.environ['HOMEPATH']) + homedir = decodepath(homedir) + # Maybe a user profile? 
+ elif 'USERPROFILE' in os.environ: + homedir = decodepath(os.path.join(os.environ['USERPROFILE'])) + else: + try: + from ..extern.six.moves import winreg as wreg + shell_folders = r'Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders' + key = wreg.OpenKey(wreg.HKEY_CURRENT_USER, shell_folders) + + homedir = wreg.QueryValueEx(key, 'Personal')[0] + homedir = decodepath(homedir) + key.Close() + except: + # As a final possible resort, see if HOME is present + if 'HOME' in os.environ: + homedir = decodepath(os.environ['HOME']) + else: + raise OSError('Could not find windows home directory to ' + 'search for astropy config dir') + else: + # for other platforms, try HOME, although it probably isn't there + if 'HOME' in os.environ: + homedir = decodepath(os.environ['HOME']) + else: + raise OSError('Could not find a home directory to search for ' + 'astropy config dir - are you on an unspported ' + 'platform?') + return homedir + + +def get_config_dir(create=True): + """ + Determines the Astropy configuration directory name and creates the + directory if it doesn't exist. + + This directory is typically ``$HOME/.astropy/config``, but if the + XDG_CONFIG_HOME environment variable is set and the + ``$XDG_CONFIG_HOME/astropy`` directory exists, it will be that directory. + If neither exists, the former will be created and symlinked to the latter. + + Returns + ------- + configdir : str + The absolute path to the configuration directory. 
+ + """ + + # symlink will be set to this if the directory is created + linkto = None + # first look for XDG_CONFIG_HOME + xch = os.environ.get('XDG_CONFIG_HOME') + + if xch is not None and os.path.exists(xch): + xchpth = os.path.join(xch, 'astropy') + if not os.path.islink(xchpth): + if os.path.exists(xchpth): + return os.path.abspath(xchpth) + else: + linkto = xchpth + return os.path.abspath(_find_or_create_astropy_dir('config', linkto)) + + +def get_cache_dir(): + """ + Determines the Astropy cache directory name and creates the directory if it + doesn't exist. + + This directory is typically ``$HOME/.astropy/cache``, but if the + XDG_CACHE_HOME environment variable is set and the + ``$XDG_CACHE_HOME/astropy`` directory exists, it will be that directory. + If neither exists, the former will be created and symlinked to the latter. + + Returns + ------- + cachedir : str + The absolute path to the cache directory. + + """ + + # symlink will be set to this if the directory is created + linkto = None + # first look for XDG_CACHE_HOME + xch = os.environ.get('XDG_CACHE_HOME') + + if xch is not None and os.path.exists(xch): + xchpth = os.path.join(xch, 'astropy') + if not os.path.islink(xchpth): + if os.path.exists(xchpth): + return os.path.abspath(xchpth) + else: + linkto = xchpth + + return os.path.abspath(_find_or_create_astropy_dir('cache', linkto)) + + +def _find_or_create_astropy_dir(dirnm, linkto): + innerdir = os.path.join(_find_home(), '.astropy') + maindir = os.path.join(_find_home(), '.astropy', dirnm) + + if not os.path.exists(maindir): + # first create .astropy dir if needed + if not os.path.exists(innerdir): + try: + os.mkdir(innerdir) + except OSError: + if not os.path.isdir(innerdir): + raise + elif not os.path.isdir(innerdir): + msg = 'Intended Astropy directory {0} is actually a file.' 
+ raise IOError(msg.format(innerdir)) + + try: + os.mkdir(maindir) + except OSError: + if not os.path.isdir(maindir): + raise + + if (not sys.platform.startswith('win') and + linkto is not None and + not os.path.exists(linkto)): + os.symlink(maindir, linkto) + + elif not os.path.isdir(maindir): + msg = 'Intended Astropy {0} directory {1} is actually a file.' + raise IOError(msg.format(dirnm, maindir)) + + return os.path.abspath(maindir) diff --git a/astropy/config/setup_package.py b/astropy/config/setup_package.py new file mode 100644 index 0000000..e4a1c0f --- /dev/null +++ b/astropy/config/setup_package.py @@ -0,0 +1,11 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + + +def get_package_data(): + return { + str('astropy.config.tests'): ['data/*.cfg'] + } + + +def requires_2to3(): + return False diff --git a/astropy/config/tests/__init__.py b/astropy/config/tests/__init__.py new file mode 100644 index 0000000..800d82e --- /dev/null +++ b/astropy/config/tests/__init__.py @@ -0,0 +1,2 @@ +from __future__ import (absolute_import, division, print_function, + unicode_literals) diff --git a/astropy/config/tests/data/alias.cfg b/astropy/config/tests/data/alias.cfg new file mode 100644 index 0000000..612cdd9 --- /dev/null +++ b/astropy/config/tests/data/alias.cfg @@ -0,0 +1,2 @@ +[coordinates.name_resolve] +name_resolve_timeout = 42.0 \ No newline at end of file diff --git a/astropy/config/tests/data/astropy.0.3.cfg b/astropy/config/tests/data/astropy.0.3.cfg new file mode 100644 index 0000000..cafa0e4 --- /dev/null +++ b/astropy/config/tests/data/astropy.0.3.cfg @@ -0,0 +1,149 @@ + +# Use Unicode characters when outputting values, and writing widgets to the +# console. +unicode_output = False +[utils.console] + +# When True, use ANSI color escape sequences when writing to the console. +use_color = True + +[logger] + +# Threshold for the logging messages. Logging messages that are less severe +# than this level will be ignored. 
The levels are 'DEBUG', 'INFO', 'WARNING', +# 'ERROR' +log_level = INFO + +# Whether to use color for the level names +use_color = True + +# Whether to log warnings.warn calls +log_warnings = True + +# Whether to log exceptions before raising them +log_exceptions = False + +# Whether to always log messages to a log file +log_to_file = False + +# The file to log messages to. When '', it defaults to a file 'astropy.log' in +# the astropy config directory. +log_file_path = "" + +# Threshold for logging messages to log_file_path +log_file_level = INFO + +# Format for log file entries +log_file_format = "%(asctime)r, %(origin)r, %(levelname)r, %(message)r" + +[coordinates.name_resolve] + +# The URL to Sesame's web-queryable database. +sesame_url = http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame/, http://vizier.cfa.harvard.edu/viz-bin/nph-sesame/ + +# This specifies the default database that SESAME will query when using the +# name resolve mechanism in the coordinates subpackage. Default is to search +# all databases, but this can be 'all', 'simbad', 'ned', or 'vizier'. +# Options: all, simbad, ned, vizier +sesame_database = all + +# This is the maximum time to wait for a response from a name resolve query to +# SESAME in seconds. +name_resolve_timeout = 5 + +[table.pprint] + +# Maximum number of lines for the pretty-printer to use if it cannot determine +# the terminal size. Negative numbers mean no limit. +max_lines = 25 + +# Maximum number of characters for the pretty-printer to use per line if it +# cannot determine the terminal size. Negative numbers mean no limit. +max_width = 80 + +[table.table] + +# The template that determines the name of a column if it cannot be +# determined. Uses new-style (format method) string formatting +auto_colname = col{0} + +[utils.data] + +# URL for astropy remote data site. +dataurl = http://data.astropy.org/ + +# Time to wait for remote data query (in seconds). +remote_timeout = 3.0 + +# Block size for computing MD5 file hashes. 
+hash_block_size = 65536 + +# Number of bytes of remote data to download per step. +download_block_size = 65536 + +# Number of times to try to get the lock while accessing the data cache before +# giving up. +download_cache_lock_attempts = 5 + +# If True, temporary download files created when the cache is inacessible will +# be deleted at the end of the python session. +delete_temporary_downloads_at_exit = True + +[io.fits] + +# If True, enable support for record-valued keywords as described by FITS WCS +# Paper IV. Otherwise they are treated as normal keywords. +enabled_record_valued_keyword_cards = True + +# If True, extension names (i.e. the EXTNAME keyword) should be treated as +# case-sensitive. +extension_name_case_sensitive = False + +# If True, automatically remove trailing whitespace for string values in +# headers. Otherwise the values are returned verbatim, with all whitespace +# intact. +strip_header_whitespace = True + +# If True, use memory-mapped file access to read/write the data in FITS files. +# This generally provides better performance, especially for large files, but +# may affect performance in I/O-heavy applications. +use_memmap = True + +[io.votable.table] + +# When True, treat fixable violations of the VOTable spec as exceptions. +pedantic = False + +[cosmology.core] + +# The default cosmology to use. Note this is only read on import, so changing +# this value at runtime has no effect. +default_cosmology = no_default + +[nddata.nddata] + +# Whether to issue a warning if NDData arithmetic is performed with +# uncertainties and the uncertainties do not support the propagation of +# correlated uncertainties. +warn_unsupported_correlated = True + +[vo.client.vos_catalog] + +# URL where VO Service database file is stored. +vos_baseurl = http://stsdas.stsci.edu/astrolib/vo_databases/ + +[vo.client.conesearch] + +# Conesearch database name. 
+conesearch_dbname = conesearch_good + +[vo.validator.validate] + +# Cone Search services master list for validation. +cs_mstr_list = http://vao.stsci.edu/directory/NVORegInt.asmx/VOTCapabilityPredOpt?predicate=1%3D1&capability=conesearch&VOTStyleOption=2 + +# Only check these Cone Search URLs. +cs_urls = http://archive.noao.edu/nvo/usno.php?cat=a&, http://gsss.stsci.edu/webservices/vo/ConeSearch.aspx?CAT=GSC23&, http://irsa.ipac.caltech.edu/cgi-bin/Oasis/CatSearch/nph-catsearch?CAT=fp_psc&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/220/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/243/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/252/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/254/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/255/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/284/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=II/246/out&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=field&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=photoobjall&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=phototag&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=specobjall&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=specphotoall&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=sppparams&, http://vo.astronet.ru/sai_cas/conesearch?cat=twomass&tab=psc&, http://vo.astronet.ru/sai_cas/conesearch?cat=twomass&tab=xsc&, http://vo.astronet.ru/sai_cas/conesearch?cat=usnoa2&tab=main&, http://vo.astronet.ru/sai_cas/conesearch?cat=usnob1&tab=main&, http://wfaudata.roe.ac.uk/sdssdr7-dsa/DirectCone?DSACAT=SDSS_DR7&DSATAB=Galaxy&, http://wfaudata.roe.ac.uk/sdssdr7-dsa/DirectCone?DSACAT=SDSS_DR7&DSATAB=PhotoObj&, http://wfaudata.roe.ac.uk/sdssdr7-dsa/DirectCone?DSACAT=SDSS_DR7&DSATAB=PhotoObjAll&, http://wfaudata.roe.ac.uk/sdssdr7-dsa/DirectCone?DSACAT=SDSS_DR7&DSATAB=Star&, 
http://wfaudata.roe.ac.uk/sdssdr8-dsa/DirectCone?DSACAT=SDSS_DR8&DSATAB=PhotoObjAll&, http://wfaudata.roe.ac.uk/sdssdr8-dsa/DirectCone?DSACAT=SDSS_DR8&DSATAB=SpecObjAll&, http://wfaudata.roe.ac.uk/twomass-dsa/DirectCone?DSACAT=TWOMASS&DSATAB=twomass_psc&, http://wfaudata.roe.ac.uk/twomass-dsa/DirectCone?DSACAT=TWOMASS&DSATAB=twomass_xsc&, http://www.nofs.navy.mil/cgi-bin/vo_cone.cgi?CAT=USNO-A2&, http://www.nofs.navy.mil/cgi-bin/vo_cone.cgi?CAT=USNO-B1& + +# VO Table warning codes that are considered non-critical +noncrit_warnings = W03, W06, W07, W09, W10, W15, W17, W20, W21, W22, W27, W28, W29, W41, W42, W48, W50 diff --git a/astropy/config/tests/data/astropy.0.3.windows.cfg b/astropy/config/tests/data/astropy.0.3.windows.cfg new file mode 100644 index 0000000..589703f --- /dev/null +++ b/astropy/config/tests/data/astropy.0.3.windows.cfg @@ -0,0 +1,149 @@ + +# Use Unicode characters when outputting values, and writing widgets to the +# console. +unicode_output = False +[utils.console] + +# When True, use ANSI color escape sequences when writing to the console. +use_color = True + +[logger] + +# Threshold for the logging messages. Logging messages that are less severe +# than this level will be ignored. The levels are 'DEBUG', 'INFO', 'WARNING', +# 'ERROR' +log_level = INFO + +# Whether to use color for the level names +use_color = True + +# Whether to log warnings.warn calls +log_warnings = True + +# Whether to log exceptions before raising them +log_exceptions = False + +# Whether to always log messages to a log file +log_to_file = False + +# The file to log messages to. When '', it defaults to a file 'astropy.log' in +# the astropy config directory. +log_file_path = "" + +# Threshold for logging messages to log_file_path +log_file_level = INFO + +# Format for log file entries +log_file_format = "%(asctime)r, %(origin)r, %(levelname)r, %(message)r" + +[coordinates.name_resolve] + +# The URL to Sesame's web-queryable database. 
+sesame_url = http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame/, http://vizier.cfa.harvard.edu/viz-bin/nph-sesame/ + +# This specifies the default database that SESAME will query when using the +# name resolve mechanism in the coordinates subpackage. Default is to search +# all databases, but this can be 'all', 'simbad', 'ned', or 'vizier'. +# Options: all, simbad, ned, vizier +sesame_database = all + +# This is the maximum time to wait for a response from a name resolve query to +# SESAME in seconds. +name_resolve_timeout = 5 + +[table.pprint] + +# Maximum number of lines for the pretty-printer to use if it cannot determine +# the terminal size. Negative numbers mean no limit. +max_lines = 25 + +# Maximum number of characters for the pretty-printer to use per line if it +# cannot determine the terminal size. Negative numbers mean no limit. +max_width = 80 + +[table.table] + +# The template that determines the name of a column if it cannot be +# determined. Uses new-style (format method) string formatting +auto_colname = col{0} + +[utils.data] + +# URL for astropy remote data site. +dataurl = http://data.astropy.org/ + +# Time to wait for remote data query (in seconds). +remote_timeout = 3.0 + +# Block size for computing MD5 file hashes. +hash_block_size = 65536 + +# Number of bytes of remote data to download per step. +download_block_size = 65536 + +# Number of times to try to get the lock while accessing the data cache before +# giving up. +download_cache_lock_attempts = 5 + +# If True, temporary download files created when the cache is inacessible will +# be deleted at the end of the python session. +delete_temporary_downloads_at_exit = True + +[io.fits] + +# If True, enable support for record-valued keywords as described by FITS WCS +# Paper IV. Otherwise they are treated as normal keywords. +enabled_record_valued_keyword_cards = True + +# If True, extension names (i.e. the EXTNAME keyword) should be treated as +# case-sensitive. 
+extension_name_case_sensitive = False + +# If True, automatically remove trailing whitespace for string values in +# headers. Otherwise the values are returned verbatim, with all whitespace +# intact. +strip_header_whitespace = True + +# If True, use memory-mapped file access to read/write the data in FITS files. +# This generally provides better performance, especially for large files, but +# may affect performance in I/O-heavy applications. +use_memmap = True + +[io.votable.table] + +# When True, treat fixable violations of the VOTable spec as exceptions. +pedantic = False + +[cosmology.core] + +# The default cosmology to use. Note this is only read on import, so changing +# this value at runtime has no effect. +default_cosmology = no_default + +[nddata.nddata] + +# Whether to issue a warning if NDData arithmetic is performed with +# uncertainties and the uncertainties do not support the propagation of +# correlated uncertainties. +warn_unsupported_correlated = True + +[vo.client.vos_catalog] + +# URL where VO Service database file is stored. +vos_baseurl = http://stsdas.stsci.edu/astrolib/vo_databases/ + +[vo.client.conesearch] + +# Conesearch database name. +conesearch_dbname = conesearch_good + +[vo.validator.validate] + +# Cone Search services master list for validation. +cs_mstr_list = http://vao.stsci.edu/directory/NVORegInt.asmx/VOTCapabilityPredOpt?predicate=1%3D1&capability=conesearch&VOTStyleOption=2 + +# Only check these Cone Search URLs. 
+cs_urls = http://archive.noao.edu/nvo/usno.php?cat=a&, http://gsss.stsci.edu/webservices/vo/ConeSearch.aspx?CAT=GSC23&, http://irsa.ipac.caltech.edu/cgi-bin/Oasis/CatSearch/nph-catsearch?CAT=fp_psc&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/220/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/243/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/252/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/254/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/255/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/284/out&, http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=II/246/out&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=field&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=photoobjall&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=phototag&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=specobjall&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=specphotoall&, http://vo.astronet.ru/sai_cas/conesearch?cat=sdssdr7&tab=sppparams&, http://vo.astronet.ru/sai_cas/conesearch?cat=twomass&tab=psc&, http://vo.astronet.ru/sai_cas/conesearch?cat=twomass&tab=xsc&, http://vo.astronet.ru/sai_cas/conesearch?cat=usnoa2&tab=main&, http://vo.astronet.ru/sai_cas/conesearch?cat=usnob1&tab=main&, http://wfaudata.roe.ac.uk/sdssdr7-dsa/DirectCone?DSACAT=SDSS_DR7&DSATAB=Galaxy&, http://wfaudata.roe.ac.uk/sdssdr7-dsa/DirectCone?DSACAT=SDSS_DR7&DSATAB=PhotoObj&, http://wfaudata.roe.ac.uk/sdssdr7-dsa/DirectCone?DSACAT=SDSS_DR7&DSATAB=PhotoObjAll&, http://wfaudata.roe.ac.uk/sdssdr7-dsa/DirectCone?DSACAT=SDSS_DR7&DSATAB=Star&, http://wfaudata.roe.ac.uk/sdssdr8-dsa/DirectCone?DSACAT=SDSS_DR8&DSATAB=PhotoObjAll&, http://wfaudata.roe.ac.uk/sdssdr8-dsa/DirectCone?DSACAT=SDSS_DR8&DSATAB=SpecObjAll&, http://wfaudata.roe.ac.uk/twomass-dsa/DirectCone?DSACAT=TWOMASS&DSATAB=twomass_psc&, 
http://wfaudata.roe.ac.uk/twomass-dsa/DirectCone?DSACAT=TWOMASS&DSATAB=twomass_xsc&, http://www.nofs.navy.mil/cgi-bin/vo_cone.cgi?CAT=USNO-A2&, http://www.nofs.navy.mil/cgi-bin/vo_cone.cgi?CAT=USNO-B1& + +# VO Table warning codes that are considered non-critical +noncrit_warnings = W03, W06, W07, W09, W10, W15, W17, W20, W21, W22, W27, W28, W29, W41, W42, W48, W50 diff --git a/astropy/config/tests/data/deprecated.cfg b/astropy/config/tests/data/deprecated.cfg new file mode 100644 index 0000000..a6cb084 --- /dev/null +++ b/astropy/config/tests/data/deprecated.cfg @@ -0,0 +1,2 @@ +[table.pprint] +max_lines = 25 diff --git a/astropy/config/tests/data/empty.cfg b/astropy/config/tests/data/empty.cfg new file mode 100644 index 0000000..a069dfd --- /dev/null +++ b/astropy/config/tests/data/empty.cfg @@ -0,0 +1,15 @@ +## Use Unicode characters when outputting values, and writing widgets to the +## console. +#unicode_output = False + +[utils.console] + +## When True, use ANSI color escape sequences when writing to the console. +# use_color = True + +[logger] + +## Threshold for the logging messages. Logging messages that are less severe +## than this level will be ignored. The levels are 'DEBUG', 'INFO', 'WARNING', +## 'ERROR' +# log_level = INFO diff --git a/astropy/config/tests/data/not_empty.cfg b/astropy/config/tests/data/not_empty.cfg new file mode 100644 index 0000000..c7a660f --- /dev/null +++ b/astropy/config/tests/data/not_empty.cfg @@ -0,0 +1,15 @@ +## Use Unicode characters when outputting values, and writing widgets to the +## console. +#unicode_output = False + +[utils.console] + +## When True, use ANSI color escape sequences when writing to the console. +# use_color = True + +[logger] + +## Threshold for the logging messages. Logging messages that are less severe +## than this level will be ignored. 
The levels are 'DEBUG', 'INFO', 'WARNING', +## 'ERROR' +log_level = INFO diff --git a/astropy/config/tests/test_configs.py b/astropy/config/tests/test_configs.py new file mode 100644 index 0000000..81193bc --- /dev/null +++ b/astropy/config/tests/test_configs.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import io +import os +import sys + +from ...tests.helper import catch_warnings +from ...extern import six +from ...utils.compat import subprocess + +from ...utils.data import get_pkg_data_filename +from .. import configuration +from ...utils.exceptions import AstropyDeprecationWarning + + +def test_paths(): + from ..paths import get_config_dir, get_cache_dir + + assert 'astropy' in get_config_dir() + assert 'astropy' in get_cache_dir() + + +def test_config_file(): + from ..configuration import get_config, reload_config, save_config + + apycfg = get_config('astropy') + assert apycfg.filename.endswith('astropy.cfg') + + cfgsec = get_config('astropy.config') + assert cfgsec.depth == 1 + assert cfgsec.name == 'config' + assert cfgsec.parent.filename.endswith('astropy.cfg') + + reload_config('astropy') + + +def test_configitem(): + from ..configuration import ConfigurationItem, get_config + + ci = ConfigurationItem('tstnm', 34, 'this is a Description') + + assert ci.module == 'astropy.config.tests.test_configs' + assert ci() == 34 + assert ci.description == 'this is a Description' + + sec = get_config(ci.module) + assert sec['tstnm'] == 34 + + ci.description = 'updated Descr' + ci.set(32) + assert ci() == 32 + + # It's useful to go back to the default to allow other test functions to + # call this one and still be in the default configuration. 
+ ci.description = 'this is a Description' + ci.set(34) + assert ci() == 34 + + +def test_configitem_types(): + from ..configuration import ConfigurationItem + from ...tests.helper import pytest + + ci1 = ConfigurationItem('tstnm1', 34) + assert isinstance(ci1(), int) + + ci2 = ConfigurationItem('tstnm2', 34.3) + assert isinstance(ci2(), float) + + ci3 = ConfigurationItem('tstnm3', True) + assert isinstance(ci3(), bool) + + ci4 = ConfigurationItem('tstnm4', 'astring') + assert isinstance(ci4(), six.text_type) + + with pytest.raises(TypeError): + ci1.set(34.3) + ci2.set(12) # this would should succeed as up-casting + with pytest.raises(TypeError): + ci3.set('fasd') + with pytest.raises(TypeError): + ci4.set(546.245) + + +def test_configitem_options(tmpdir): + from ..configuration import ConfigurationItem, get_config + from ...tests.helper import pytest + + cio = ConfigurationItem('tstnmo', ['op1', 'op2', 'op3']) + sec = get_config(cio.module) + + assert isinstance(cio(), six.text_type) + assert cio() == 'op1' + assert sec['tstnmo'] == 'op1' + + cio.set('op2') + with pytest.raises(TypeError): + cio.set('op5') + assert sec['tstnmo'] == 'op2' + + # now try saving + apycfg = sec + while apycfg.parent is not apycfg: + apycfg = apycfg.parent + f = tmpdir.join('astropy.cfg') + with io.open(f.strpath, 'w', encoding='utf-8') as fd: + apycfg.write(fd) + with io.open(f.strpath, 'rU', encoding='utf-8') as fd: + lns = [x.strip() for x in f.readlines()] + + assert 'tstnmo = op2' in lns + + +def test_config_noastropy_fallback(monkeypatch): + """ + Tests to make sure configuration items fall back to their defaults when + there's a problem accessing the astropy directory + """ + from ...tests.helper import pytest + from .. 
import paths, configuration + + # make sure the config directory is not searched + monkeypatch.setenv(str('XDG_CONFIG_HOME'), 'foo') + monkeypatch.delenv(str('XDG_CONFIG_HOME')) + + # make sure the _find_or_create_astropy_dir function fails as though the + # astropy dir could not be accessed + def osraiser(dirnm, linkto): + raise OSError + monkeypatch.setattr(paths, '_find_or_create_astropy_dir', osraiser) + + # also have to make sure the stored configuration objects are cleared + monkeypatch.setattr(configuration, '_cfgobjs', {}) + + with pytest.raises(OSError): + # make sure the config dir search fails + paths.get_config_dir() + + # now run the basic tests, and make sure the warning about no astropy + # is present + with catch_warnings(configuration.ConfigurationMissingWarning) as w: + test_configitem() + assert len(w) == 1 + w = w[0] + assert 'Configuration defaults will be used' in str(w.message) + + +def test_configitem_setters(): + from ..configuration import ConfigurationItem + + ci = ConfigurationItem('tstnm12', 42, 'this is another Description') + + assert ci() == 42 + with ci.set_temp(45): + assert ci() == 45 + assert ci() == 42 + + ci.set(43) + assert ci() == 43 + + with ci.set_temp(46): + assert ci() == 46 + + # Make sure it is reset even with Exception + try: + with ci.set_temp(47): + raise Exception + except: + pass + + assert ci() == 43 + + +def test_empty_config_file(): + from ..configuration import is_unedited_config_file + + fn = get_pkg_data_filename('data/empty.cfg') + assert is_unedited_config_file(fn) + + fn = get_pkg_data_filename('data/not_empty.cfg') + assert not is_unedited_config_file(fn) + + fn = get_pkg_data_filename('data/astropy.0.3.cfg') + assert is_unedited_config_file(fn) + + fn = get_pkg_data_filename('data/astropy.0.3.windows.cfg') + assert is_unedited_config_file(fn) + + +def test_alias(): + import astropy + + with catch_warnings() as w: + with astropy.UNICODE_OUTPUT.set_temp(False): + pass + + assert len(w) == 1 + assert 
str(w[0].message) == ( + "Since 0.4, config parameter 'astropy.UNICODE_OUTPUT' is deprecated. " + "Use 'astropy.conf.unicode_output' instead.") + + +def test_alias2(): + from ...coordinates import name_resolve + from ...utils.data import conf + + # REMOVE in astropy 0.5 + + with catch_warnings() as w: + x = name_resolve.NAME_RESOLVE_TIMEOUT() + assert x == 3 + assert len(w) == 1 + assert str(w[0].message) == ( + "Since 0.4, config parameter " + "'astropy.coordinates.name_resolve.NAME_RESOLVE_TIMEOUT' is deprecated. " + "Use 'astropy.utils.data.conf.remote_timeout' instead.") + + with catch_warnings() as w: + name_resolve.NAME_RESOLVE_TIMEOUT.set(10) + assert conf.remote_timeout == 10 + assert len(w) == 1 + assert str(w[0].message) == ( + "Since 0.4, config parameter " + "'astropy.coordinates.name_resolve.NAME_RESOLVE_TIMEOUT' is deprecated. " + "Use 'astropy.utils.data.conf.remote_timeout' instead.") + + with catch_warnings() as w: + with name_resolve.NAME_RESOLVE_TIMEOUT.set_temp(42): + assert conf.remote_timeout == 42 + assert len(w) == 1 + assert str(w[0].message) == ( + "Since 0.4, config parameter " + "'astropy.coordinates.name_resolve.NAME_RESOLVE_TIMEOUT' is deprecated. " + "Use 'astropy.utils.data.conf.remote_timeout' instead.") + assert name_resolve.NAME_RESOLVE_TIMEOUT() == 10 + assert conf.remote_timeout == 10 + + with catch_warnings() as w: + name_resolve.NAME_RESOLVE_TIMEOUT.reload() + assert len(w) == 1 + assert str(w[0].message) == ( + "Since 0.4, config parameter " + "'astropy.coordinates.name_resolve.NAME_RESOLVE_TIMEOUT' is deprecated. 
" + "Use 'astropy.utils.data.conf.remote_timeout' instead.") + assert x == 3 + assert name_resolve.NAME_RESOLVE_TIMEOUT() == 3 + + +class TestAliasRead(object): + def setup_class(self): + configuration._override_config_file = get_pkg_data_filename('data/alias.cfg') + + def test_alias_read(self): + from astropy.utils.data import conf + + with catch_warnings() as w: + conf.reload() + assert conf.remote_timeout == 42 + + assert len(w) == 1 + assert str(w[0].message).startswith( + "Config parameter 'name_resolve_timeout' in section " + "[coordinates.name_resolve]") + + def teardown_class(self): + from astropy.utils.data import conf + + configuration._override_config_file = None + conf.reload() + + +def test_configitem_unicode(tmpdir): + from ..configuration import ConfigurationItem, get_config + + cio = ConfigurationItem('астрономия', 'ასტრონომიის') + sec = get_config(cio.module) + + assert isinstance(cio(), six.text_type) + assert cio() == 'ასტრონომიის' + assert sec['астрономия'] == 'ასტრონომიის' + + +def test_warning_move_to_top_level(): + # Check that the warning about deprecation config items in the + # file works. See #2514 + from ... import conf + + configuration._override_config_file = get_pkg_data_filename('data/deprecated.cfg') + + try: + with catch_warnings(AstropyDeprecationWarning) as w: + conf.reload() + conf.max_lines + assert len(w) == 1 + finally: + configuration._override_config_file = None + conf.reload() + + +def test_no_home(): + # "import astropy" fails when neither $HOME or $XDG_CONFIG_HOME + # are set. To test, we unset those environment variables for a + # subprocess and try to import astropy. 
+ + test_path = os.path.dirname(__file__) + astropy_path = os.path.abspath( + os.path.join(test_path, '..', '..', '..')) + + env = os.environ.copy() + paths = [astropy_path] + if env.get('PYTHONPATH'): + paths.append(env.get('PYTHONPATH')) + env[str('PYTHONPATH')] = str(os.pathsep.join(paths)) + + for val in ['HOME', 'XDG_CONFIG_HOME']: + if val in env: + del env[val] + + retcode = subprocess.check_call( + [sys.executable, '-c', 'import astropy'], + env=env) + + assert retcode == 0 + + +def test_unedited_template(): + # Test that the config file is written at most once + config_dir = os.path.join(os.path.dirname(__file__), '..', '..') + configuration.update_default_config('astropy', config_dir) + assert configuration.update_default_config('astropy', config_dir) is False diff --git a/astropy/conftest.py b/astropy/conftest.py new file mode 100644 index 0000000..ef05070 --- /dev/null +++ b/astropy/conftest.py @@ -0,0 +1,7 @@ +# this contains imports plugins that configure py.test for astropy tests. +# by importing them here in conftest.py they are discoverable by py.test +# no matter how it is invoked within the astropy tree. + +from .tests.pytest_plugins import * + +enable_deprecations_as_exceptions() diff --git a/astropy/constants/__init__.py b/astropy/constants/__init__.py new file mode 100644 index 0000000..6920148 --- /dev/null +++ b/astropy/constants/__init__.py @@ -0,0 +1,54 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" +Contains astronomical and physical constants for use in Astropy or other +places. + +A typical use case might be:: + + >>> from astropy.constants import c, m_e + >>> # ... define the mass of something you want the rest energy of as m ... + >>> m = m_e + >>> E = m * c**2 + >>> E.to('MeV') # doctest: +FLOAT_CMP + + +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import itertools + +# Hack to make circular imports with units work +try: + from .. 
import units + del units +except ImportError: + pass + +from .constant import Constant, EMConstant +from . import si +from . import cgs + +# for updating the constants module docstring +_lines = [ + 'The following constants are available:\n', + '========== ============== ================ =========================', + ' Name Value Unit Description', + '========== ============== ================ =========================', +] + +for _nm, _c in itertools.chain(sorted(vars(si).items()), + sorted(vars(cgs).items())): + if isinstance(_c, Constant) and _c.abbrev not in locals(): + locals()[_c.abbrev] = _c.__class__(_c.abbrev, _c.name, _c.value, + _c._unit, _c.uncertainty, + _c.reference) + + _lines.append('{0:^10} {1:^14.9g} {2:^16} {3}'.format( + _c.abbrev, _c.value, _c._unit, _c.name)) + +_lines.append(_lines[1]) + +__doc__ += '\n'.join(_lines) + +del _lines, _nm, _c diff --git a/astropy/constants/cgs.py b/astropy/constants/cgs.py new file mode 100644 index 0000000..af31eae --- /dev/null +++ b/astropy/constants/cgs.py @@ -0,0 +1,28 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" +Astronomical and physics constants in cgs units. See :mod:`astropy.constants` +for a complete listing of constants defined in Astropy. +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +# Only constants that cannot be converted directly from S.I. are defined here. + +from .constant import EMConstant +from . 
import si + +# PHYSICAL CONSTANTS + +# Electron charge +e_esu = EMConstant(si.e.abbrev, si.e.name, si.e.value * si.c.value * 10.0, + 'statC', si.e.uncertainty * si.c.value * 10.0, + si.e.reference, system='esu') + + +e_emu = EMConstant(si.e.abbrev, si.e.name, si.e.value / 10, 'abC', + si.e.uncertainty / 10, si.e.reference, system='emu') + + +e_gauss = EMConstant(si.e.abbrev, si.e.name, si.e.value * si.c.value * 10.0, + 'Fr', si.e.uncertainty * si.c.value * 10.0, + si.e.reference, system='gauss') diff --git a/astropy/constants/constant.py b/astropy/constants/constant.py new file mode 100644 index 0000000..c01590f --- /dev/null +++ b/astropy/constants/constant.py @@ -0,0 +1,220 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) +from ..extern import six + +import functools +import types +import warnings + +from ..units.core import Unit, UnitsError +from ..units.quantity import Quantity +from ..utils import lazyproperty +from ..utils.exceptions import AstropyUserWarning +from ..utils.misc import InheritDocstrings + +__all__ = ['Constant', 'EMConstant'] + + +class ConstantMeta(InheritDocstrings): + """Metaclass for the :class:`Constant`. The primary purpose of this is to + wrap the double-underscore methods of :class:`Quantity` which is the + superclass of :class:`Constant`. + + In particular this wraps the operator overloads such as `__add__` to + prevent their use with constants such as ``e`` from being used in + expressions without specifying a system. The wrapper checks to see if the + constant is listed (by name) in ``Constant._has_incompatible_units``, a set + of those constants that are defined in different systems of units are + physically incompatible. 
It also performs this check on each `Constant` if + it hasn't already been performed (the check is deferred until the + `Constant` is actually used in an expression to speed up import times, + among other reasons). + """ + + def __new__(mcls, name, bases, d): + def wrap(meth): + @functools.wraps(meth) + def wrapper(self, *args, **kwargs): + name_lower = self.name.lower() + instances = Constant._registry[name_lower] + if not self._checked_units: + for inst in six.itervalues(instances): + try: + self.unit.to(inst.unit) + except UnitsError: + Constant._has_incompatible_units.add(name_lower) + self._checked_units = True + + if (not self.system and + name_lower in Constant._has_incompatible_units): + systems = sorted([x for x in instances if x]) + raise TypeError( + 'Constant {0!r} does not have physically compatible ' + 'units across all systems of units and cannot be ' + 'combined with other values without specifying a ' + 'system (eg. {1}.{2})'.format(self.abbrev, self.abbrev, + systems[0])) + + return meth(self, *args, **kwargs) + + return wrapper + + # The wrapper applies to so many of the __ methods that it's easier to + # just exclude the ones it doesn't apply to + exclude = set(['__new__', '__array_finalize__', '__array_wrap__', + '__dir__', '__getattr__', '__init__', '__str__', + '__repr__', '__hash__', '__iter__', '__getitem__', + '__len__', '__nonzero__', '__quantity_subclass__']) + for attr, value in list(six.iteritems(vars(Quantity))): + if (isinstance(value, types.FunctionType) and + attr.startswith('__') and attr.endswith('__') and + attr not in exclude): + d[attr] = wrap(value) + + return super(ConstantMeta, mcls).__new__(mcls, name, bases, d) + + +@six.add_metaclass(ConstantMeta) +class Constant(Quantity): + """A physical or astronomical constant. + + These objects are quantities that are meant to represent physical + constants. 
+ """ + _registry = {} + _has_incompatible_units = set() + + def __new__(cls, abbrev, name, value, unit, uncertainty, reference, + system=None): + name_lower = name.lower() + instances = Constant._registry.setdefault(name_lower, {}) + if system in instances: + warnings.warn('Constant {0!r} is already has a definition in the ' + '{1!r} system'.format(name, system), AstropyUserWarning) + + inst = super(Constant, cls).__new__(cls, value) + + for c in six.itervalues(instances): + if system is not None and not hasattr(c.__class__, system): + setattr(c, system, inst) + if c.system is not None and not hasattr(inst.__class__, c.system): + setattr(inst, c.system, c) + + instances[system] = inst + + return inst + + def __init__(self, abbrev, name, value, unit, uncertainty, reference, + system=None): + self._abbrev = abbrev + self._name = name + self._value = value + self._unit = unit + self._uncertainty = uncertainty + self._reference = reference + self._system = system + + self._checked_units = False + + def __repr__(self): + return (''.format(self.name, self.value, + self.uncertainty, str(self.unit), + self.reference)) + + def __str__(self): + return (' Name = {0}\n' + ' Value = {1}\n' + ' Error = {2}\n' + ' Units = {3}\n' + ' Reference = {4}'.format(self.name, self.value, + self.uncertainty, self.unit, + self.reference)) + + def __quantity_subclass__(self, unit): + return super(Constant, self).__quantity_subclass__(unit)[0], False + + def copy(self): + """ + Return a copy of this `Constant` instance. Since they are by + definition immutable, this merely returns another reference to + ``self``. + """ + return self + __deepcopy__ = __copy__ = copy + + @property + def abbrev(self): + """A typical ASCII text abbreviation of the constant, also generally + the same as the Python variable used for this constant. 
+ """ + + return self._abbrev + + @property + def name(self): + """The full name of the constant.""" + + return self._name + + @lazyproperty + def unit(self): + """The unit(s) in which this constant is defined.""" + + return Unit(self._unit) + + @property + def uncertainty(self): + """The known uncertainty in this constant's value.""" + + return self._uncertainty + + @property + def reference(self): + """The source used for the value of this constant.""" + + return self._reference + + @property + def system(self): + """The system of units in which this constant is defined (typically + `None` so long as the constant's units can be directly converted + between systems). + """ + + return self._system + + @property + def si(self): + """If the Constant is defined in the SI system return that instance of + the constant, else convert to a Quantity in the appropriate SI units. + """ + + instances = Constant._registry[self.name.lower()] + return instances.get('si') or super(Constant, self).si + + @property + def cgs(self): + """If the Constant is defined in the CGS system return that instance of + the constant, else convert to a Quantity in the appropriate CGS units. + """ + + instances = Constant._registry[self.name.lower()] + return instances.get('cgs') or super(Constant, self).cgs + + +class EMConstant(Constant): + """An electromagnetic constant.""" + + @property + def cgs(self): + """Overridden for EMConstant to raise a `~.exceptions.TypeError` + emphasizing that there are multiple EM extensions to CGS. + """ + + raise TypeError("Cannot convert EM constants to cgs because there " + "are different systems for E.M constants within the " + "c.g.s system (ESU, Gaussian, etc.). Instead, " + "directly use the constant with the appropriate " + "suffix (e.g. 
e.esu, e.gauss, etc.).") diff --git a/astropy/constants/setup_package.py b/astropy/constants/setup_package.py new file mode 100644 index 0000000..3cd9f7c --- /dev/null +++ b/astropy/constants/setup_package.py @@ -0,0 +1,5 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + + +def requires_2to3(): + return False diff --git a/astropy/constants/si.py b/astropy/constants/si.py new file mode 100644 index 0000000..6f6f7dc --- /dev/null +++ b/astropy/constants/si.py @@ -0,0 +1,154 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" +Astronomical and physics constants in SI units. See :mod:`astropy.constants` +for a complete listing of constants defined in Astropy. +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import numpy as np + +from .constant import Constant, EMConstant + +# PHYSICAL CONSTANTS + +# Planck constant +h = Constant('h', "Planck constant", 6.62606957e-34, 'J s', 0.00000029e-34, + 'CODATA 2010', system='si') + +# Reduced Planck constant +hbar = Constant('hbar', "Reduced Planck constant", h.value * 0.5 / np.pi, + 'J s', h.uncertainty * 0.5 / np.pi, h.reference, system='si') + +# Boltzmann constant +k_B = Constant('k_B', "Boltzmann constant", 1.3806488e-23, 'J / (K)', + 0.0000013e-23, 'CODATA 2010', system='si') + +# Speed of light +c = Constant('c', "Speed of light in vacuum", 2.99792458e8, 'm / (s)', 0., + 'CODATA 2010', system='si') + +# Gravitional constant +G = Constant('G', "Gravitational constant", 6.67384e-11, 'm3 / (kg s2)', + 0.00080e-11, 'CODATA 2010', system='si') + +# Standard acceleration of gravity +g0 = Constant('g0', "Standard acceleration of gravity", 9.80665, 'm / s2', 0.0, + 'CODATA 2010', system='si') + +# Proton mass +m_p = Constant('m_p', "Proton mass", 1.672621777e-27, 'kg', 0.000000074e-27, + 'CODATA 2010', system='si') + +# Neutron mass +m_n = Constant('m_n', "Neutron mass", 1.674927351e-27, 'kg', 0.000000074e-27, + 'CODATA 2010', system='si') + 
+# Electron mass +m_e = Constant('m_e', "Electron mass", 9.10938291e-31, 'kg', 0.00000040e-31, + 'CODATA 2010', system='si') + +# Atomic mass +u = Constant('u', "Atomic mass", 1.660538921e-27, 'kg', 0.000000073e-27, + 'CODATA 2010', system='si') + +# Stefan-Boltzmann constant +sigma_sb = Constant('sigma_sb', "Stefan-Boltzmann constant", 5.670373e-8, + 'W / (K4 m2)', 0.000021e-8, 'CODATA 2010', system='si') + +# Electron charge; EM constants require a system to be specified +e = EMConstant('e', 'Electron charge', 1.602176565e-19, 'C', 0.000000035e-19, + 'CODATA 2010', system='si') + +# Electric constant +eps0 = EMConstant('eps0', 'Electric constant', 8.854187817e-12, 'F/m', 0.0, + 'CODATA 2010', system='si') + +# Avogadro's number +N_A = Constant('N_A', "Avogadro's number", 6.02214129e23, '1 / (mol)', + 0.00000027e23, 'CODATA 2010', system='si') + +# Gas constant +R = Constant('R', "Gas constant", 8.3144621, 'J / (K mol)', 0.0000075, + 'CODATA 2010', system='si') + +# Rydberg constant +Ryd = Constant('Ryd', 'Rydberg constant', 10973731.568539, '1 / (m)', 0.000055, + 'CODATA 2010', system='si') + +# Bohr radius +a0 = Constant('a0', "Bohr radius", 0.52917721092e-10, 'm', 0.00000000017e-10, + 'CODATA 2010', system='si') + +# Bohr magneton +muB = Constant('muB', "Bohr magneton", 927.400968e-26, 'J/T', 0.00002e-26, + 'CODATA 2010', system='si') + +# Fine structure constant +alpha = Constant('alpha', "Fine-structure constant", 7.2973525698e-3, '', + 0.0000000024e-3, 'CODATA 2010', system='si') + +# Atmosphere +atm = Constant('atmosphere', "Atmosphere", 101325, 'Pa', 0.0, + 'CODATA 2010', system='si') + +# Magnetic constant +mu0 = Constant('mu0', "Magnetic constant", 4.0e-7 * np.pi, 'N/A2', 0.0, + 'CODATA 2010', system='si') + +# DISTANCE + +# Astronomical Unit +au = Constant('au', "Astronomical Unit", 1.49597870700e11, 'm', 0.0, + "IAU 2012 Resolution B2", system='si') + +# Parsec + +pc = Constant('pc', "Parsec", au.value / np.tan(np.radians(1. 
/ 3600.)), 'm', + au.uncertainty / np.tan(np.radians(1. / 3600.)), + "Derived from au", system='si') + +# Kiloparsec +kpc = Constant('kpc', "Kiloparsec", + 1000. * au.value / np.tan(np.radians(1. / 3600.)), 'm', + 1000. * au.uncertainty / np.tan(np.radians(1. / 3600.)), + "Derived from au", system='si') + +# Wien wavelength displacement law constant +b_wien = Constant('b_wien', 'Wien wavelength displacement law constant', + 2.8977721e-3, 'm K', 0.0000026e-3, 'CODATA 2010', system='si') + +# SOLAR QUANTITIES + +# Solar luminosity +L_sun = Constant('L_sun', "Solar luminosity", 3.846e26, 'W', 0.0005e26, + "Allen's Astrophysical Quantities 4th Ed.", system='si') + +# Solar mass +M_sun = Constant('M_sun', "Solar mass", 1.9891e30, 'kg', 0.00005e30, + "Allen's Astrophysical Quantities 4th Ed.", system='si') + +# Solar radius +R_sun = Constant('R_sun', "Solar radius", 6.95508e8, 'm', 0.00026e8, + "Allen's Astrophysical Quantities 4th Ed.", system='si') + + +# OTHER SOLAR SYSTEM QUANTITIES + +# Jupiter mass +M_jup = Constant('M_jup', "Jupiter mass", 1.8987e27, 'kg', 0.00005e27, + "Allen's Astrophysical Quantities 4th Ed.", system='si') + +# Jupiter equatorial radius +R_jup = Constant('R_jup', "Jupiter equatorial radius", 7.1492e7, 'm', + 0.00005e7, "Allen's Astrophysical Quantities 4th Ed.", + system='si') + +# Earth mass +M_earth = Constant('M_earth', "Earth mass", 5.9742e24, 'kg', 0.00005e24, + "Allen's Astrophysical Quantities 4th Ed.", system='si') + +# Earth equatorial radius +R_earth = Constant('R_earth', "Earth equatorial radius", 6.378136e6, 'm', + 0.0000005e6, "Allen's Astrophysical Quantities 4th Ed.", + system='si') diff --git a/astropy/constants/tests/__init__.py b/astropy/constants/tests/__init__.py new file mode 100644 index 0000000..800d82e --- /dev/null +++ b/astropy/constants/tests/__init__.py @@ -0,0 +1,2 @@ +from __future__ import (absolute_import, division, print_function, + unicode_literals) diff --git a/astropy/constants/tests/test_constant.py 
b/astropy/constants/tests/test_constant.py new file mode 100644 index 0000000..8ce5c46 --- /dev/null +++ b/astropy/constants/tests/test_constant.py @@ -0,0 +1,127 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +# TEST_UNICODE_LITERALS + +from __future__ import (absolute_import, division, print_function, + unicode_literals) +from ...extern import six + +import copy + +from .. import Constant +from ...units import Quantity as Q +from ...tests.helper import pytest + + +def test_c(): + + from .. import c + + # c is an exactly defined constant, so it shouldn't be changing + assert c.value == 2.99792458e8 # default is S.I. + assert c.si.value == 2.99792458e8 + assert c.cgs.value == 2.99792458e10 + + # make sure it has the necessary attributes and they're not blank + assert c.uncertainty == 0 # c is a *defined* quantity + assert c.name + assert c.reference + assert c.unit + + +def test_h(): + + from .. import h + + # check that the value is fairly close to what it should be (not exactly + # checking because this might get updated in the future) + assert abs(h.value - 6.626e-34) < 1e-38 + assert abs(h.si.value - 6.626e-34) < 1e-38 + assert abs(h.cgs.value - 6.626e-27) < 1e-31 + + # make sure it has the necessary attributes and they're not blank + assert h.uncertainty + assert h.name + assert h.reference + assert h.unit + + +def test_e(): + """Tests for #572 demonstrating how EM constants should behave.""" + + from .. 
import e + + # A test quantity + E = Q(100, 'V/m') + + # Without specifying a system e should not combine with other quantities + pytest.raises(TypeError, lambda: e * E) + # Try it again (as regression test on a minor issue mentioned in #745 where + # repeated attempts to use e in an expression resulted in UnboundLocalError + # instead of TypeError) + pytest.raises(TypeError, lambda: e * E) + + # e.cgs is too ambiguous and should not work at all + pytest.raises(TypeError, lambda: e.cgs * E) + + assert isinstance(e.si, Q) + assert isinstance(e.gauss, Q) + assert isinstance(e.esu, Q) + + assert e.si * E == Q(100, 'eV/m') + assert e.gauss * E == Q(e.gauss.value * E.value, 'Fr V/m') + assert e.esu * E == Q(e.esu.value * E.value, 'Fr V/m') + + +def test_g0(): + """Tests for #1263 demonstrating how g0 constant should behave.""" + from .. import g0 + + # g0 is an exactly defined constant, so it shouldn't be changing + assert g0.value == 9.80665 # default is S.I. + assert g0.si.value == 9.80665 + assert g0.cgs.value == 9.80665e2 + + # make sure it has the necessary attributes and they're not blank + assert g0.uncertainty == 0 # g0 is a *defined* quantity + assert g0.name + assert g0.reference + assert g0.unit + + # Check that its unit have the correct physical type + assert g0.unit.physical_type == 'acceleration' + + +def test_b_wien(): + """b_wien should give the correct peak wavelength for + given blackbody temperature. The Sun is used in this test. + + """ + from .. import b_wien + from ... import units as u + t = 5778 * u.K + w = (b_wien / t).to(u.nm) + assert round(w.value) == 502 + + +def test_unit(): + + from ... import units as u + + from ... import constants as const + + for key, val in six.iteritems(vars(const)): + if isinstance(val, Constant): + # Getting the unit forces the unit parser to run. Confirm + # that none of the constants defined in astropy have + # invalid unit. + assert not isinstance(val.unit, u.UnrecognizedUnit) + + +def test_copy(): + from ... 
import constants as const + cc = copy.deepcopy(const.c) + assert cc == const.c + + cc = copy.copy(const.c) + assert cc == const.c diff --git a/astropy/convolution/__init__.py b/astropy/convolution/__init__.py new file mode 100644 index 0000000..a188e1d --- /dev/null +++ b/astropy/convolution/__init__.py @@ -0,0 +1,15 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from .core import * +from .kernels import * +from .utils import discretize_model + +try: + # Not guaranteed available at setup time + from .convolve import convolve, convolve_fft +except ImportError: + if not _ASTROPY_SETUP_: + raise diff --git a/astropy/convolution/boundary_extend.c b/astropy/convolution/boundary_extend.c new file mode 100644 index 0000000..9d49928 --- /dev/null +++ b/astropy/convolution/boundary_extend.c @@ -0,0 +1,8608 @@ +/* Generated by Cython 0.18 on Tue Sep 23 16:50:23 2014 */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02040000 + #error Cython requires Python 2.4+. 
+#else +#include /* For offsetof */ +#ifndef offsetof +#define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION +#define CYTHON_COMPILING_IN_PYPY 1 +#define CYTHON_COMPILING_IN_CPYTHON 0 +#else +#define CYTHON_COMPILING_IN_PYPY 0 +#define CYTHON_COMPILING_IN_CPYTHON 1 +#endif +#if PY_VERSION_HEX < 0x02050000 + typedef int Py_ssize_t; + #define PY_SSIZE_T_MAX INT_MAX + #define PY_SSIZE_T_MIN INT_MIN + #define PY_FORMAT_SIZE_T "" + #define CYTHON_FORMAT_SSIZE_T "" + #define PyInt_FromSsize_t(z) PyInt_FromLong(z) + #define PyInt_AsSsize_t(o) __Pyx_PyInt_AsInt(o) + #define PyNumber_Index(o) ((PyNumber_Check(o) && !PyFloat_Check(o)) ? 
PyNumber_Int(o) : \ + (PyErr_Format(PyExc_TypeError, \ + "expected index value, got %.200s", Py_TYPE(o)->tp_name), \ + (PyObject*)0)) + #define __Pyx_PyIndex_Check(o) (PyNumber_Check(o) && !PyFloat_Check(o) && \ + !PyComplex_Check(o)) + #define PyIndex_Check __Pyx_PyIndex_Check + #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message) + #define __PYX_BUILD_PY_SSIZE_T "i" +#else + #define __PYX_BUILD_PY_SSIZE_T "n" + #define CYTHON_FORMAT_SSIZE_T "z" + #define __Pyx_PyIndex_Check PyIndex_Check +#endif +#if PY_VERSION_HEX < 0x02060000 + #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt) + #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) + #define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size) + #define PyVarObject_HEAD_INIT(type, size) \ + PyObject_HEAD_INIT(type) size, + #define PyType_Modified(t) + typedef struct { + void *buf; + PyObject *obj; + Py_ssize_t len; + Py_ssize_t itemsize; + int readonly; + int ndim; + char *format; + Py_ssize_t *shape; + Py_ssize_t *strides; + Py_ssize_t *suboffsets; + void *internal; + } Py_buffer; + #define PyBUF_SIMPLE 0 + #define PyBUF_WRITABLE 0x0001 + #define PyBUF_FORMAT 0x0004 + #define PyBUF_ND 0x0008 + #define PyBUF_STRIDES (0x0010 | PyBUF_ND) + #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES) + #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES) + #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES) + #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES) + #define PyBUF_RECORDS (PyBUF_STRIDES | PyBUF_FORMAT | PyBUF_WRITABLE) + #define PyBUF_FULL (PyBUF_INDIRECT | PyBUF_FORMAT | PyBUF_WRITABLE) + typedef int (*getbufferproc)(PyObject *, Py_buffer *, int); + typedef void (*releasebufferproc)(PyObject *, Py_buffer *); +#endif +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ + PyCode_New(a, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_BUILTIN_MODULE_NAME 
"builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif +#if PY_MAJOR_VERSION < 3 && PY_MINOR_VERSION < 6 + #define PyUnicode_FromString(s) PyUnicode_Decode(s, strlen(s), "UTF-8", "strict") +#endif +#if PY_MAJOR_VERSION >= 3 + #define Py_TPFLAGS_CHECKTYPES 0 + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3) + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ? \ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) +#else + #define CYTHON_PEP393_ENABLED 0 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_READ(k, d, i) ((k=k), (Py_UCS4)(((Py_UNICODE*)d)[i])) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_VERSION_HEX < 0x02060000 + #define PyBytesObject PyStringObject + #define PyBytes_Type PyString_Type + #define PyBytes_Check PyString_Check + #define PyBytes_CheckExact PyString_CheckExact + #define PyBytes_FromString PyString_FromString + #define PyBytes_FromStringAndSize PyString_FromStringAndSize + #define PyBytes_FromFormat PyString_FromFormat + #define PyBytes_DecodeEscape PyString_DecodeEscape + #define PyBytes_AsString PyString_AsString + #define PyBytes_AsStringAndSize 
PyString_AsStringAndSize + #define PyBytes_Size PyString_Size + #define PyBytes_AS_STRING PyString_AS_STRING + #define PyBytes_GET_SIZE PyString_GET_SIZE + #define PyBytes_Repr PyString_Repr + #define PyBytes_Concat PyString_Concat + #define PyBytes_ConcatAndDel PyString_ConcatAndDel +#endif +#if PY_VERSION_HEX < 0x02060000 + #define PySet_Check(obj) PyObject_TypeCheck(obj, &PySet_Type) + #define PyFrozenSet_Check(obj) PyObject_TypeCheck(obj, &PyFrozenSet_Type) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_VERSION_HEX < 0x03020000 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300) + #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b) + #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value) + #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b) +#else + #define 
__Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0))) + #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1))) + #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1))) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) +#endif +#if PY_VERSION_HEX < 0x02050000 + #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),((char *)(n))) + #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a)) + #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),((char *)(n))) +#else + #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),(n)) + #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a)) + #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),(n)) +#endif +#if PY_VERSION_HEX < 0x02050000 + #define __Pyx_NAMESTR(n) ((char *)(n)) + #define __Pyx_DOCSTR(n) ((char *)(n)) +#else + #define __Pyx_NAMESTR(n) (n) + #define __Pyx_DOCSTR(n) (n) +#endif + + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#endif + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) +#define _USE_MATH_DEFINES +#endif +#include +#define __PYX_HAVE__astropy__convolution__boundary_extend +#define __PYX_HAVE_API__astropy__convolution__boundary_extend +#include "string.h" +#include "stdio.h" +#include "stdlib.h" +#include "numpy/arrayobject.h" +#include "numpy/ufuncobject.h" +#include "numpy/npy_math.h" +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#ifdef PYREX_WITHOUT_ASSERTIONS +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +#ifndef CYTHON_INLINE + #if defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif +#ifndef 
CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/ + +#define __Pyx_PyBytes_FromUString(s) PyBytes_FromString((char*)s) +#define __Pyx_PyBytes_AsUString(s) ((unsigned char*) PyBytes_AsString(s)) +#define __Pyx_Owned_Py_None(b) (Py_INCREF(Py_None), Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? (Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x); +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject*); +#if CYTHON_COMPILING_IN_CPYTHON +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) + + +#ifdef __GNUC__ + /* Test for GCC > 2.95 */ + #if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) + #else /* __GNUC__ > 2 ... */ + #define likely(x) (x) + #define unlikely(x) (x) + #endif /* __GNUC__ > 2 ... 
*/ +#else /* __GNUC__ */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ + +static PyObject *__pyx_m; +static PyObject *__pyx_b; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + +#if !defined(CYTHON_CCOMPLEX) + #if defined(__cplusplus) + #define CYTHON_CCOMPLEX 1 + #elif defined(_Complex_I) + #define CYTHON_CCOMPLEX 1 + #else + #define CYTHON_CCOMPLEX 0 + #endif +#endif +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #include + #else + #include + #endif +#endif +#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__) + #undef _Complex_I + #define _Complex_I 1.0fj +#endif + + +static const char *__pyx_f[] = { + "boundary_extend.pyx", + "numpy.pxd", + "type.pxd", +}; +#define IS_UNSIGNED(type) (((type) -1) > 0) +struct __Pyx_StructField_; +#define __PYX_BUF_FLAGS_PACKED_STRUCT (1 << 0) +typedef struct { + const char* name; /* for error messages only */ + struct __Pyx_StructField_* fields; + size_t size; /* sizeof(type) */ + size_t arraysize[8]; /* length of array in each dimension */ + int ndim; + char typegroup; /* _R_eal, _C_omplex, Signed _I_nt, _U_nsigned int, _S_truct, _P_ointer, _O_bject, c_H_ar */ + char is_unsigned; + int flags; +} __Pyx_TypeInfo; +typedef struct __Pyx_StructField_ { + __Pyx_TypeInfo* type; + const char* name; + size_t offset; +} __Pyx_StructField; +typedef struct { + __Pyx_StructField* field; + size_t parent_offset; +} __Pyx_BufFmt_StackElem; +typedef struct { + __Pyx_StructField root; + __Pyx_BufFmt_StackElem* head; + size_t fmt_offset; + size_t new_count, enc_count; + size_t struct_alignment; + int is_complex; + char enc_type; + char new_packmode; + char enc_packmode; + char is_valid_array; +} __Pyx_BufFmt_Context; + + +/* "numpy.pxd":723 + * # in Cython to enable them only on the right systems. 
+ * + * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + */ +typedef npy_int8 __pyx_t_5numpy_int8_t; + +/* "numpy.pxd":724 + * + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t + */ +typedef npy_int16 __pyx_t_5numpy_int16_t; + +/* "numpy.pxd":725 + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< + * ctypedef npy_int64 int64_t + * #ctypedef npy_int96 int96_t + */ +typedef npy_int32 __pyx_t_5numpy_int32_t; + +/* "numpy.pxd":726 + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< + * #ctypedef npy_int96 int96_t + * #ctypedef npy_int128 int128_t + */ +typedef npy_int64 __pyx_t_5numpy_int64_t; + +/* "numpy.pxd":730 + * #ctypedef npy_int128 int128_t + * + * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + */ +typedef npy_uint8 __pyx_t_5numpy_uint8_t; + +/* "numpy.pxd":731 + * + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t + */ +typedef npy_uint16 __pyx_t_5numpy_uint16_t; + +/* "numpy.pxd":732 + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< + * ctypedef npy_uint64 uint64_t + * #ctypedef npy_uint96 uint96_t + */ +typedef npy_uint32 __pyx_t_5numpy_uint32_t; + +/* "numpy.pxd":733 + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< + * #ctypedef npy_uint96 uint96_t + * #ctypedef npy_uint128 uint128_t + */ +typedef npy_uint64 __pyx_t_5numpy_uint64_t; + +/* "numpy.pxd":737 + * #ctypedef npy_uint128 uint128_t + * + * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< + * ctypedef npy_float64 float64_t + * #ctypedef npy_float80 float80_t 
+ */ +typedef npy_float32 __pyx_t_5numpy_float32_t; + +/* "numpy.pxd":738 + * + * ctypedef npy_float32 float32_t + * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< + * #ctypedef npy_float80 float80_t + * #ctypedef npy_float128 float128_t + */ +typedef npy_float64 __pyx_t_5numpy_float64_t; + +/* "numpy.pxd":747 + * # The int types are mapped a bit surprising -- + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t + */ +typedef npy_long __pyx_t_5numpy_int_t; + +/* "numpy.pxd":748 + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong longlong_t + * + */ +typedef npy_longlong __pyx_t_5numpy_long_t; + +/* "numpy.pxd":749 + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_ulong uint_t + */ +typedef npy_longlong __pyx_t_5numpy_longlong_t; + +/* "numpy.pxd":751 + * ctypedef npy_longlong longlong_t + * + * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t + */ +typedef npy_ulong __pyx_t_5numpy_uint_t; + +/* "numpy.pxd":752 + * + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulonglong_t + * + */ +typedef npy_ulonglong __pyx_t_5numpy_ulong_t; + +/* "numpy.pxd":753 + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_intp intp_t + */ +typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; + +/* "numpy.pxd":755 + * ctypedef npy_ulonglong ulonglong_t + * + * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< + * ctypedef npy_uintp uintp_t + * + */ +typedef npy_intp __pyx_t_5numpy_intp_t; + +/* "numpy.pxd":756 + * + * ctypedef npy_intp intp_t + * ctypedef npy_uintp uintp_t # 
<<<<<<<<<<<<<< + * + * ctypedef npy_double float_t + */ +typedef npy_uintp __pyx_t_5numpy_uintp_t; + +/* "numpy.pxd":758 + * ctypedef npy_uintp uintp_t + * + * ctypedef npy_double float_t # <<<<<<<<<<<<<< + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t + */ +typedef npy_double __pyx_t_5numpy_float_t; + +/* "numpy.pxd":759 + * + * ctypedef npy_double float_t + * ctypedef npy_double double_t # <<<<<<<<<<<<<< + * ctypedef npy_longdouble longdouble_t + * + */ +typedef npy_double __pyx_t_5numpy_double_t; + +/* "numpy.pxd":760 + * ctypedef npy_double float_t + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cfloat cfloat_t + */ +typedef npy_longdouble __pyx_t_5numpy_longdouble_t; + +/* "astropy/convolution/boundary_extend.pyx":7 + * + * DTYPE = np.float + * ctypedef np.float_t DTYPE_t # <<<<<<<<<<<<<< + * + * cdef inline int int_max(int a, int b): return a if a >= b else b + */ +typedef __pyx_t_5numpy_float_t __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t; +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< float > __pyx_t_float_complex; + #else + typedef float _Complex __pyx_t_float_complex; + #endif +#else + typedef struct { float real, imag; } __pyx_t_float_complex; +#endif + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< double > __pyx_t_double_complex; + #else + typedef double _Complex __pyx_t_double_complex; + #endif +#else + typedef struct { double real, imag; } __pyx_t_double_complex; +#endif + + +/*--- Type declarations ---*/ + +/* "numpy.pxd":762 + * ctypedef npy_longdouble longdouble_t + * + * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t + */ +typedef npy_cfloat __pyx_t_5numpy_cfloat_t; + +/* "numpy.pxd":763 + * + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< + * ctypedef npy_clongdouble clongdouble_t + * + */ 
+typedef npy_cdouble __pyx_t_5numpy_cdouble_t; + +/* "numpy.pxd":764 + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cdouble complex_t + */ +typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; + +/* "numpy.pxd":766 + * ctypedef npy_clongdouble clongdouble_t + * + * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew1(a): + */ +typedef npy_cdouble __pyx_t_5numpy_complex_t; +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/ + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil) \ + if (acquire_gil) { \ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure(); \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \ + PyGILState_Release(__pyx_gilstate_save); \ + } else { \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil) \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext() \ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, 
(PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif /* CYTHON_REFNANNY */ +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/ + +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/ + +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /*proto*/ + +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[], \ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, \ + const char* function_name); /*proto*/ + +static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact); /*proto*/ + +static CYTHON_INLINE int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* obj, + __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack); +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info); + +static 
CYTHON_INLINE long __Pyx_mod_long(long, long); /* proto */ + +static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/ +static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ + +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /*proto*/ + +static CYTHON_INLINE long __Pyx_div_long(long, long); /* proto */ + +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/ + +#define __Pyx_BufPtrStrided1d(type, buf, i0, s0) (type)((char*)buf + i0 * s0) +#define __Pyx_BufPtrStrided2d(type, buf, i0, s0, i1, s1) (type)((char*)buf + i0 * s0 + i1 * s1) +#define __Pyx_BufPtrStrided3d(type, buf, i0, s0, i1, s1, i2, s2) (type)((char*)buf + i0 * s0 + i1 * s1 + i2 * s2) +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +static CYTHON_INLINE int __Pyx_IterFinish(void); /*proto*/ + +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /*proto*/ + +typedef struct { + Py_ssize_t shape, strides, suboffsets; +} __Pyx_Buf_DimInfo; +typedef struct { + size_t refcount; + Py_buffer pybuffer; +} __Pyx_Buffer; +typedef struct { + __Pyx_Buffer *rcbuffer; + char *data; + __Pyx_Buf_DimInfo diminfo[8]; +} __Pyx_LocalBuf_ND; + +#if PY_MAJOR_VERSION < 3 + static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags); + static void __Pyx_ReleaseBuffer(Py_buffer *view); +#else + #define __Pyx_GetBuffer PyObject_GetBuffer + #define __Pyx_ReleaseBuffer PyBuffer_Release +#endif + + +static Py_ssize_t __Pyx_zeros[] = {0, 0, 0, 0, 0, 0, 0, 0}; +static Py_ssize_t __Pyx_minusones[] = {-1, -1, -1, -1, -1, -1, -1, -1}; + +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); /*proto*/ + +#if CYTHON_CCOMPLEX + #ifdef 
__cplusplus + #define __Pyx_CREAL(z) ((z).real()) + #define __Pyx_CIMAG(z) ((z).imag()) + #else + #define __Pyx_CREAL(z) (__real__(z)) + #define __Pyx_CIMAG(z) (__imag__(z)) + #endif +#else + #define __Pyx_CREAL(z) ((z).real) + #define __Pyx_CIMAG(z) ((z).imag) +#endif +#if defined(_WIN32) && defined(__cplusplus) && CYTHON_CCOMPLEX + #define __Pyx_SET_CREAL(z,x) ((z).real(x)) + #define __Pyx_SET_CIMAG(z,y) ((z).imag(y)) +#else + #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x) + #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y) +#endif + +static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float); + +#if CYTHON_CCOMPLEX + #define __Pyx_c_eqf(a, b) ((a)==(b)) + #define __Pyx_c_sumf(a, b) ((a)+(b)) + #define __Pyx_c_difff(a, b) ((a)-(b)) + #define __Pyx_c_prodf(a, b) ((a)*(b)) + #define __Pyx_c_quotf(a, b) ((a)/(b)) + #define __Pyx_c_negf(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zerof(z) ((z)==(float)0) + #define __Pyx_c_conjf(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_absf(z) (::std::abs(z)) + #define __Pyx_c_powf(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zerof(z) ((z)==0) + #define __Pyx_c_conjf(z) (conjf(z)) + #if 1 + #define __Pyx_c_absf(z) (cabsf(z)) + #define __Pyx_c_powf(a, b) (cpowf(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eqf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sumf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_difff(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prodf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quotf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_negf(__pyx_t_float_complex); + static CYTHON_INLINE int __Pyx_c_is_zerof(__pyx_t_float_complex); + static CYTHON_INLINE 
__pyx_t_float_complex __Pyx_c_conjf(__pyx_t_float_complex); + #if 1 + static CYTHON_INLINE float __Pyx_c_absf(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_powf(__pyx_t_float_complex, __pyx_t_float_complex); + #endif +#endif + +static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double); + +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq(a, b) ((a)==(b)) + #define __Pyx_c_sum(a, b) ((a)+(b)) + #define __Pyx_c_diff(a, b) ((a)-(b)) + #define __Pyx_c_prod(a, b) ((a)*(b)) + #define __Pyx_c_quot(a, b) ((a)/(b)) + #define __Pyx_c_neg(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero(z) ((z)==(double)0) + #define __Pyx_c_conj(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs(z) (::std::abs(z)) + #define __Pyx_c_pow(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero(z) ((z)==0) + #define __Pyx_c_conj(z) (conj(z)) + #if 1 + #define __Pyx_c_abs(z) (cabs(z)) + #define __Pyx_c_pow(a, b) (cpow(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg(__pyx_t_double_complex); + static CYTHON_INLINE int __Pyx_c_is_zero(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj(__pyx_t_double_complex); + #if 1 + static CYTHON_INLINE double __Pyx_c_abs(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow(__pyx_t_double_complex, __pyx_t_double_complex); + #endif +#endif + +static CYTHON_INLINE unsigned char 
__Pyx_PyInt_AsUnsignedChar(PyObject *); + +static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject *); + +static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject *); + +static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject *); + +static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject *); + +static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject *); + +static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject *); + +static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject *); + +static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject *); + +static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject *); + +static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject *); + +static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject *); + +static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject *); + +static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject *); + +static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject *); + +static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject *); + +static int __Pyx_check_binary_version(void); + +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif +#endif + +static PyObject *__Pyx_ImportModule(const char *name); /*proto*/ + +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); /*proto*/ + +typedef struct { + int code_line; + PyCodeObject* code_object; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject 
*__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); /*proto*/ + +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/ + + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'libc.stdlib' */ + +/* Module declarations from 'numpy' */ + +/* Module declarations from 'numpy' */ +static PyTypeObject *__pyx_ptype_5numpy_dtype = 0; +static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0; +static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0; +static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0; +static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0; +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/ + +/* Module declarations from 'cython' */ + +/* Module declarations from 'astropy.convolution.boundary_extend' */ +static CYTHON_INLINE int __pyx_f_7astropy_11convolution_15boundary_extend_int_max(int, int); /*proto*/ +static CYTHON_INLINE int __pyx_f_7astropy_11convolution_15boundary_extend_int_min(int, int); /*proto*/ +static __Pyx_TypeInfo __Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t = { "DTYPE_t", NULL, sizeof(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t), { 0 }, 0, 'R', 0, 0 }; +#define __Pyx_MODULE_NAME "astropy.convolution.boundary_extend" +int __pyx_module_is_main_astropy__convolution__boundary_extend = 0; + +/* Implementation of 'astropy.convolution.boundary_extend' */ +static PyObject *__pyx_builtin_ValueError; 
+static PyObject *__pyx_builtin_range; +static PyObject *__pyx_builtin_RuntimeError; +static PyObject *__pyx_pf_7astropy_11convolution_15boundary_extend_convolve1d_boundary_extend(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g); /* proto */ +static PyObject *__pyx_pf_7astropy_11convolution_15boundary_extend_2convolve2d_boundary_extend(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g); /* proto */ +static PyObject *__pyx_pf_7astropy_11convolution_15boundary_extend_4convolve3d_boundary_extend(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g); /* proto */ +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */ +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */ +static char __pyx_k_1[] = "Convolution kernel must have odd dimensions"; +static char __pyx_k_5[] = "ndarray is not C contiguous"; +static char __pyx_k_7[] = "ndarray is not Fortran contiguous"; +static char __pyx_k_9[] = "Non-native byte order not supported"; +static char __pyx_k_11[] = "unknown dtype code in numpy.pxd (%d)"; +static char __pyx_k_12[] = "Format string allocated too short, see comment in numpy.pxd"; +static char __pyx_k_15[] = "Format string allocated too short."; +static char __pyx_k_19[] = "convolve1d_boundary_extend"; +static char __pyx_k_20[] = "/internal/1/root/src/astropy/astropy/astropy/convolution/boundary_extend.pyx"; +static char __pyx_k_21[] = "astropy.convolution.boundary_extend"; +static char __pyx_k_24[] = "convolve2d_boundary_extend"; +static char __pyx_k_27[] = "convolve3d_boundary_extend"; +static char __pyx_k__B[] = "B"; +static char __pyx_k__H[] = "H"; +static char __pyx_k__I[] = "I"; +static char __pyx_k__L[] = "L"; +static char __pyx_k__O[] = "O"; +static char __pyx_k__Q[] = "Q"; +static char __pyx_k__b[] = 
"b"; +static char __pyx_k__d[] = "d"; +static char __pyx_k__f[] = "f"; +static char __pyx_k__g[] = "g"; +static char __pyx_k__h[] = "h"; +static char __pyx_k__i[] = "i"; +static char __pyx_k__j[] = "j"; +static char __pyx_k__k[] = "k"; +static char __pyx_k__l[] = "l"; +static char __pyx_k__q[] = "q"; +static char __pyx_k__Zd[] = "Zd"; +static char __pyx_k__Zf[] = "Zf"; +static char __pyx_k__Zg[] = "Zg"; +static char __pyx_k__ii[] = "ii"; +static char __pyx_k__jj[] = "jj"; +static char __pyx_k__kk[] = "kk"; +static char __pyx_k__np[] = "np"; +static char __pyx_k__nx[] = "nx"; +static char __pyx_k__ny[] = "ny"; +static char __pyx_k__nz[] = "nz"; +static char __pyx_k__bot[] = "bot"; +static char __pyx_k__iii[] = "iii"; +static char __pyx_k__jjj[] = "jjj"; +static char __pyx_k__ker[] = "ker"; +static char __pyx_k__kkk[] = "kkk"; +static char __pyx_k__nkx[] = "nkx"; +static char __pyx_k__nky[] = "nky"; +static char __pyx_k__nkz[] = "nkz"; +static char __pyx_k__top[] = "top"; +static char __pyx_k__val[] = "val"; +static char __pyx_k__wkx[] = "wkx"; +static char __pyx_k__wky[] = "wky"; +static char __pyx_k__wkz[] = "wkz"; +static char __pyx_k__conv[] = "conv"; +static char __pyx_k__DTYPE[] = "DTYPE"; +static char __pyx_k__dtype[] = "dtype"; +static char __pyx_k__empty[] = "empty"; +static char __pyx_k__fixed[] = "fixed"; +static char __pyx_k__float[] = "float"; +static char __pyx_k__iimax[] = "iimax"; +static char __pyx_k__iimin[] = "iimin"; +static char __pyx_k__jjmax[] = "jjmax"; +static char __pyx_k__jjmin[] = "jjmin"; +static char __pyx_k__kkmax[] = "kkmax"; +static char __pyx_k__kkmin[] = "kkmin"; +static char __pyx_k__numpy[] = "numpy"; +static char __pyx_k__range[] = "range"; +static char __pyx_k____main__[] = "__main__"; +static char __pyx_k____test__[] = "__test__"; +static char __pyx_k__ValueError[] = "ValueError"; +static char __pyx_k__RuntimeError[] = "RuntimeError"; +static PyObject *__pyx_kp_s_1; +static PyObject *__pyx_kp_u_11; +static PyObject 
*__pyx_kp_u_12; +static PyObject *__pyx_kp_u_15; +static PyObject *__pyx_n_s_19; +static PyObject *__pyx_kp_s_20; +static PyObject *__pyx_n_s_21; +static PyObject *__pyx_n_s_24; +static PyObject *__pyx_n_s_27; +static PyObject *__pyx_kp_u_5; +static PyObject *__pyx_kp_u_7; +static PyObject *__pyx_kp_u_9; +static PyObject *__pyx_n_s__DTYPE; +static PyObject *__pyx_n_s__RuntimeError; +static PyObject *__pyx_n_s__ValueError; +static PyObject *__pyx_n_s____main__; +static PyObject *__pyx_n_s____test__; +static PyObject *__pyx_n_s__bot; +static PyObject *__pyx_n_s__conv; +static PyObject *__pyx_n_s__dtype; +static PyObject *__pyx_n_s__empty; +static PyObject *__pyx_n_s__f; +static PyObject *__pyx_n_s__fixed; +static PyObject *__pyx_n_s__float; +static PyObject *__pyx_n_s__g; +static PyObject *__pyx_n_s__i; +static PyObject *__pyx_n_s__ii; +static PyObject *__pyx_n_s__iii; +static PyObject *__pyx_n_s__iimax; +static PyObject *__pyx_n_s__iimin; +static PyObject *__pyx_n_s__j; +static PyObject *__pyx_n_s__jj; +static PyObject *__pyx_n_s__jjj; +static PyObject *__pyx_n_s__jjmax; +static PyObject *__pyx_n_s__jjmin; +static PyObject *__pyx_n_s__k; +static PyObject *__pyx_n_s__ker; +static PyObject *__pyx_n_s__kk; +static PyObject *__pyx_n_s__kkk; +static PyObject *__pyx_n_s__kkmax; +static PyObject *__pyx_n_s__kkmin; +static PyObject *__pyx_n_s__nkx; +static PyObject *__pyx_n_s__nky; +static PyObject *__pyx_n_s__nkz; +static PyObject *__pyx_n_s__np; +static PyObject *__pyx_n_s__numpy; +static PyObject *__pyx_n_s__nx; +static PyObject *__pyx_n_s__ny; +static PyObject *__pyx_n_s__nz; +static PyObject *__pyx_n_s__range; +static PyObject *__pyx_n_s__top; +static PyObject *__pyx_n_s__val; +static PyObject *__pyx_n_s__wkx; +static PyObject *__pyx_n_s__wky; +static PyObject *__pyx_n_s__wkz; +static PyObject *__pyx_int_15; +static PyObject *__pyx_k_tuple_2; +static PyObject *__pyx_k_tuple_3; +static PyObject *__pyx_k_tuple_4; +static PyObject *__pyx_k_tuple_6; +static PyObject 
*__pyx_k_tuple_8; +static PyObject *__pyx_k_tuple_10; +static PyObject *__pyx_k_tuple_13; +static PyObject *__pyx_k_tuple_14; +static PyObject *__pyx_k_tuple_16; +static PyObject *__pyx_k_tuple_17; +static PyObject *__pyx_k_tuple_22; +static PyObject *__pyx_k_tuple_25; +static PyObject *__pyx_k_codeobj_18; +static PyObject *__pyx_k_codeobj_23; +static PyObject *__pyx_k_codeobj_26; + +/* "astropy/convolution/boundary_extend.pyx":9 + * ctypedef np.float_t DTYPE_t + * + * cdef inline int int_max(int a, int b): return a if a >= b else b # <<<<<<<<<<<<<< + * cdef inline int int_min(int a, int b): return a if a <= b else b + * + */ + +static CYTHON_INLINE int __pyx_f_7astropy_11convolution_15boundary_extend_int_max(int __pyx_v_a, int __pyx_v_b) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("int_max", 0); + if ((__pyx_v_a >= __pyx_v_b)) { + __pyx_t_1 = __pyx_v_a; + } else { + __pyx_t_1 = __pyx_v_b; + } + __pyx_r = __pyx_t_1; + goto __pyx_L0; + + __pyx_r = 0; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_extend.pyx":10 + * + * cdef inline int int_max(int a, int b): return a if a >= b else b + * cdef inline int int_min(int a, int b): return a if a <= b else b # <<<<<<<<<<<<<< + * + * cdef extern from "numpy/npy_math.h": + */ + +static CYTHON_INLINE int __pyx_f_7astropy_11convolution_15boundary_extend_int_min(int __pyx_v_a, int __pyx_v_b) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("int_min", 0); + if ((__pyx_v_a <= __pyx_v_b)) { + __pyx_t_1 = __pyx_v_a; + } else { + __pyx_t_1 = __pyx_v_b; + } + __pyx_r = __pyx_t_1; + goto __pyx_L0; + + __pyx_r = 0; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_15boundary_extend_1convolve1d_boundary_extend(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static 
PyMethodDef __pyx_mdef_7astropy_11convolution_15boundary_extend_1convolve1d_boundary_extend = {__Pyx_NAMESTR("convolve1d_boundary_extend"), (PyCFunction)__pyx_pw_7astropy_11convolution_15boundary_extend_1convolve1d_boundary_extend, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_15boundary_extend_1convolve1d_boundary_extend(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve1d_boundary_extend (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve1d_boundary_extend", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve1d_boundary_extend") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_f = 
((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve1d_boundary_extend", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_extend.convolve1d_boundary_extend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_15boundary_extend_convolve1d_boundary_extend(__pyx_self, __pyx_v_f, __pyx_v_g); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_extend.pyx":19 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_extend(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=1] g): + * + */ + +static PyObject *__pyx_pf_7astropy_11convolution_15boundary_extend_convolve1d_boundary_extend(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g) { + int __pyx_v_nx; + int __pyx_v_nkx; + int __pyx_v_wkx; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_iii; + int __pyx_v_ii; + int __pyx_v_iimin; + int __pyx_v_iimax; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_top; + 
__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyArrayObject *__pyx_t_8 = NULL; + PyArrayObject *__pyx_t_9 = NULL; + int __pyx_t_10; + unsigned int __pyx_t_11; + unsigned int __pyx_t_12; + int __pyx_t_13; + int __pyx_t_14; + unsigned int __pyx_t_15; + unsigned int __pyx_t_16; + unsigned int __pyx_t_17; + unsigned int __pyx_t_18; + unsigned int __pyx_t_19; + unsigned int __pyx_t_20; + unsigned int __pyx_t_21; + unsigned int __pyx_t_22; + unsigned int __pyx_t_23; + unsigned int __pyx_t_24; + unsigned int __pyx_t_25; + unsigned int __pyx_t_26; + unsigned int __pyx_t_27; + unsigned int __pyx_t_28; + unsigned int __pyx_t_29; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve1d_boundary_extend", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + 
__pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; + + /* "astropy/convolution/boundary_extend.pyx":22 + * np.ndarray[DTYPE_t, ndim=1] g): + * + * if g.shape[0] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_extend.pyx":23 + * + * if g.shape[0] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_2 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_2), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; 
goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __Pyx_Raise(__pyx_t_2, 0, 0, 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_extend.pyx":25 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_2 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_2, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_1) { + __pyx_t_4 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = PyObject_RichCompare(__pyx_t_4, __pyx_t_3, Py_EQ); 
__Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_5 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_6 = __pyx_t_5; + } else { + __pyx_t_6 = __pyx_t_1; + } + if (unlikely(!__pyx_t_6)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_extend.pyx":27 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_extend.pyx":28 + * + * cdef int nx = f.shape[0] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_extend.pyx":29 + * cdef int nx = f.shape[0] + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_extend.pyx":30 + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) + * cdef unsigned int i, iii + */ + __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_2)) {__pyx_filename = 
__pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__empty); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = PyList_New(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_4, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __pyx_t_2 = 0; + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_t_4)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_4)); + __pyx_t_4 = 0; + __pyx_t_4 = PyDict_New(); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_4)); + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_t_4, ((PyObject *)__pyx_n_s__dtype), __pyx_t_7) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), ((PyObject *)__pyx_t_4)); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; + if (!(likely(((__pyx_t_7) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_7, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_8 = ((PyArrayObject *)__pyx_t_7); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_8, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 1, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; + } + } + __pyx_t_8 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "astropy/convolution/boundary_extend.pyx":31 + * cdef int wkx = nkx // 2 + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef unsigned int i, iii + * cdef int ii + */ + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = PyObject_GetAttr(__pyx_t_7, __pyx_n_s__empty); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = 
PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + __pyx_t_7 = 0; + __pyx_t_7 = PyTuple_New(1); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_7, 0, ((PyObject *)__pyx_t_2)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_2)); + __pyx_t_2 = 0; + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_2)); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_2, ((PyObject *)__pyx_n_s__dtype), __pyx_t_3) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_7), ((PyObject *)__pyx_t_2)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_7)); __pyx_t_7 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_9 = ((PyArrayObject 
*)__pyx_t_3); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_9, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 1, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; + } + } + __pyx_t_9 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "astropy/convolution/boundary_extend.pyx":41 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * if npy_isnan(f[i]): + * top = 0. + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_extend.pyx":42 + * # neighboring values + * for i in range(nx): + * if npy_isnan(f[i]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_12 = __pyx_v_i; + __pyx_t_6 = npy_isnan((*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_12, __pyx_pybuffernd_f.diminfo[0].strides))); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_extend.pyx":43 + * for i in range(nx): + * if npy_isnan(f[i]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_extend.pyx":44 + * if npy_isnan(f[i]): + * top = 0. + * bot = 0. 
# <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_extend.pyx":45 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_extend.pyx":46 + * bot = 0. + * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * iii = int_min(int_max(ii, 0), nx - 1) + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_extend.pyx":47 + * iimin = i - wkx + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * iii = int_min(int_max(ii, 0), nx - 1) + * val = f[iii] + */ + __pyx_t_13 = __pyx_v_iimax; + for (__pyx_t_14 = __pyx_v_iimin; __pyx_t_14 < __pyx_t_13; __pyx_t_14+=1) { + __pyx_v_ii = __pyx_t_14; + + /* "astropy/convolution/boundary_extend.pyx":48 + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + * iii = int_min(int_max(ii, 0), nx - 1) # <<<<<<<<<<<<<< + * val = f[iii] + * if not npy_isnan(val): + */ + __pyx_v_iii = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_ii, 0), (__pyx_v_nx - 1)); + + /* "astropy/convolution/boundary_extend.pyx":49 + * for ii in range(iimin, iimax): + * iii = int_min(int_max(ii, 0), nx - 1) + * val = f[iii] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] + */ + __pyx_t_15 = __pyx_v_iii; + __pyx_v_val = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_15, __pyx_pybuffernd_f.diminfo[0].strides)); + + /* "astropy/convolution/boundary_extend.pyx":50 + * iii = int_min(int_max(ii, 0), nx - 1) + * val = f[iii] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i)] + * top += val * ker + */ + __pyx_t_6 = (!npy_isnan(__pyx_v_val)); + 
if (__pyx_t_6) { + + /* "astropy/convolution/boundary_extend.pyx":51 + * val = f[iii] + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_16 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_v_ker = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_16, __pyx_pybuffernd_g.diminfo[0].strides)); + + /* "astropy/convolution/boundary_extend.pyx":52 + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_extend.pyx":53 + * ker = g[(wkx + ii - i)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * + * if bot != 0.: + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L9; + } + __pyx_L9:; + } + + /* "astropy/convolution/boundary_extend.pyx":55 + * bot += ker + * + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i] = top / bot + * else: + */ + __pyx_t_6 = (__pyx_v_bot != 0.); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_extend.pyx":56 + * + * if bot != 0.: + * fixed[i] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i] = f[i] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 56; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_17 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_17, __pyx_pybuffernd_fixed.diminfo[0].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L10; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":58 + * fixed[i] = top / bot + * else: + * fixed[i] = f[i] # <<<<<<<<<<<<<< + * else: + * fixed[i] = f[i] + */ + __pyx_t_18 = __pyx_v_i; + __pyx_t_19 = __pyx_v_i; + 
*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_19, __pyx_pybuffernd_fixed.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_18, __pyx_pybuffernd_f.diminfo[0].strides)); + } + __pyx_L10:; + goto __pyx_L6; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":60 + * fixed[i] = f[i] + * else: + * fixed[i] = f[i] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_20 = __pyx_v_i; + __pyx_t_21 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_21, __pyx_pybuffernd_fixed.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_20, __pyx_pybuffernd_f.diminfo[0].strides)); + } + __pyx_L6:; + } + + /* "astropy/convolution/boundary_extend.pyx":63 + * + * # Now run the proper convolution + * for i in range(nx): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i]): + * top = 0. + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_extend.pyx":64 + * # Now run the proper convolution + * for i in range(nx): + * if not npy_isnan(fixed[i]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_22 = __pyx_v_i; + __pyx_t_6 = (!npy_isnan((*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_22, __pyx_pybuffernd_fixed.diminfo[0].strides)))); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_extend.pyx":65 + * for i in range(nx): + * if not npy_isnan(fixed[i]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. 
+ * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_extend.pyx":66 + * if not npy_isnan(fixed[i]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_extend.pyx":67 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_extend.pyx":68 + * bot = 0. + * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * iii = int_min(int_max(ii, 0), nx - 1) + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_extend.pyx":69 + * iimin = i - wkx + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * iii = int_min(int_max(ii, 0), nx - 1) + * val = fixed[iii] + */ + __pyx_t_13 = __pyx_v_iimax; + for (__pyx_t_14 = __pyx_v_iimin; __pyx_t_14 < __pyx_t_13; __pyx_t_14+=1) { + __pyx_v_ii = __pyx_t_14; + + /* "astropy/convolution/boundary_extend.pyx":70 + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + * iii = int_min(int_max(ii, 0), nx - 1) # <<<<<<<<<<<<<< + * val = fixed[iii] + * ker = g[(wkx + ii - i)] + */ + __pyx_v_iii = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_ii, 0), (__pyx_v_nx - 1)); + + /* "astropy/convolution/boundary_extend.pyx":71 + * for ii in range(iimin, iimax): + * iii = int_min(int_max(ii, 0), nx - 1) + * val = fixed[iii] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): + */ + __pyx_t_23 = __pyx_v_iii; + __pyx_v_val = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_23, __pyx_pybuffernd_fixed.diminfo[0].strides)); + + /* "astropy/convolution/boundary_extend.pyx":72 + * iii = 
int_min(int_max(ii, 0), nx - 1) + * val = fixed[iii] + * ker = g[(wkx + ii - i)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_24 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_v_ker = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_24, __pyx_pybuffernd_g.diminfo[0].strides)); + + /* "astropy/convolution/boundary_extend.pyx":73 + * val = fixed[iii] + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_6 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_extend.pyx":74 + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_extend.pyx":75 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + * conv[i] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L16; + } + __pyx_L16:; + } + + /* "astropy/convolution/boundary_extend.pyx":76 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i] = top / bot + * else: + */ + __pyx_t_6 = (__pyx_v_bot != 0.0); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_extend.pyx":77 + * bot += ker + * if bot != 0: + * conv[i] = top / bot # <<<<<<<<<<<<<< + * else: + * conv[i] = fixed[i] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 77; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_25 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_25, __pyx_pybuffernd_conv.diminfo[0].strides) = (__pyx_v_top / __pyx_v_bot); + goto 
__pyx_L17; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":79 + * conv[i] = top / bot + * else: + * conv[i] = fixed[i] # <<<<<<<<<<<<<< + * else: + * conv[i] = fixed[i] + */ + __pyx_t_26 = __pyx_v_i; + __pyx_t_27 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_27, __pyx_pybuffernd_conv.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_26, __pyx_pybuffernd_fixed.diminfo[0].strides)); + } + __pyx_L17:; + goto __pyx_L13; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":81 + * conv[i] = fixed[i] + * else: + * conv[i] = fixed[i] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_28 = __pyx_v_i; + __pyx_t_29 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_29, __pyx_pybuffernd_conv.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_28, __pyx_pybuffernd_fixed.diminfo[0].strides)); + } + __pyx_L13:; + } + + /* "astropy/convolution/boundary_extend.pyx":83 + * conv[i] = fixed[i] + * + * return conv # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + 
__Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_extend.convolve1d_boundary_extend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_15boundary_extend_3convolve2d_boundary_extend(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_15boundary_extend_3convolve2d_boundary_extend = {__Pyx_NAMESTR("convolve2d_boundary_extend"), (PyCFunction)__pyx_pw_7astropy_11convolution_15boundary_extend_3convolve2d_boundary_extend, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; /* method-table entry exposing the C wrapper to Python as "convolve2d_boundary_extend" */ +static PyObject *__pyx_pw_7astropy_11convolution_15boundary_extend_3convolve2d_boundary_extend(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve2d_boundary_extend (wrapper)", 0); + /* argument unpacking: exactly two arguments, f and g, accepted positionally or by keyword */ + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = 
PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve2d_boundary_extend", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 87; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve2d_boundary_extend") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 87; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve2d_boundary_extend", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 87; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_extend.convolve2d_boundary_extend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + /* type check: both f and g must be numpy.ndarray instances before dispatching to the implementation */ + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 87; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_15boundary_extend_2convolve2d_boundary_extend(__pyx_self, __pyx_v_f, __pyx_v_g); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_extend.pyx":87 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_extend(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g): + * + */ + +static PyObject *__pyx_pf_7astropy_11convolution_15boundary_extend_2convolve2d_boundary_extend(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g) { + int __pyx_v_nx; + int __pyx_v_ny; + int __pyx_v_nkx; + int __pyx_v_nky; + int __pyx_v_wkx; + int __pyx_v_wky; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_j; + unsigned int __pyx_v_iii; + unsigned int __pyx_v_jjj; + int __pyx_v_ii; + int __pyx_v_jj; + int __pyx_v_iimin; + int __pyx_v_iimax; + int __pyx_v_jjmin; + int __pyx_v_jjmax; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyArrayObject *__pyx_t_8 = NULL; + PyArrayObject 
*__pyx_t_9 = NULL; + int __pyx_t_10; + unsigned int __pyx_t_11; + int __pyx_t_12; + unsigned int __pyx_t_13; + unsigned int __pyx_t_14; + unsigned int __pyx_t_15; + int __pyx_t_16; + int __pyx_t_17; + int __pyx_t_18; + int __pyx_t_19; + unsigned int __pyx_t_20; + unsigned int __pyx_t_21; + unsigned int __pyx_t_22; + unsigned int __pyx_t_23; + unsigned int __pyx_t_24; + unsigned int __pyx_t_25; + unsigned int __pyx_t_26; + unsigned int __pyx_t_27; + unsigned int __pyx_t_28; + unsigned int __pyx_t_29; + unsigned int __pyx_t_30; + unsigned int __pyx_t_31; + unsigned int __pyx_t_32; + unsigned int __pyx_t_33; + unsigned int __pyx_t_34; + unsigned int __pyx_t_35; + unsigned int __pyx_t_36; + unsigned int __pyx_t_37; + unsigned int __pyx_t_38; + unsigned int __pyx_t_39; + unsigned int __pyx_t_40; + unsigned int __pyx_t_41; + unsigned int __pyx_t_42; + unsigned int __pyx_t_43; + unsigned int __pyx_t_44; + unsigned int __pyx_t_45; + unsigned int __pyx_t_46; + unsigned int __pyx_t_47; + unsigned int __pyx_t_48; + unsigned int __pyx_t_49; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve2d_boundary_extend", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + __pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, 
(PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 87; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_f.diminfo[1].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_f.diminfo[1].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 87; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_g.diminfo[1].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_g.diminfo[1].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[1]; + + /* "astropy/convolution/boundary_extend.pyx":90 + * np.ndarray[DTYPE_t, ndim=2] g): + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (!__pyx_t_1) { + __pyx_t_2 = (__Pyx_mod_long((__pyx_v_g->dimensions[1]), 2) != 1); + __pyx_t_3 = __pyx_t_2; + } else { + __pyx_t_3 = __pyx_t_1; + } + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_extend.pyx":91 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") 
# <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_3), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 91; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 91; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_extend.pyx":93 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_4 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyObject_RichCompare(__pyx_t_4, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_3) { + __pyx_t_6 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = 
__Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_6, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_3; + } + if (unlikely(!__pyx_t_2)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_extend.pyx":95 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_extend.pyx":96 + * + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + */ + __pyx_v_ny = (__pyx_v_f->dimensions[1]); + + /* "astropy/convolution/boundary_extend.pyx":97 + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_extend.pyx":98 + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + */ + __pyx_v_nky = (__pyx_v_g->dimensions[1]); + + /* 
"astropy/convolution/boundary_extend.pyx":99 + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef int wky = nky // 2 + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_extend.pyx":100 + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) + */ + __pyx_v_wky = __Pyx_div_long(__pyx_v_nky, 2); + + /* "astropy/convolution/boundary_extend.pyx":101 + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) + * cdef unsigned int i, j, iii, jjj + */ + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = PyObject_GetAttr(__pyx_t_4, __pyx_n_s__empty); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PyList_New(2); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + 
__Pyx_GOTREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_7, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_7, 1, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_4 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_7)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_7)); + __pyx_t_7 = 0; + __pyx_t_7 = PyDict_New(); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_7)); + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + if (PyDict_SetItem(__pyx_t_7, ((PyObject *)__pyx_n_s__dtype), __pyx_t_4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyObject_Call(__pyx_t_5, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_7)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_7)); __pyx_t_7 = 0; + if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_8 = ((PyArrayObject *)__pyx_t_4); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_8, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, 
PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 2, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_fixed.diminfo[1].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_fixed.diminfo[1].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[1]; + } + } + __pyx_t_8 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "astropy/convolution/boundary_extend.pyx":102 + * cdef int wky = nky // 2 + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef unsigned int i, j, iii, jjj + * cdef int ii, jj + */ + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = PyObject_GetAttr(__pyx_t_4, __pyx_n_s__empty); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyList_New(2); if (unlikely(!__pyx_t_5)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_5, 1, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_4 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + if (PyDict_SetItem(__pyx_t_5, ((PyObject *)__pyx_n_s__dtype), __pyx_t_4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyObject_Call(__pyx_t_7, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_5)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0; + if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_9 = ((PyArrayObject *)__pyx_t_4); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, 
(PyObject*)__pyx_t_9, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 2, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 102; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_conv.diminfo[1].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_conv.diminfo[1].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[1]; + } + } + __pyx_t_9 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "astropy/convolution/boundary_extend.pyx":112 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * if npy_isnan(f[i, j]): + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_extend.pyx":113 + * # neighboring values + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * if npy_isnan(f[i, j]): + * top = 0. + */ + __pyx_t_12 = __pyx_v_ny; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_j = __pyx_t_13; + + /* "astropy/convolution/boundary_extend.pyx":114 + * for i in range(nx): + * for j in range(ny): + * if npy_isnan(f[i, j]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. 
+ */ + __pyx_t_14 = __pyx_v_i; + __pyx_t_15 = __pyx_v_j; + __pyx_t_2 = npy_isnan((*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_14, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_15, __pyx_pybuffernd_f.diminfo[1].strides))); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_extend.pyx":115 + * for j in range(ny): + * if npy_isnan(f[i, j]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_extend.pyx":116 + * if npy_isnan(f[i, j]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_extend.pyx":117 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_extend.pyx":118 + * bot = 0. + * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_extend.pyx":119 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_extend.pyx":120 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_extend.pyx":121 + * jjmin = j - wky + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * iii = int_min(int_max(ii, 0), nx - 1) + */ + __pyx_t_16 = __pyx_v_iimax; + for (__pyx_t_17 = __pyx_v_iimin; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + 
__pyx_v_ii = __pyx_t_17; + + /* "astropy/convolution/boundary_extend.pyx":122 + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) + */ + __pyx_t_18 = __pyx_v_jjmax; + for (__pyx_t_19 = __pyx_v_jjmin; __pyx_t_19 < __pyx_t_18; __pyx_t_19+=1) { + __pyx_v_jj = __pyx_t_19; + + /* "astropy/convolution/boundary_extend.pyx":123 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * iii = int_min(int_max(ii, 0), nx - 1) # <<<<<<<<<<<<<< + * jjj = int_min(int_max(jj, 0), ny - 1) + * val = f[iii, jjj] + */ + __pyx_v_iii = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_ii, 0), (__pyx_v_nx - 1)); + + /* "astropy/convolution/boundary_extend.pyx":124 + * for jj in range(jjmin, jjmax): + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) # <<<<<<<<<<<<<< + * val = f[iii, jjj] + * if not npy_isnan(val): + */ + __pyx_v_jjj = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_jj, 0), (__pyx_v_ny - 1)); + + /* "astropy/convolution/boundary_extend.pyx":125 + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) + * val = f[iii, jjj] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + */ + __pyx_t_20 = __pyx_v_iii; + __pyx_t_21 = __pyx_v_jjj; + __pyx_v_val = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_20, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_21, __pyx_pybuffernd_f.diminfo[1].strides)); + + /* "astropy/convolution/boundary_extend.pyx":126 + * jjj = int_min(int_max(jj, 0), ny - 1) + * val = f[iii, jjj] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + */ + 
__pyx_t_2 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_extend.pyx":128 + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + * (wky + jj - j)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_22 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_23 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_v_ker = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_22, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_23, __pyx_pybuffernd_g.diminfo[1].strides)); + + /* "astropy/convolution/boundary_extend.pyx":129 + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_extend.pyx":130 + * (wky + jj - j)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * + * if bot != 0.: + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L13; + } + __pyx_L13:; + } + } + + /* "astropy/convolution/boundary_extend.pyx":132 + * bot += ker + * + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i, j] = top / bot + * else: + */ + __pyx_t_2 = (__pyx_v_bot != 0.); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_extend.pyx":133 + * + * if bot != 0.: + * fixed[i, j] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i, j] = f[i, j] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 133; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_24 = __pyx_v_i; + __pyx_t_25 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_24, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_25, __pyx_pybuffernd_fixed.diminfo[1].strides) = (__pyx_v_top / __pyx_v_bot); + goto 
__pyx_L14; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":135 + * fixed[i, j] = top / bot + * else: + * fixed[i, j] = f[i, j] # <<<<<<<<<<<<<< + * else: + * fixed[i, j] = f[i, j] + */ + __pyx_t_26 = __pyx_v_i; + __pyx_t_27 = __pyx_v_j; + __pyx_t_28 = __pyx_v_i; + __pyx_t_29 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_28, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_29, __pyx_pybuffernd_fixed.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_26, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_27, __pyx_pybuffernd_f.diminfo[1].strides)); + } + __pyx_L14:; + goto __pyx_L8; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":137 + * fixed[i, j] = f[i, j] + * else: + * fixed[i, j] = f[i, j] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_30 = __pyx_v_i; + __pyx_t_31 = __pyx_v_j; + __pyx_t_32 = __pyx_v_i; + __pyx_t_33 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_32, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_33, __pyx_pybuffernd_fixed.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_30, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_31, __pyx_pybuffernd_f.diminfo[1].strides)); + } + __pyx_L8:; + } + } + + /* "astropy/convolution/boundary_extend.pyx":140 + * + * # Now run the proper convolution + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * if not npy_isnan(fixed[i, j]): + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_extend.pyx":141 + * # Now run 
the proper convolution + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i, j]): + * top = 0. + */ + __pyx_t_12 = __pyx_v_ny; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_j = __pyx_t_13; + + /* "astropy/convolution/boundary_extend.pyx":142 + * for i in range(nx): + * for j in range(ny): + * if not npy_isnan(fixed[i, j]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_34 = __pyx_v_i; + __pyx_t_35 = __pyx_v_j; + __pyx_t_2 = (!npy_isnan((*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_34, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_35, __pyx_pybuffernd_fixed.diminfo[1].strides)))); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_extend.pyx":143 + * for j in range(ny): + * if not npy_isnan(fixed[i, j]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_extend.pyx":144 + * if not npy_isnan(fixed[i, j]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_extend.pyx":145 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_extend.pyx":146 + * bot = 0. 
+ * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_extend.pyx":147 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_extend.pyx":148 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_extend.pyx":149 + * jjmin = j - wky + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * iii = int_min(int_max(ii, 0), nx - 1) + */ + __pyx_t_16 = __pyx_v_iimax; + for (__pyx_t_17 = __pyx_v_iimin; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_ii = __pyx_t_17; + + /* "astropy/convolution/boundary_extend.pyx":150 + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) + */ + __pyx_t_18 = __pyx_v_jjmax; + for (__pyx_t_19 = __pyx_v_jjmin; __pyx_t_19 < __pyx_t_18; __pyx_t_19+=1) { + __pyx_v_jj = __pyx_t_19; + + /* "astropy/convolution/boundary_extend.pyx":151 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * iii = int_min(int_max(ii, 0), nx - 1) # <<<<<<<<<<<<<< + * jjj = int_min(int_max(jj, 0), ny - 1) + * val = fixed[iii, jjj] + */ + __pyx_v_iii = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_ii, 0), (__pyx_v_nx - 1)); + + /* "astropy/convolution/boundary_extend.pyx":152 + * for jj in range(jjmin, jjmax): + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 
0), ny - 1) # <<<<<<<<<<<<<< + * val = fixed[iii, jjj] + * ker = g[(wkx + ii - i), + */ + __pyx_v_jjj = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_jj, 0), (__pyx_v_ny - 1)); + + /* "astropy/convolution/boundary_extend.pyx":153 + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) + * val = fixed[iii, jjj] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + */ + __pyx_t_36 = __pyx_v_iii; + __pyx_t_37 = __pyx_v_jjj; + __pyx_v_val = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_36, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_37, __pyx_pybuffernd_fixed.diminfo[1].strides)); + + /* "astropy/convolution/boundary_extend.pyx":155 + * val = fixed[iii, jjj] + * ker = g[(wkx + ii - i), + * (wky + jj - j)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_38 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_39 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_v_ker = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_38, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_39, __pyx_pybuffernd_g.diminfo[1].strides)); + + /* "astropy/convolution/boundary_extend.pyx":156 + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_2 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_extend.pyx":157 + * (wky + jj - j)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_extend.pyx":158 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # 
<<<<<<<<<<<<<< + * if bot != 0: + * conv[i, j] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L24; + } + __pyx_L24:; + } + } + + /* "astropy/convolution/boundary_extend.pyx":159 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i, j] = top / bot + * else: + */ + __pyx_t_2 = (__pyx_v_bot != 0.0); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_extend.pyx":160 + * bot += ker + * if bot != 0: + * conv[i, j] = top / bot # <<<<<<<<<<<<<< + * else: + * conv[i, j] = fixed[i, j] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_40 = __pyx_v_i; + __pyx_t_41 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_40, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_41, __pyx_pybuffernd_conv.diminfo[1].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L25; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":162 + * conv[i, j] = top / bot + * else: + * conv[i, j] = fixed[i, j] # <<<<<<<<<<<<<< + * else: + * conv[i, j] = fixed[i, j] + */ + __pyx_t_42 = __pyx_v_i; + __pyx_t_43 = __pyx_v_j; + __pyx_t_44 = __pyx_v_i; + __pyx_t_45 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_44, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_45, __pyx_pybuffernd_conv.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_42, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_43, __pyx_pybuffernd_fixed.diminfo[1].strides)); + } + __pyx_L25:; + goto __pyx_L19; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":164 + * conv[i, j] = 
fixed[i, j] + * else: + * conv[i, j] = fixed[i, j] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_46 = __pyx_v_i; + __pyx_t_47 = __pyx_v_j; + __pyx_t_48 = __pyx_v_i; + __pyx_t_49 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_48, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_49, __pyx_pybuffernd_conv.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_46, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_47, __pyx_pybuffernd_fixed.diminfo[1].strides)); + } + __pyx_L19:; + } + } + + /* "astropy/convolution/boundary_extend.pyx":166 + * conv[i, j] = fixed[i, j] + * + * return conv # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_extend.convolve2d_boundary_extend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + 
__Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_15boundary_extend_5convolve3d_boundary_extend(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_15boundary_extend_5convolve3d_boundary_extend = {__Pyx_NAMESTR("convolve3d_boundary_extend"), (PyCFunction)__pyx_pw_7astropy_11convolution_15boundary_extend_5convolve3d_boundary_extend, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_15boundary_extend_5convolve3d_boundary_extend(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve3d_boundary_extend (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve3d_boundary_extend", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 170; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 
0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve3d_boundary_extend") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 170; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve3d_boundary_extend", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 170; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_extend.convolve3d_boundary_extend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 170; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 171; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_15boundary_extend_4convolve3d_boundary_extend(__pyx_self, __pyx_v_f, __pyx_v_g); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_extend.pyx":170 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_extend(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g): + * + */ + +static PyObject 
*__pyx_pf_7astropy_11convolution_15boundary_extend_4convolve3d_boundary_extend(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g) { + int __pyx_v_nx; + int __pyx_v_ny; + int __pyx_v_nz; + int __pyx_v_nkx; + int __pyx_v_nky; + int __pyx_v_nkz; + int __pyx_v_wkx; + int __pyx_v_wky; + int __pyx_v_wkz; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_j; + unsigned int __pyx_v_k; + unsigned int __pyx_v_iii; + unsigned int __pyx_v_jjj; + unsigned int __pyx_v_kkk; + int __pyx_v_ii; + int __pyx_v_jj; + int __pyx_v_kk; + int __pyx_v_iimin; + int __pyx_v_iimax; + int __pyx_v_jjmin; + int __pyx_v_jjmax; + int __pyx_v_kkmin; + int __pyx_v_kkmax; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyArrayObject *__pyx_t_10 = NULL; + PyArrayObject *__pyx_t_11 = NULL; + int __pyx_t_12; + unsigned int __pyx_t_13; + int __pyx_t_14; + unsigned int __pyx_t_15; + int __pyx_t_16; + unsigned int __pyx_t_17; + unsigned int __pyx_t_18; + unsigned int __pyx_t_19; + unsigned int __pyx_t_20; + int __pyx_t_21; + int __pyx_t_22; + int __pyx_t_23; + int __pyx_t_24; + int __pyx_t_25; + 
int __pyx_t_26; + unsigned int __pyx_t_27; + unsigned int __pyx_t_28; + unsigned int __pyx_t_29; + unsigned int __pyx_t_30; + unsigned int __pyx_t_31; + unsigned int __pyx_t_32; + unsigned int __pyx_t_33; + unsigned int __pyx_t_34; + unsigned int __pyx_t_35; + unsigned int __pyx_t_36; + unsigned int __pyx_t_37; + unsigned int __pyx_t_38; + unsigned int __pyx_t_39; + unsigned int __pyx_t_40; + unsigned int __pyx_t_41; + unsigned int __pyx_t_42; + unsigned int __pyx_t_43; + unsigned int __pyx_t_44; + unsigned int __pyx_t_45; + unsigned int __pyx_t_46; + unsigned int __pyx_t_47; + unsigned int __pyx_t_48; + unsigned int __pyx_t_49; + unsigned int __pyx_t_50; + unsigned int __pyx_t_51; + unsigned int __pyx_t_52; + unsigned int __pyx_t_53; + unsigned int __pyx_t_54; + unsigned int __pyx_t_55; + unsigned int __pyx_t_56; + unsigned int __pyx_t_57; + unsigned int __pyx_t_58; + unsigned int __pyx_t_59; + unsigned int __pyx_t_60; + unsigned int __pyx_t_61; + unsigned int __pyx_t_62; + unsigned int __pyx_t_63; + unsigned int __pyx_t_64; + unsigned int __pyx_t_65; + unsigned int __pyx_t_66; + unsigned int __pyx_t_67; + unsigned int __pyx_t_68; + unsigned int __pyx_t_69; + unsigned int __pyx_t_70; + unsigned int __pyx_t_71; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve3d_boundary_extend", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + __pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + 
__pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 3, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 170; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_f.diminfo[1].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_f.diminfo[1].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_f.diminfo[2].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_f.diminfo[2].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[2]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 3, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 170; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_g.diminfo[1].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_g.diminfo[1].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_g.diminfo[2].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_g.diminfo[2].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[2]; + + /* "astropy/convolution/boundary_extend.pyx":173 + * np.ndarray[DTYPE_t, ndim=3] g): + * + * if g.shape[0] % 2 != 1 or 
g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (!__pyx_t_1) { + __pyx_t_2 = (__Pyx_mod_long((__pyx_v_g->dimensions[1]), 2) != 1); + if (!__pyx_t_2) { + __pyx_t_3 = (__Pyx_mod_long((__pyx_v_g->dimensions[2]), 2) != 1); + __pyx_t_4 = __pyx_t_3; + } else { + __pyx_t_4 = __pyx_t_2; + } + __pyx_t_2 = __pyx_t_4; + } else { + __pyx_t_2 = __pyx_t_1; + } + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_extend.pyx":174 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_4), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_extend.pyx":176 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_5 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 176; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 176; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = 
PyObject_RichCompare(__pyx_t_5, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 176; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 176; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_2) { + __pyx_t_7 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 176; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 176; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyObject_RichCompare(__pyx_t_7, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 176; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 176; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_4 = __pyx_t_1; + } else { + __pyx_t_4 = __pyx_t_2; + } + if (unlikely(!__pyx_t_4)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 176; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_extend.pyx":178 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* 
"astropy/convolution/boundary_extend.pyx":179 + * + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] # <<<<<<<<<<<<<< + * cdef int nz = f.shape[2] + * cdef int nkx = g.shape[0] + */ + __pyx_v_ny = (__pyx_v_f->dimensions[1]); + + /* "astropy/convolution/boundary_extend.pyx":180 + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + */ + __pyx_v_nz = (__pyx_v_f->dimensions[2]); + + /* "astropy/convolution/boundary_extend.pyx":181 + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_extend.pyx":182 + * cdef int nz = f.shape[2] + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] # <<<<<<<<<<<<<< + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nky = (__pyx_v_g->dimensions[1]); + + /* "astropy/convolution/boundary_extend.pyx":183 + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + */ + __pyx_v_nkz = (__pyx_v_g->dimensions[2]); + + /* "astropy/convolution/boundary_extend.pyx":184 + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef int wky = nky // 2 + * cdef int wkz = nkz // 2 + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_extend.pyx":185 + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 # <<<<<<<<<<<<<< + * cdef int wkz = nkz // 2 + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + */ + __pyx_v_wky = __Pyx_div_long(__pyx_v_nky, 2); + + /* "astropy/convolution/boundary_extend.pyx":186 + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + * cdef 
int wkz = nkz // 2 # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) + */ + __pyx_v_wkz = __Pyx_div_long(__pyx_v_nkz, 2); + + /* "astropy/convolution/boundary_extend.pyx":187 + * cdef int wky = nky // 2 + * cdef int wkz = nkz // 2 + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) + * cdef unsigned int i, j, k, iii, jjj, kkk + */ + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyObject_GetAttr(__pyx_t_5, __pyx_n_s__empty); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyInt_FromLong(__pyx_v_nz); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = PyList_New(3); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_9); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_9, 1, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_9, 2, __pyx_t_8); + 
__Pyx_GIVEREF(__pyx_t_8); + __pyx_t_5 = 0; + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_8 = PyTuple_New(1); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_8, 0, ((PyObject *)__pyx_t_9)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_9)); + __pyx_t_9 = 0; + __pyx_t_9 = PyDict_New(); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_9)); + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_t_9, ((PyObject *)__pyx_n_s__dtype), __pyx_t_7) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyObject_Call(__pyx_t_6, ((PyObject *)__pyx_t_8), ((PyObject *)__pyx_t_9)); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_8)); __pyx_t_8 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_9)); __pyx_t_9 = 0; + if (!(likely(((__pyx_t_7) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_7, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_10 = ((PyArrayObject *)__pyx_t_7); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_10, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 3, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); 
__Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_fixed.diminfo[1].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_fixed.diminfo[1].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_fixed.diminfo[2].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_fixed.diminfo[2].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[2]; + } + } + __pyx_t_10 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "astropy/convolution/boundary_extend.pyx":188 + * cdef int wkz = nkz // 2 + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef unsigned int i, j, k, iii, jjj, kkk + * cdef int ii, jj, kk + */ + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = PyObject_GetAttr(__pyx_t_7, __pyx_n_s__empty); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = PyInt_FromLong(__pyx_v_nz); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyList_New(3); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_5, 0, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_5, 1, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_8); + PyList_SET_ITEM(__pyx_t_5, 2, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_8 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + if (PyDict_SetItem(__pyx_t_5, ((PyObject *)__pyx_n_s__dtype), __pyx_t_8) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = PyObject_Call(__pyx_t_9, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_5)); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0; + 
if (!(likely(((__pyx_t_8) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_8, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_11 = ((PyArrayObject *)__pyx_t_8); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_11, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 3, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_conv.diminfo[1].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_conv.diminfo[1].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_conv.diminfo[2].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_conv.diminfo[2].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[2]; + } + } + __pyx_t_11 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_8); + __pyx_t_8 = 0; + + /* "astropy/convolution/boundary_extend.pyx":198 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * for k in range(nz): + */ + __pyx_t_12 = __pyx_v_nx; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_i = __pyx_t_13; + + /* "astropy/convolution/boundary_extend.pyx":199 + * # neighboring values + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * for k in range(nz): + * if npy_isnan(f[i, j, k]): + 
*/ + __pyx_t_14 = __pyx_v_ny; + for (__pyx_t_15 = 0; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_j = __pyx_t_15; + + /* "astropy/convolution/boundary_extend.pyx":200 + * for i in range(nx): + * for j in range(ny): + * for k in range(nz): # <<<<<<<<<<<<<< + * if npy_isnan(f[i, j, k]): + * top = 0. + */ + __pyx_t_16 = __pyx_v_nz; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_k = __pyx_t_17; + + /* "astropy/convolution/boundary_extend.pyx":201 + * for j in range(ny): + * for k in range(nz): + * if npy_isnan(f[i, j, k]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_18 = __pyx_v_i; + __pyx_t_19 = __pyx_v_j; + __pyx_t_20 = __pyx_v_k; + __pyx_t_4 = npy_isnan((*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_18, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_19, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_20, __pyx_pybuffernd_f.diminfo[2].strides))); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_extend.pyx":202 + * for k in range(nz): + * if npy_isnan(f[i, j, k]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_extend.pyx":203 + * if npy_isnan(f[i, j, k]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_extend.pyx":204 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_extend.pyx":205 + * bot = 0. 
+ * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_extend.pyx":206 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * kkmin = k - wkz + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_extend.pyx":207 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * kkmin = k - wkz + * kkmax = k + wkz + 1 + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_extend.pyx":208 + * jjmin = j - wky + * jjmax = j + wky + 1 + * kkmin = k - wkz # <<<<<<<<<<<<<< + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_kkmin = (__pyx_v_k - __pyx_v_wkz); + + /* "astropy/convolution/boundary_extend.pyx":209 + * jjmax = j + wky + 1 + * kkmin = k - wkz + * kkmax = k + wkz + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_kkmax = ((__pyx_v_k + __pyx_v_wkz) + 1); + + /* "astropy/convolution/boundary_extend.pyx":210 + * kkmin = k - wkz + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + */ + __pyx_t_21 = __pyx_v_iimax; + for (__pyx_t_22 = __pyx_v_iimin; __pyx_t_22 < __pyx_t_21; __pyx_t_22+=1) { + __pyx_v_ii = __pyx_t_22; + + /* "astropy/convolution/boundary_extend.pyx":211 + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * for kk in range(kkmin, kkmax): + * iii = int_min(int_max(ii, 0), nx - 1) + */ + __pyx_t_23 = __pyx_v_jjmax; + for (__pyx_t_24 = __pyx_v_jjmin; __pyx_t_24 < __pyx_t_23; __pyx_t_24+=1) { + __pyx_v_jj = __pyx_t_24; + + /* "astropy/convolution/boundary_extend.pyx":212 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * 
for kk in range(kkmin, kkmax): # <<<<<<<<<<<<<< + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) + */ + __pyx_t_25 = __pyx_v_kkmax; + for (__pyx_t_26 = __pyx_v_kkmin; __pyx_t_26 < __pyx_t_25; __pyx_t_26+=1) { + __pyx_v_kk = __pyx_t_26; + + /* "astropy/convolution/boundary_extend.pyx":213 + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + * iii = int_min(int_max(ii, 0), nx - 1) # <<<<<<<<<<<<<< + * jjj = int_min(int_max(jj, 0), ny - 1) + * kkk = int_min(int_max(kk, 0), nz - 1) + */ + __pyx_v_iii = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_ii, 0), (__pyx_v_nx - 1)); + + /* "astropy/convolution/boundary_extend.pyx":214 + * for kk in range(kkmin, kkmax): + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) # <<<<<<<<<<<<<< + * kkk = int_min(int_max(kk, 0), nz - 1) + * val = f[iii, jjj, kkk] + */ + __pyx_v_jjj = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_jj, 0), (__pyx_v_ny - 1)); + + /* "astropy/convolution/boundary_extend.pyx":215 + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) + * kkk = int_min(int_max(kk, 0), nz - 1) # <<<<<<<<<<<<<< + * val = f[iii, jjj, kkk] + * if not npy_isnan(val): + */ + __pyx_v_kkk = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_kk, 0), (__pyx_v_nz - 1)); + + /* "astropy/convolution/boundary_extend.pyx":216 + * jjj = int_min(int_max(jj, 0), ny - 1) + * kkk = int_min(int_max(kk, 0), nz - 1) + * val = f[iii, jjj, kkk] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + */ + __pyx_t_27 = __pyx_v_iii; + __pyx_t_28 = __pyx_v_jjj; + __pyx_t_29 = __pyx_v_kkk; + __pyx_v_val = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, 
__pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_27, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_28, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_29, __pyx_pybuffernd_f.diminfo[2].strides)); + + /* "astropy/convolution/boundary_extend.pyx":217 + * kkk = int_min(int_max(kk, 0), nz - 1) + * val = f[iii, jjj, kkk] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j), + */ + __pyx_t_4 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_extend.pyx":220 + * ker = g[(wkx + ii - i), + * (wky + jj - j), + * (wkz + kk - k)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_30 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_31 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_t_32 = ((unsigned int)((__pyx_v_wkz + __pyx_v_kk) - __pyx_v_k)); + __pyx_v_ker = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_30, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_31, __pyx_pybuffernd_g.diminfo[1].strides, __pyx_t_32, __pyx_pybuffernd_g.diminfo[2].strides)); + + /* "astropy/convolution/boundary_extend.pyx":221 + * (wky + jj - j), + * (wkz + kk - k)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_extend.pyx":222 + * (wkz + kk - k)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * + * if bot != 0.: + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L17; + } + __pyx_L17:; + } + } + } + + /* "astropy/convolution/boundary_extend.pyx":224 + * bot += ker + * + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i, j, k] = top / bot + * else: + */ + __pyx_t_4 = (__pyx_v_bot != 0.); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_extend.pyx":225 + * + * if bot != 0.: + * fixed[i, j, k] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i, j, k] = 
f[i, j, k] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 225; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_33 = __pyx_v_i; + __pyx_t_34 = __pyx_v_j; + __pyx_t_35 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_33, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_34, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_35, __pyx_pybuffernd_fixed.diminfo[2].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L18; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":227 + * fixed[i, j, k] = top / bot + * else: + * fixed[i, j, k] = f[i, j, k] # <<<<<<<<<<<<<< + * else: + * fixed[i, j, k] = f[i, j, k] + */ + __pyx_t_36 = __pyx_v_i; + __pyx_t_37 = __pyx_v_j; + __pyx_t_38 = __pyx_v_k; + __pyx_t_39 = __pyx_v_i; + __pyx_t_40 = __pyx_v_j; + __pyx_t_41 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_39, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_40, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_41, __pyx_pybuffernd_fixed.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_36, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_37, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_38, __pyx_pybuffernd_f.diminfo[2].strides)); + } + __pyx_L18:; + goto __pyx_L10; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":229 + * fixed[i, j, k] = f[i, j, k] + * else: + * fixed[i, j, k] = f[i, j, k] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_42 = __pyx_v_i; + __pyx_t_43 = __pyx_v_j; + __pyx_t_44 = __pyx_v_k; + __pyx_t_45 = __pyx_v_i; + __pyx_t_46 = __pyx_v_j; + __pyx_t_47 = __pyx_v_k; + 
*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_45, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_46, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_47, __pyx_pybuffernd_fixed.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_42, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_43, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_44, __pyx_pybuffernd_f.diminfo[2].strides)); + } + __pyx_L10:; + } + } + } + + /* "astropy/convolution/boundary_extend.pyx":232 + * + * # Now run the proper convolution + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * for k in range(nz): + */ + __pyx_t_12 = __pyx_v_nx; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_i = __pyx_t_13; + + /* "astropy/convolution/boundary_extend.pyx":233 + * # Now run the proper convolution + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * for k in range(nz): + * if not npy_isnan(fixed[i, j, k]): + */ + __pyx_t_14 = __pyx_v_ny; + for (__pyx_t_15 = 0; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_j = __pyx_t_15; + + /* "astropy/convolution/boundary_extend.pyx":234 + * for i in range(nx): + * for j in range(ny): + * for k in range(nz): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i, j, k]): + * top = 0. + */ + __pyx_t_16 = __pyx_v_nz; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_k = __pyx_t_17; + + /* "astropy/convolution/boundary_extend.pyx":235 + * for j in range(ny): + * for k in range(nz): + * if not npy_isnan(fixed[i, j, k]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. 
+ */ + __pyx_t_48 = __pyx_v_i; + __pyx_t_49 = __pyx_v_j; + __pyx_t_50 = __pyx_v_k; + __pyx_t_4 = (!npy_isnan((*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_48, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_49, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_50, __pyx_pybuffernd_fixed.diminfo[2].strides)))); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_extend.pyx":236 + * for k in range(nz): + * if not npy_isnan(fixed[i, j, k]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_extend.pyx":237 + * if not npy_isnan(fixed[i, j, k]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_extend.pyx":238 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_extend.pyx":239 + * bot = 0. 
+ * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_extend.pyx":240 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * kkmin = k - wkz + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_extend.pyx":241 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * kkmin = k - wkz + * kkmax = k + wkz + 1 + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_extend.pyx":242 + * jjmin = j - wky + * jjmax = j + wky + 1 + * kkmin = k - wkz # <<<<<<<<<<<<<< + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_kkmin = (__pyx_v_k - __pyx_v_wkz); + + /* "astropy/convolution/boundary_extend.pyx":243 + * jjmax = j + wky + 1 + * kkmin = k - wkz + * kkmax = k + wkz + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_kkmax = ((__pyx_v_k + __pyx_v_wkz) + 1); + + /* "astropy/convolution/boundary_extend.pyx":244 + * kkmin = k - wkz + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + */ + __pyx_t_21 = __pyx_v_iimax; + for (__pyx_t_22 = __pyx_v_iimin; __pyx_t_22 < __pyx_t_21; __pyx_t_22+=1) { + __pyx_v_ii = __pyx_t_22; + + /* "astropy/convolution/boundary_extend.pyx":245 + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * for kk in range(kkmin, kkmax): + * iii = int_min(int_max(ii, 0), nx - 1) + */ + __pyx_t_23 = __pyx_v_jjmax; + for (__pyx_t_24 = __pyx_v_jjmin; __pyx_t_24 < __pyx_t_23; __pyx_t_24+=1) { + __pyx_v_jj = __pyx_t_24; + + /* "astropy/convolution/boundary_extend.pyx":246 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * 
for kk in range(kkmin, kkmax): # <<<<<<<<<<<<<< + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) + */ + __pyx_t_25 = __pyx_v_kkmax; + for (__pyx_t_26 = __pyx_v_kkmin; __pyx_t_26 < __pyx_t_25; __pyx_t_26+=1) { + __pyx_v_kk = __pyx_t_26; + + /* "astropy/convolution/boundary_extend.pyx":247 + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + * iii = int_min(int_max(ii, 0), nx - 1) # <<<<<<<<<<<<<< + * jjj = int_min(int_max(jj, 0), ny - 1) + * kkk = int_min(int_max(kk, 0), nz - 1) + */ + __pyx_v_iii = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_ii, 0), (__pyx_v_nx - 1)); + + /* "astropy/convolution/boundary_extend.pyx":248 + * for kk in range(kkmin, kkmax): + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) # <<<<<<<<<<<<<< + * kkk = int_min(int_max(kk, 0), nz - 1) + * val = fixed[iii, jjj, kkk] + */ + __pyx_v_jjj = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_jj, 0), (__pyx_v_ny - 1)); + + /* "astropy/convolution/boundary_extend.pyx":249 + * iii = int_min(int_max(ii, 0), nx - 1) + * jjj = int_min(int_max(jj, 0), ny - 1) + * kkk = int_min(int_max(kk, 0), nz - 1) # <<<<<<<<<<<<<< + * val = fixed[iii, jjj, kkk] + * ker = g[(wkx + ii - i), + */ + __pyx_v_kkk = __pyx_f_7astropy_11convolution_15boundary_extend_int_min(__pyx_f_7astropy_11convolution_15boundary_extend_int_max(__pyx_v_kk, 0), (__pyx_v_nz - 1)); + + /* "astropy/convolution/boundary_extend.pyx":250 + * jjj = int_min(int_max(jj, 0), ny - 1) + * kkk = int_min(int_max(kk, 0), nz - 1) + * val = fixed[iii, jjj, kkk] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j), + */ + __pyx_t_51 = __pyx_v_iii; + __pyx_t_52 = __pyx_v_jjj; + __pyx_t_53 = __pyx_v_kkk; + __pyx_v_val = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, 
__pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_51, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_52, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_53, __pyx_pybuffernd_fixed.diminfo[2].strides)); + + /* "astropy/convolution/boundary_extend.pyx":253 + * ker = g[(wkx + ii - i), + * (wky + jj - j), + * (wkz + kk - k)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_54 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_55 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_t_56 = ((unsigned int)((__pyx_v_wkz + __pyx_v_kk) - __pyx_v_k)); + __pyx_v_ker = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_54, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_55, __pyx_pybuffernd_g.diminfo[1].strides, __pyx_t_56, __pyx_pybuffernd_g.diminfo[2].strides)); + + /* "astropy/convolution/boundary_extend.pyx":254 + * (wky + jj - j), + * (wkz + kk - k)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_4 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_extend.pyx":255 + * (wkz + kk - k)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_extend.pyx":256 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + * conv[i, j, k] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L32; + } + __pyx_L32:; + } + } + } + + /* "astropy/convolution/boundary_extend.pyx":257 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i, j, k] = top / bot + * else: + */ + __pyx_t_4 = (__pyx_v_bot != 0.0); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_extend.pyx":258 + * bot += ker + * if bot != 0: + * conv[i, j, k] 
= top / bot # <<<<<<<<<<<<<< + * else: + * conv[i, j, k] = fixed[i, j, k] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 258; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_57 = __pyx_v_i; + __pyx_t_58 = __pyx_v_j; + __pyx_t_59 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_57, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_58, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_59, __pyx_pybuffernd_conv.diminfo[2].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L33; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":260 + * conv[i, j, k] = top / bot + * else: + * conv[i, j, k] = fixed[i, j, k] # <<<<<<<<<<<<<< + * else: + * conv[i, j, k] = fixed[i, j, k] + */ + __pyx_t_60 = __pyx_v_i; + __pyx_t_61 = __pyx_v_j; + __pyx_t_62 = __pyx_v_k; + __pyx_t_63 = __pyx_v_i; + __pyx_t_64 = __pyx_v_j; + __pyx_t_65 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_63, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_64, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_65, __pyx_pybuffernd_conv.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_60, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_61, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_62, __pyx_pybuffernd_fixed.diminfo[2].strides)); + } + __pyx_L33:; + goto __pyx_L25; + } + /*else*/ { + + /* "astropy/convolution/boundary_extend.pyx":262 + * conv[i, j, k] = fixed[i, j, k] + * else: + * conv[i, j, k] = fixed[i, j, k] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_66 = __pyx_v_i; + __pyx_t_67 = __pyx_v_j; + __pyx_t_68 = __pyx_v_k; + __pyx_t_69 = __pyx_v_i; + 
__pyx_t_70 = __pyx_v_j; + __pyx_t_71 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_69, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_70, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_71, __pyx_pybuffernd_conv.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_15boundary_extend_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_66, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_67, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_68, __pyx_pybuffernd_fixed.diminfo[2].strides)); + } + __pyx_L25:; + } + } + } + + /* "astropy/convolution/boundary_extend.pyx":264 + * conv[i, j, k] = fixed[i, j, k] + * + * return conv # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_extend.convolve3d_boundary_extend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + 
__Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0); + __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags)); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":194 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fullfill the PEP. 
+ */ + +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_v_copy_shape; + int __pyx_v_i; + int __pyx_v_ndim; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + int __pyx_v_t; + char *__pyx_v_f; + PyArray_Descr *__pyx_v_descr = 0; + int __pyx_v_offset; + int __pyx_v_hasfields; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + char *__pyx_t_9; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__getbuffer__", 0); + if (__pyx_v_info != NULL) { + __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(__pyx_v_info->obj); + } + + /* "numpy.pxd":200 + * # of flags + * + * if info == NULL: return # <<<<<<<<<<<<<< + * + * cdef int copy_shape, i, ndim + */ + __pyx_t_1 = (__pyx_v_info == NULL); + if (__pyx_t_1) { + __pyx_r = 0; + goto __pyx_L0; + goto __pyx_L3; + } + __pyx_L3:; + + /* "numpy.pxd":203 + * + * cdef int copy_shape, i, ndim + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + */ + __pyx_v_endian_detector = 1; + + /* "numpy.pxd":204 + * cdef int copy_shape, i, ndim + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * + * ndim = PyArray_NDIM(self) + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "numpy.pxd":206 + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); + + /* "numpy.pxd":208 + * ndim = PyArray_NDIM(self) + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * copy_shape = 1 + * else: + */ + 
__pyx_t_1 = ((sizeof(npy_intp)) != (sizeof(Py_ssize_t))); + if (__pyx_t_1) { + + /* "numpy.pxd":209 + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * copy_shape = 1 # <<<<<<<<<<<<<< + * else: + * copy_shape = 0 + */ + __pyx_v_copy_shape = 1; + goto __pyx_L4; + } + /*else*/ { + + /* "numpy.pxd":211 + * copy_shape = 1 + * else: + * copy_shape = 0 # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + */ + __pyx_v_copy_shape = 0; + } + __pyx_L4:; + + /* "numpy.pxd":213 + * copy_shape = 0 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + __pyx_t_1 = ((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS); + if (__pyx_t_1) { + + /* "numpy.pxd":214 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not C contiguous") + * + */ + __pyx_t_2 = (!PyArray_CHKFLAGS(__pyx_v_self, NPY_C_CONTIGUOUS)); + __pyx_t_3 = __pyx_t_2; + } else { + __pyx_t_3 = __pyx_t_1; + } + if (__pyx_t_3) { + + /* "numpy.pxd":215 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_6), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L5; + } + __pyx_L5:; + + /* "numpy.pxd":217 + * 
raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + __pyx_t_3 = ((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS); + if (__pyx_t_3) { + + /* "numpy.pxd":218 + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not Fortran contiguous") + * + */ + __pyx_t_1 = (!PyArray_CHKFLAGS(__pyx_v_self, NPY_F_CONTIGUOUS)); + __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_3; + } + if (__pyx_t_2) { + + /* "numpy.pxd":219 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_8), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L6; + } + __pyx_L6:; + + /* "numpy.pxd":221 + * raise ValueError(u"ndarray is not Fortran contiguous") + * + * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< + * info.ndim = ndim + * if copy_shape: + */ + __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); + + /* "numpy.pxd":222 + * + * info.buf = PyArray_DATA(self) + * info.ndim = ndim # <<<<<<<<<<<<<< + * if copy_shape: + * # Allocate new buffer for strides and shape info. 
+ */ + __pyx_v_info->ndim = __pyx_v_ndim; + + /* "numpy.pxd":223 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if copy_shape: # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + */ + if (__pyx_v_copy_shape) { + + /* "numpy.pxd":226 + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) # <<<<<<<<<<<<<< + * info.shape = info.strides + ndim + * for i in range(ndim): + */ + __pyx_v_info->strides = ((Py_ssize_t *)malloc((((sizeof(Py_ssize_t)) * ((size_t)__pyx_v_ndim)) * 2))); + + /* "numpy.pxd":227 + * # This is allocated as one block, strides first. + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) + * info.shape = info.strides + ndim # <<<<<<<<<<<<<< + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + */ + __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); + + /* "numpy.pxd":228 + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) + * info.shape = info.strides + ndim + * for i in range(ndim): # <<<<<<<<<<<<<< + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] + */ + __pyx_t_5 = __pyx_v_ndim; + for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { + __pyx_v_i = __pyx_t_6; + + /* "numpy.pxd":229 + * info.shape = info.strides + ndim + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + */ + (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); + + /* "numpy.pxd":230 + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< + * else: + * info.strides = PyArray_STRIDES(self) + */ + (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); + } + goto __pyx_L7; + } + 
/*else*/ { + + /* "numpy.pxd":232 + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + */ + __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); + + /* "numpy.pxd":233 + * else: + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + */ + __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self)); + } + __pyx_L7:; + + /* "numpy.pxd":234 + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL # <<<<<<<<<<<<<< + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) + */ + __pyx_v_info->suboffsets = NULL; + + /* "numpy.pxd":235 + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< + * info.readonly = not PyArray_ISWRITEABLE(self) + * + */ + __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); + + /* "numpy.pxd":236 + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< + * + * cdef int t + */ + __pyx_v_info->readonly = (!PyArray_ISWRITEABLE(__pyx_v_self)); + + /* "numpy.pxd":239 + * + * cdef int t + * cdef char* f = NULL # <<<<<<<<<<<<<< + * cdef dtype descr = self.descr + * cdef list stack + */ + __pyx_v_f = NULL; + + /* "numpy.pxd":240 + * cdef int t + * cdef char* f = NULL + * cdef dtype descr = self.descr # <<<<<<<<<<<<<< + * cdef list stack + * cdef int offset + */ + __pyx_t_4 = ((PyObject *)__pyx_v_self->descr); + __Pyx_INCREF(__pyx_t_4); + __pyx_v_descr = ((PyArray_Descr *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "numpy.pxd":244 + * cdef int offset + * + * cdef bint hasfields = PyDataType_HASFIELDS(descr) # <<<<<<<<<<<<<< + * + * if not hasfields and not copy_shape: + */ + 
__pyx_v_hasfields = PyDataType_HASFIELDS(__pyx_v_descr); + + /* "numpy.pxd":246 + * cdef bint hasfields = PyDataType_HASFIELDS(descr) + * + * if not hasfields and not copy_shape: # <<<<<<<<<<<<<< + * # do not call releasebuffer + * info.obj = None + */ + __pyx_t_2 = (!__pyx_v_hasfields); + if (__pyx_t_2) { + __pyx_t_3 = (!__pyx_v_copy_shape); + __pyx_t_1 = __pyx_t_3; + } else { + __pyx_t_1 = __pyx_t_2; + } + if (__pyx_t_1) { + + /* "numpy.pxd":248 + * if not hasfields and not copy_shape: + * # do not call releasebuffer + * info.obj = None # <<<<<<<<<<<<<< + * else: + * # need to call releasebuffer + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = Py_None; + goto __pyx_L10; + } + /*else*/ { + + /* "numpy.pxd":251 + * else: + * # need to call releasebuffer + * info.obj = self # <<<<<<<<<<<<<< + * + * if not hasfields: + */ + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = ((PyObject *)__pyx_v_self); + } + __pyx_L10:; + + /* "numpy.pxd":253 + * info.obj = self + * + * if not hasfields: # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + __pyx_t_1 = (!__pyx_v_hasfields); + if (__pyx_t_1) { + + /* "numpy.pxd":254 + * + * if not hasfields: + * t = descr.type_num # <<<<<<<<<<<<<< + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + */ + __pyx_t_5 = __pyx_v_descr->type_num; + __pyx_v_t = __pyx_t_5; + + /* "numpy.pxd":255 + * if not hasfields: + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_1 = (__pyx_v_descr->byteorder == '>'); + if (__pyx_t_1) { + __pyx_t_2 = 
__pyx_v_little_endian; + } else { + __pyx_t_2 = __pyx_t_1; + } + if (!__pyx_t_2) { + + /* "numpy.pxd":256 + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + */ + __pyx_t_1 = (__pyx_v_descr->byteorder == '<'); + if (__pyx_t_1) { + __pyx_t_3 = (!__pyx_v_little_endian); + __pyx_t_7 = __pyx_t_3; + } else { + __pyx_t_7 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_7; + } else { + __pyx_t_1 = __pyx_t_2; + } + if (__pyx_t_1) { + + /* "numpy.pxd":257 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_10), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L12; + } + __pyx_L12:; + + /* "numpy.pxd":258 + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + */ + __pyx_t_1 = (__pyx_v_t == NPY_BYTE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__b; + goto __pyx_L13; + } + + /* "numpy.pxd":259 + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + */ + __pyx_t_1 = (__pyx_v_t == NPY_UBYTE); + if (__pyx_t_1) { 
+ __pyx_v_f = __pyx_k__B; + goto __pyx_L13; + } + + /* "numpy.pxd":260 + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + */ + __pyx_t_1 = (__pyx_v_t == NPY_SHORT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__h; + goto __pyx_L13; + } + + /* "numpy.pxd":261 + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + */ + __pyx_t_1 = (__pyx_v_t == NPY_USHORT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__H; + goto __pyx_L13; + } + + /* "numpy.pxd":262 + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + */ + __pyx_t_1 = (__pyx_v_t == NPY_INT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__i; + goto __pyx_L13; + } + + /* "numpy.pxd":263 + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + */ + __pyx_t_1 = (__pyx_v_t == NPY_UINT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__I; + goto __pyx_L13; + } + + /* "numpy.pxd":264 + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__l; + goto __pyx_L13; + } + + /* "numpy.pxd":265 + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + */ + __pyx_t_1 = (__pyx_v_t == NPY_ULONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__L; + goto __pyx_L13; + } + + /* "numpy.pxd":266 + * elif t == NPY_LONG: f = "l" + * 
elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONGLONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__q; + goto __pyx_L13; + } + + /* "numpy.pxd":267 + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + */ + __pyx_t_1 = (__pyx_v_t == NPY_ULONGLONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Q; + goto __pyx_L13; + } + + /* "numpy.pxd":268 + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + */ + __pyx_t_1 = (__pyx_v_t == NPY_FLOAT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__f; + goto __pyx_L13; + } + + /* "numpy.pxd":269 + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + */ + __pyx_t_1 = (__pyx_v_t == NPY_DOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__d; + goto __pyx_L13; + } + + /* "numpy.pxd":270 + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONGDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__g; + goto __pyx_L13; + } + + /* "numpy.pxd":271 + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + */ + __pyx_t_1 = (__pyx_v_t == NPY_CFLOAT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zf; + goto __pyx_L13; + } + + /* "numpy.pxd":272 + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t 
== NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" + */ + __pyx_t_1 = (__pyx_v_t == NPY_CDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zd; + goto __pyx_L13; + } + + /* "numpy.pxd":273 + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f = "O" + * else: + */ + __pyx_t_1 = (__pyx_v_t == NPY_CLONGDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zg; + goto __pyx_L13; + } + + /* "numpy.pxd":274 + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_1 = (__pyx_v_t == NPY_OBJECT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__O; + goto __pyx_L13; + } + /*else*/ { + + /* "numpy.pxd":276 + * elif t == NPY_OBJECT: f = "O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * info.format = f + * return + */ + __pyx_t_4 = PyInt_FromLong(__pyx_v_t); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = PyNumber_Remainder(((PyObject *)__pyx_kp_u_11), __pyx_t_4); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_8)); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)__pyx_t_8)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_8)); + __pyx_t_8 = 0; + __pyx_t_8 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_t_4), NULL); if 
(unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_L13:; + + /* "numpy.pxd":277 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f # <<<<<<<<<<<<<< + * return + * else: + */ + __pyx_v_info->format = __pyx_v_f; + + /* "numpy.pxd":278 + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f + * return # <<<<<<<<<<<<<< + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) + */ + __pyx_r = 0; + goto __pyx_L0; + goto __pyx_L11; + } + /*else*/ { + + /* "numpy.pxd":280 + * return + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + */ + __pyx_v_info->format = ((char *)malloc(255)); + + /* "numpy.pxd":281 + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, + */ + (__pyx_v_info->format[0]) = '^'; + + /* "numpy.pxd":282 + * info.format = stdlib.malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 # <<<<<<<<<<<<<< + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + */ + __pyx_v_offset = 0; + + /* "numpy.pxd":285 + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + * &offset) # <<<<<<<<<<<<<< + * f[0] = c'\0' # Terminate format string + * + */ + __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), 
(__pyx_v_info->format + 255), (&__pyx_v_offset)); if (unlikely(__pyx_t_9 == NULL)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 283; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_v_f = __pyx_t_9; + + /* "numpy.pxd":286 + * info.format + _buffer_format_string_len, + * &offset) + * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + */ + (__pyx_v_f[0]) = '\x00'; + } + __pyx_L11:; + + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + if (__pyx_v_info != NULL && __pyx_v_info->obj != NULL) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = NULL; + } + goto __pyx_L2; + __pyx_L0:; + if (__pyx_v_info != NULL && __pyx_v_info->obj == Py_None) { + __Pyx_GOTREF(Py_None); + __Pyx_DECREF(Py_None); __pyx_v_info->obj = NULL; + } + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_descr); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0); + __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info)); + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":288 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) + */ + +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + int 
__pyx_t_1; + __Pyx_RefNannySetupContext("__releasebuffer__", 0); + + /* "numpy.pxd":289 + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_t_1 = PyArray_HASFIELDS(__pyx_v_self); + if (__pyx_t_1) { + + /* "numpy.pxd":290 + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * stdlib.free(info.strides) + */ + free(__pyx_v_info->format); + goto __pyx_L3; + } + __pyx_L3:; + + /* "numpy.pxd":291 + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * stdlib.free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + __pyx_t_1 = ((sizeof(npy_intp)) != (sizeof(Py_ssize_t))); + if (__pyx_t_1) { + + /* "numpy.pxd":292 + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * stdlib.free(info.strides) # <<<<<<<<<<<<<< + * # info.shape was stored after info.strides in the same block + * + */ + free(__pyx_v_info->strides); + goto __pyx_L4; + } + __pyx_L4:; + + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":768 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); + + /* "numpy.pxd":769 + * + * cdef inline object PyArray_MultiIterNew1(a): + * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew2(a, b): + */ + 
__Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 769; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":771 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); + + /* "numpy.pxd":772 + * + * cdef inline object PyArray_MultiIterNew2(a, b): + * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 772; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":774 + * 
return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); + + /* "numpy.pxd":775 + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 775; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":777 + * return PyArray_MultiIterNew(3, a, b, c) + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); + + /* 
"numpy.pxd":778 + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 778; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":780 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); + + /* "numpy.pxd":781 + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; 
__pyx_lineno = 781; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":783 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. + */ + +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) { + PyArray_Descr *__pyx_v_child = 0; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + PyObject *__pyx_v_fields = 0; + PyObject *__pyx_v_childname = NULL; + PyObject *__pyx_v_new_offset = NULL; + PyObject *__pyx_v_t = NULL; + char *__pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *(*__pyx_t_6)(PyObject *); + int __pyx_t_7; + int __pyx_t_8; + int __pyx_t_9; + int __pyx_t_10; + long __pyx_t_11; + char *__pyx_t_12; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_util_dtypestring", 0); + + /* "numpy.pxd":790 + * cdef int delta_offset + * cdef tuple i + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * cdef tuple fields + */ + __pyx_v_endian_detector = 1; + + /* "numpy.pxd":791 + * cdef tuple i + * cdef int endian_detector = 1 + * cdef bint little_endian = 
((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * cdef tuple fields + * + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "numpy.pxd":794 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + if (unlikely(((PyObject *)__pyx_v_descr->names) == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_1 = ((PyObject *)__pyx_v_descr->names); __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_COMPILING_IN_CPYTHON + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + __Pyx_XDECREF(__pyx_v_childname); + __pyx_v_childname = __pyx_t_3; + __pyx_t_3 = 0; + + /* "numpy.pxd":795 + * + * for childname in descr.names: + * fields = descr.fields[childname] # <<<<<<<<<<<<<< + * child, new_offset = fields + * + */ + __pyx_t_3 = PyObject_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (!__pyx_t_3) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 795; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected tuple, got %.200s", Py_TYPE(__pyx_t_3)->tp_name), 0))) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 795; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_XDECREF(((PyObject *)__pyx_v_fields)); + __pyx_v_fields = ((PyObject*)__pyx_t_3); + 
__pyx_t_3 = 0; + + /* "numpy.pxd":796 + * for childname in descr.names: + * fields = descr.fields[childname] + * child, new_offset = fields # <<<<<<<<<<<<<< + * + * if (end - f) - (new_offset - offset[0]) < 15: + */ + if (likely(PyTuple_CheckExact(((PyObject *)__pyx_v_fields)))) { + PyObject* sequence = ((PyObject *)__pyx_v_fields); + #if CYTHON_COMPILING_IN_CPYTHON + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #if CYTHON_COMPILING_IN_CPYTHON + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + #endif + } else if (1) { + __Pyx_RaiseNoneNotIterableError(); {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else + { + Py_ssize_t index = -1; + __pyx_t_5 = PyObject_GetIter(((PyObject *)__pyx_v_fields)); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = Py_TYPE(__pyx_t_5)->tp_iternext; + index = 0; __pyx_t_3 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_3)) goto __pyx_L5_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 1; __pyx_t_4 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_4)) goto __pyx_L5_unpacking_failed; + 
__Pyx_GOTREF(__pyx_t_4); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_5), 2) < 0) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_6 = NULL; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L6_unpacking_done; + __pyx_L5_unpacking_failed:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_6 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_L6_unpacking_done:; + } + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_XDECREF(((PyObject *)__pyx_v_child)); + __pyx_v_child = ((PyArray_Descr *)__pyx_t_3); + __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_v_new_offset); + __pyx_v_new_offset = __pyx_t_4; + __pyx_t_4 = 0; + + /* "numpy.pxd":798 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + __pyx_t_4 = PyInt_FromLong((__pyx_v_end - __pyx_v_f)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyInt_FromLong((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_3); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyNumber_Subtract(__pyx_t_4, __pyx_t_5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = PyObject_RichCompare(__pyx_t_3, __pyx_int_15, Py_LT); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + + /* "numpy.pxd":799 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_RuntimeError, ((PyObject *)__pyx_k_tuple_13), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L7; + } + __pyx_L7:; + + /* "numpy.pxd":801 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_7 = (__pyx_v_child->byteorder == '>'); + if (__pyx_t_7) { + __pyx_t_8 = __pyx_v_little_endian; + } else { + __pyx_t_8 = __pyx_t_7; + } + if (!__pyx_t_8) { + + /* "numpy.pxd":802 + * + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise 
ValueError(u"Non-native byte order not supported") + * # One could encode it in the format string and have Cython + */ + __pyx_t_7 = (__pyx_v_child->byteorder == '<'); + if (__pyx_t_7) { + __pyx_t_9 = (!__pyx_v_little_endian); + __pyx_t_10 = __pyx_t_9; + } else { + __pyx_t_10 = __pyx_t_7; + } + __pyx_t_7 = __pyx_t_10; + } else { + __pyx_t_7 = __pyx_t_8; + } + if (__pyx_t_7) { + + /* "numpy.pxd":803 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_14), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L8; + } + __pyx_L8:; + + /* "numpy.pxd":813 + * + * # Output padding bytes + * while offset[0] < new_offset: # <<<<<<<<<<<<<< + * f[0] = 120 # "x"; pad byte + * f += 1 + */ + while (1) { + __pyx_t_5 = PyInt_FromLong((__pyx_v_offset[0])); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_5, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!__pyx_t_7) break; + + /* "numpy.pxd":814 + * # Output padding bytes + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< + * f += 1 + * offset[0] += 1 + */ + (__pyx_v_f[0]) = 120; + + /* "numpy.pxd":815 + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte + * f += 1 # <<<<<<<<<<<<<< + * offset[0] += 1 + * + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "numpy.pxd":816 + * f[0] = 120 # "x"; pad byte + * f += 1 + * offset[0] += 1 # <<<<<<<<<<<<<< + * + * offset[0] += child.itemsize + */ + __pyx_t_11 = 0; + (__pyx_v_offset[__pyx_t_11]) = ((__pyx_v_offset[__pyx_t_11]) + 1); + } + + /* "numpy.pxd":818 + * offset[0] += 1 + * + * offset[0] += child.itemsize # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(child): + */ + __pyx_t_11 = 0; + (__pyx_v_offset[__pyx_t_11]) = ((__pyx_v_offset[__pyx_t_11]) + __pyx_v_child->elsize); + + /* "numpy.pxd":820 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + __pyx_t_7 = (!PyDataType_HASFIELDS(__pyx_v_child)); + if (__pyx_t_7) { + + /* "numpy.pxd":821 + * + * if not PyDataType_HASFIELDS(child): + * t = child.type_num # <<<<<<<<<<<<<< + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") + */ + __pyx_t_3 = PyInt_FromLong(__pyx_v_child->type_num); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 821; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_v_t); + __pyx_v_t = __pyx_t_3; + __pyx_t_3 = 0; + + /* "numpy.pxd":822 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + __pyx_t_7 = ((__pyx_v_end - __pyx_v_f) < 5); + if (__pyx_t_7) { + + /* "numpy.pxd":823 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated 
too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_t_3 = PyObject_Call(__pyx_builtin_RuntimeError, ((PyObject *)__pyx_k_tuple_16), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L12; + } + __pyx_L12:; + + /* "numpy.pxd":826 + * + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + */ + __pyx_t_3 = PyInt_FromLong(NPY_BYTE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 98; + goto __pyx_L13; + } + + /* "numpy.pxd":827 + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + */ + __pyx_t_5 = PyInt_FromLong(NPY_UBYTE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = 
PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 66; + goto __pyx_L13; + } + + /* "numpy.pxd":828 + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + */ + __pyx_t_3 = PyInt_FromLong(NPY_SHORT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 104; + goto __pyx_L13; + } + + /* "numpy.pxd":829 + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + */ + __pyx_t_5 = PyInt_FromLong(NPY_USHORT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, 
Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 72; + goto __pyx_L13; + } + + /* "numpy.pxd":830 + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + */ + __pyx_t_3 = PyInt_FromLong(NPY_INT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 105; + goto __pyx_L13; + } + + /* "numpy.pxd":831 + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + */ + __pyx_t_5 = PyInt_FromLong(NPY_UINT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if 
(unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 73; + goto __pyx_L13; + } + + /* "numpy.pxd":832 + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONG); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 108; + goto __pyx_L13; + } + + /* "numpy.pxd":833 + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + */ + __pyx_t_5 = PyInt_FromLong(NPY_ULONG); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = 
__pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 76; + goto __pyx_L13; + } + + /* "numpy.pxd":834 + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONGLONG); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 113; + goto __pyx_L13; + } + + /* "numpy.pxd":835 + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + */ + __pyx_t_5 = PyInt_FromLong(NPY_ULONGLONG); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; 
__pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 81; + goto __pyx_L13; + } + + /* "numpy.pxd":836 + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + */ + __pyx_t_3 = PyInt_FromLong(NPY_FLOAT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 102; + goto __pyx_L13; + } + + /* "numpy.pxd":837 + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + */ + __pyx_t_5 = PyInt_FromLong(NPY_DOUBLE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = 
__pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 100; + goto __pyx_L13; + } + + /* "numpy.pxd":838 + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 103; + goto __pyx_L13; + } + + /* "numpy.pxd":839 + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + */ + __pyx_t_5 = PyInt_FromLong(NPY_CFLOAT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, 
__pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 102; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":840 + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" + */ + __pyx_t_3 = PyInt_FromLong(NPY_CDOUBLE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 100; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":841 + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + */ + __pyx_t_5 = PyInt_FromLong(NPY_CLONGDOUBLE); if 
(unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 103; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":842 + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_3 = PyInt_FromLong(NPY_OBJECT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 79; + goto __pyx_L13; + } + /*else*/ { + + /* "numpy.pxd":844 + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * f += 1 + * else: + */ + __pyx_t_5 = 
PyNumber_Remainder(((PyObject *)__pyx_kp_u_11), __pyx_v_t); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_L13:; + + /* "numpy.pxd":845 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * f += 1 # <<<<<<<<<<<<<< + * else: + * # Cython ignores struct boundary information ("T{...}"), + */ + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L11; + } + /*else*/ { + + /* "numpy.pxd":849 + * # Cython ignores struct boundary information ("T{...}"), + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< + * return f + * + */ + __pyx_t_12 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_12 == NULL)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 849; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_v_f = __pyx_t_12; + } + __pyx_L11:; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "numpy.pxd":850 + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) + * return f # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_v_f; + goto __pyx_L0; + + __pyx_r = 
0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_child); + __Pyx_XDECREF(__pyx_v_fields); + __Pyx_XDECREF(__pyx_v_childname); + __Pyx_XDECREF(__pyx_v_new_offset); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":965 + * + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * cdef PyObject* baseptr + * if base is None: + */ + +static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_arr, PyObject *__pyx_v_base) { + PyObject *__pyx_v_baseptr; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("set_array_base", 0); + + /* "numpy.pxd":967 + * cdef inline void set_array_base(ndarray arr, object base): + * cdef PyObject* baseptr + * if base is None: # <<<<<<<<<<<<<< + * baseptr = NULL + * else: + */ + __pyx_t_1 = (__pyx_v_base == Py_None); + if (__pyx_t_1) { + + /* "numpy.pxd":968 + * cdef PyObject* baseptr + * if base is None: + * baseptr = NULL # <<<<<<<<<<<<<< + * else: + * Py_INCREF(base) # important to do this before decref below! + */ + __pyx_v_baseptr = NULL; + goto __pyx_L3; + } + /*else*/ { + + /* "numpy.pxd":970 + * baseptr = NULL + * else: + * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< + * baseptr = base + * Py_XDECREF(arr.base) + */ + Py_INCREF(__pyx_v_base); + + /* "numpy.pxd":971 + * else: + * Py_INCREF(base) # important to do this before decref below! + * baseptr = base # <<<<<<<<<<<<<< + * Py_XDECREF(arr.base) + * arr.base = baseptr + */ + __pyx_v_baseptr = ((PyObject *)__pyx_v_base); + } + __pyx_L3:; + + /* "numpy.pxd":972 + * Py_INCREF(base) # important to do this before decref below! 
+ * baseptr = base + * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< + * arr.base = baseptr + * + */ + Py_XDECREF(__pyx_v_arr->base); + + /* "numpy.pxd":973 + * baseptr = base + * Py_XDECREF(arr.base) + * arr.base = baseptr # <<<<<<<<<<<<<< + * + * cdef inline object get_array_base(ndarray arr): + */ + __pyx_v_arr->base = __pyx_v_baseptr; + + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":975 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("get_array_base", 0); + + /* "numpy.pxd":976 + * + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: # <<<<<<<<<<<<<< + * return None + * else: + */ + __pyx_t_1 = (__pyx_v_arr->base == NULL); + if (__pyx_t_1) { + + /* "numpy.pxd":977 + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: + * return None # <<<<<<<<<<<<<< + * else: + * return arr.base + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L0; + goto __pyx_L3; + } + /*else*/ { + + /* "numpy.pxd":979 + * return None + * else: + * return arr.base # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_arr->base)); + __pyx_r = ((PyObject *)__pyx_v_arr->base); + goto __pyx_L0; + } + __pyx_L3:; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef __pyx_moduledef = { + #if PY_VERSION_HEX < 0x03020000 + { PyObject_HEAD_INIT(NULL) NULL, 0, NULL }, + #else + PyModuleDef_HEAD_INIT, + #endif + __Pyx_NAMESTR("boundary_extend"), + 0, /* m_doc */ + -1, /* m_size */ + __pyx_methods /* 
m_methods */, + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_1, __pyx_k_1, sizeof(__pyx_k_1), 0, 0, 1, 0}, + {&__pyx_kp_u_11, __pyx_k_11, sizeof(__pyx_k_11), 0, 1, 0, 0}, + {&__pyx_kp_u_12, __pyx_k_12, sizeof(__pyx_k_12), 0, 1, 0, 0}, + {&__pyx_kp_u_15, __pyx_k_15, sizeof(__pyx_k_15), 0, 1, 0, 0}, + {&__pyx_n_s_19, __pyx_k_19, sizeof(__pyx_k_19), 0, 0, 1, 1}, + {&__pyx_kp_s_20, __pyx_k_20, sizeof(__pyx_k_20), 0, 0, 1, 0}, + {&__pyx_n_s_21, __pyx_k_21, sizeof(__pyx_k_21), 0, 0, 1, 1}, + {&__pyx_n_s_24, __pyx_k_24, sizeof(__pyx_k_24), 0, 0, 1, 1}, + {&__pyx_n_s_27, __pyx_k_27, sizeof(__pyx_k_27), 0, 0, 1, 1}, + {&__pyx_kp_u_5, __pyx_k_5, sizeof(__pyx_k_5), 0, 1, 0, 0}, + {&__pyx_kp_u_7, __pyx_k_7, sizeof(__pyx_k_7), 0, 1, 0, 0}, + {&__pyx_kp_u_9, __pyx_k_9, sizeof(__pyx_k_9), 0, 1, 0, 0}, + {&__pyx_n_s__DTYPE, __pyx_k__DTYPE, sizeof(__pyx_k__DTYPE), 0, 0, 1, 1}, + {&__pyx_n_s__RuntimeError, __pyx_k__RuntimeError, sizeof(__pyx_k__RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s__ValueError, __pyx_k__ValueError, sizeof(__pyx_k__ValueError), 0, 0, 1, 1}, + {&__pyx_n_s____main__, __pyx_k____main__, sizeof(__pyx_k____main__), 0, 0, 1, 1}, + {&__pyx_n_s____test__, __pyx_k____test__, sizeof(__pyx_k____test__), 0, 0, 1, 1}, + {&__pyx_n_s__bot, __pyx_k__bot, sizeof(__pyx_k__bot), 0, 0, 1, 1}, + {&__pyx_n_s__conv, __pyx_k__conv, sizeof(__pyx_k__conv), 0, 0, 1, 1}, + {&__pyx_n_s__dtype, __pyx_k__dtype, sizeof(__pyx_k__dtype), 0, 0, 1, 1}, + {&__pyx_n_s__empty, __pyx_k__empty, sizeof(__pyx_k__empty), 0, 0, 1, 1}, + {&__pyx_n_s__f, __pyx_k__f, sizeof(__pyx_k__f), 0, 0, 1, 1}, + {&__pyx_n_s__fixed, __pyx_k__fixed, sizeof(__pyx_k__fixed), 0, 0, 1, 1}, + {&__pyx_n_s__float, __pyx_k__float, sizeof(__pyx_k__float), 0, 0, 1, 1}, + {&__pyx_n_s__g, __pyx_k__g, sizeof(__pyx_k__g), 0, 0, 1, 1}, + {&__pyx_n_s__i, __pyx_k__i, sizeof(__pyx_k__i), 0, 0, 1, 1}, + {&__pyx_n_s__ii, 
__pyx_k__ii, sizeof(__pyx_k__ii), 0, 0, 1, 1}, + {&__pyx_n_s__iii, __pyx_k__iii, sizeof(__pyx_k__iii), 0, 0, 1, 1}, + {&__pyx_n_s__iimax, __pyx_k__iimax, sizeof(__pyx_k__iimax), 0, 0, 1, 1}, + {&__pyx_n_s__iimin, __pyx_k__iimin, sizeof(__pyx_k__iimin), 0, 0, 1, 1}, + {&__pyx_n_s__j, __pyx_k__j, sizeof(__pyx_k__j), 0, 0, 1, 1}, + {&__pyx_n_s__jj, __pyx_k__jj, sizeof(__pyx_k__jj), 0, 0, 1, 1}, + {&__pyx_n_s__jjj, __pyx_k__jjj, sizeof(__pyx_k__jjj), 0, 0, 1, 1}, + {&__pyx_n_s__jjmax, __pyx_k__jjmax, sizeof(__pyx_k__jjmax), 0, 0, 1, 1}, + {&__pyx_n_s__jjmin, __pyx_k__jjmin, sizeof(__pyx_k__jjmin), 0, 0, 1, 1}, + {&__pyx_n_s__k, __pyx_k__k, sizeof(__pyx_k__k), 0, 0, 1, 1}, + {&__pyx_n_s__ker, __pyx_k__ker, sizeof(__pyx_k__ker), 0, 0, 1, 1}, + {&__pyx_n_s__kk, __pyx_k__kk, sizeof(__pyx_k__kk), 0, 0, 1, 1}, + {&__pyx_n_s__kkk, __pyx_k__kkk, sizeof(__pyx_k__kkk), 0, 0, 1, 1}, + {&__pyx_n_s__kkmax, __pyx_k__kkmax, sizeof(__pyx_k__kkmax), 0, 0, 1, 1}, + {&__pyx_n_s__kkmin, __pyx_k__kkmin, sizeof(__pyx_k__kkmin), 0, 0, 1, 1}, + {&__pyx_n_s__nkx, __pyx_k__nkx, sizeof(__pyx_k__nkx), 0, 0, 1, 1}, + {&__pyx_n_s__nky, __pyx_k__nky, sizeof(__pyx_k__nky), 0, 0, 1, 1}, + {&__pyx_n_s__nkz, __pyx_k__nkz, sizeof(__pyx_k__nkz), 0, 0, 1, 1}, + {&__pyx_n_s__np, __pyx_k__np, sizeof(__pyx_k__np), 0, 0, 1, 1}, + {&__pyx_n_s__numpy, __pyx_k__numpy, sizeof(__pyx_k__numpy), 0, 0, 1, 1}, + {&__pyx_n_s__nx, __pyx_k__nx, sizeof(__pyx_k__nx), 0, 0, 1, 1}, + {&__pyx_n_s__ny, __pyx_k__ny, sizeof(__pyx_k__ny), 0, 0, 1, 1}, + {&__pyx_n_s__nz, __pyx_k__nz, sizeof(__pyx_k__nz), 0, 0, 1, 1}, + {&__pyx_n_s__range, __pyx_k__range, sizeof(__pyx_k__range), 0, 0, 1, 1}, + {&__pyx_n_s__top, __pyx_k__top, sizeof(__pyx_k__top), 0, 0, 1, 1}, + {&__pyx_n_s__val, __pyx_k__val, sizeof(__pyx_k__val), 0, 0, 1, 1}, + {&__pyx_n_s__wkx, __pyx_k__wkx, sizeof(__pyx_k__wkx), 0, 0, 1, 1}, + {&__pyx_n_s__wky, __pyx_k__wky, sizeof(__pyx_k__wky), 0, 0, 1, 1}, + {&__pyx_n_s__wkz, __pyx_k__wkz, sizeof(__pyx_k__wkz), 0, 0, 1, 1}, + 
{0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_ValueError = __Pyx_GetName(__pyx_b, __pyx_n_s__ValueError); if (!__pyx_builtin_ValueError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_builtin_range = __Pyx_GetName(__pyx_b, __pyx_n_s__range); if (!__pyx_builtin_range) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_builtin_RuntimeError = __Pyx_GetName(__pyx_b, __pyx_n_s__RuntimeError); if (!__pyx_builtin_RuntimeError) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "astropy/convolution/boundary_extend.pyx":23 + * + * if g.shape[0] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_2 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_2); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_2)); + + /* "astropy/convolution/boundary_extend.pyx":91 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_3 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 91; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_3); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_3)); + + /* "astropy/convolution/boundary_extend.pyx":174 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or 
g.shape[2] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_4 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_4); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_4)); + + /* "numpy.pxd":215 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_k_tuple_6 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_5)); if (unlikely(!__pyx_k_tuple_6)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_6); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_6)); + + /* "numpy.pxd":219 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_k_tuple_8 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_7)); if (unlikely(!__pyx_k_tuple_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_8); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_8)); + + /* "numpy.pxd":257 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_k_tuple_10 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_9)); if (unlikely(!__pyx_k_tuple_10)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno 
= __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_10); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_10)); + + /* "numpy.pxd":799 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_k_tuple_13 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_12)); if (unlikely(!__pyx_k_tuple_13)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_13); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_13)); + + /* "numpy.pxd":803 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_k_tuple_14 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_9)); if (unlikely(!__pyx_k_tuple_14)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_14); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_14)); + + /* "numpy.pxd":823 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_k_tuple_16 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_15)); if (unlikely(!__pyx_k_tuple_16)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_16); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_16)); + + /* "astropy/convolution/boundary_extend.pyx":19 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_extend(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * 
np.ndarray[DTYPE_t, ndim=1] g): + * + */ + __pyx_k_tuple_17 = PyTuple_Pack(16, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__iii), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_17)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_17); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_17)); + __pyx_k_codeobj_18 = (PyObject*)__Pyx_PyCode_New(2, 0, 16, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_19, 19, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_18)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + + /* "astropy/convolution/boundary_extend.pyx":87 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_extend(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g): + * + */ + __pyx_k_tuple_22 = PyTuple_Pack(24, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__ny), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__nky), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__wky), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__j), ((PyObject *)__pyx_n_s__iii), ((PyObject *)__pyx_n_s__jjj), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__jj), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject 
*)__pyx_n_s__jjmin), ((PyObject *)__pyx_n_s__jjmax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_22)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 87; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_22); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_22)); + __pyx_k_codeobj_23 = (PyObject*)__Pyx_PyCode_New(2, 0, 24, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_24, 87, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_23)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 87; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + + /* "astropy/convolution/boundary_extend.pyx":170 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_extend(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g): + * + */ + __pyx_k_tuple_25 = PyTuple_Pack(32, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__ny), ((PyObject *)__pyx_n_s__nz), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__nky), ((PyObject *)__pyx_n_s__nkz), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__wky), ((PyObject *)__pyx_n_s__wkz), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__j), ((PyObject *)__pyx_n_s__k), ((PyObject *)__pyx_n_s__iii), ((PyObject *)__pyx_n_s__jjj), ((PyObject *)__pyx_n_s__kkk), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__jj), ((PyObject *)__pyx_n_s__kk), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__jjmin), ((PyObject *)__pyx_n_s__jjmax), ((PyObject *)__pyx_n_s__kkmin), ((PyObject *)__pyx_n_s__kkmax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject 
*)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_25)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 170; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_25); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_25)); + __pyx_k_codeobj_26 = (PyObject*)__Pyx_PyCode_New(2, 0, 32, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_25, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_27, 170, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_26)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 170; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + __pyx_int_15 = PyInt_FromLong(15); if (unlikely(!__pyx_int_15)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + return 0; + __pyx_L1_error:; + return -1; +} + +#if PY_MAJOR_VERSION < 3 +PyMODINIT_FUNC initboundary_extend(void); /*proto*/ +PyMODINIT_FUNC initboundary_extend(void) +#else +PyMODINIT_FUNC PyInit_boundary_extend(void); /*proto*/ +PyMODINIT_FUNC PyInit_boundary_extend(void) +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_REFNANNY + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); + if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); + } + #endif + __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit_boundary_extend(void)", 0); + if ( __Pyx_check_binary_version() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_empty_tuple = PyTuple_New(0); if 
(unlikely(!__pyx_empty_tuple)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #ifdef __Pyx_CyFunction_USED + if (__Pyx_CyFunction_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? 
*/ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4(__Pyx_NAMESTR("boundary_extend"), __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (!PyDict_GetItemString(modules, "astropy.convolution.boundary_extend")) { + if (unlikely(PyDict_SetItemString(modules, "astropy.convolution.boundary_extend", __pyx_m) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + } + #endif + __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME)); if (unlikely(!__pyx_b)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + /*--- Initialize various global constants etc. 
---*/ + if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (__pyx_module_is_main_astropy__convolution__boundary_extend) { + if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s____main__) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + } + /*--- Builtin init code ---*/ + if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Constants init code ---*/ + if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Global init code ---*/ + /*--- Variable export code ---*/ + /*--- Function export code ---*/ + /*--- Type init code ---*/ + /*--- Type import code ---*/ + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", + #if CYTHON_COMPILING_IN_PYPY + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 9; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_dtype = __Pyx_ImportType("numpy", "dtype", sizeof(PyArray_Descr), 0); if (unlikely(!__pyx_ptype_5numpy_dtype)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 155; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_flatiter = __Pyx_ImportType("numpy", "flatiter", sizeof(PyArrayIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_flatiter)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 165; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_broadcast = __Pyx_ImportType("numpy", "broadcast", sizeof(PyArrayMultiIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_broadcast)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 169; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_ndarray = 
__Pyx_ImportType("numpy", "ndarray", sizeof(PyArrayObject), 0); if (unlikely(!__pyx_ptype_5numpy_ndarray)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 178; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_ufunc = __Pyx_ImportType("numpy", "ufunc", sizeof(PyUFuncObject), 0); if (unlikely(!__pyx_ptype_5numpy_ufunc)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 861; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Variable import code ---*/ + /*--- Function import code ---*/ + /*--- Execution code ---*/ + + /* "astropy/convolution/boundary_extend.pyx":3 + * # Licensed under a 3-clause BSD style license - see LICENSE.rst + * from __future__ import division + * import numpy as np # <<<<<<<<<<<<<< + * cimport numpy as np + * + */ + __pyx_t_1 = __Pyx_Import(((PyObject *)__pyx_n_s__numpy), 0, -1); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s__np, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "astropy/convolution/boundary_extend.pyx":6 + * cimport numpy as np + * + * DTYPE = np.float # <<<<<<<<<<<<<< + * ctypedef np.float_t DTYPE_t + * + */ + __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__float); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyObject_SetAttr(__pyx_m, __pyx_n_s__DTYPE, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + 
/* "astropy/convolution/boundary_extend.pyx":19 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_extend(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=1] g): + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_15boundary_extend_1convolve1d_boundary_extend, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_19, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_extend.pyx":87 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_extend(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g): + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_15boundary_extend_3convolve2d_boundary_extend, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 87; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_24, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 87; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_extend.pyx":170 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_extend(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g): + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_15boundary_extend_5convolve3d_boundary_extend, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 170; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_27, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 170; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_extend.pyx":1 + * # Licensed under a 3-clause BSD style license - see LICENSE.rst # <<<<<<<<<<<<<< + * from __future__ import division + * import numpy as np + */ + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_2)); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s____test__, ((PyObject *)__pyx_t_2)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + + /* "numpy.pxd":975 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + if (__pyx_m) { + __Pyx_AddTraceback("init astropy.convolution.boundary_extend", __pyx_clineno, __pyx_lineno, __pyx_filename); + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init astropy.convolution.boundary_extend"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if PY_MAJOR_VERSION < 3 + return; + #else + return __pyx_m; + #endif +} + +/* Runtime support code */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif /* CYTHON_REFNANNY */ + 
+static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name) { + PyObject *result; + result = PyObject_GetAttr(dict, name); + if (!result) { + if (dict != __pyx_b) { + PyErr_Clear(); + result = PyObject_GetAttr(__pyx_b, name); + } + if (!result) { + PyErr_SetObject(PyExc_NameError, name); + } + } + return result; +} + +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%s() takes %s %" CYTHON_FORMAT_SSIZE_T "d positional argument%s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? "" : "s", num_found); +} + +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || 
PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%s() got an unexpected keyword argument '%s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact) +{ + if (!type) { + PyErr_Format(PyExc_SystemError, "Missing type object"); + return 0; + } + if (none_allowed && obj == Py_None) return 1; + else if (exact) { + if (Py_TYPE(obj) == type) return 1; + } + else { + if (PyObject_TypeCheck(obj, type)) return 1; + } + PyErr_Format(PyExc_TypeError, + "Argument '%s' has incorrect type (expected %s, got %s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); + return 0; +} + +static CYTHON_INLINE int __Pyx_IsLittleEndian(void) { + unsigned int n = 1; + return *(unsigned char*)(&n) != 0; +} +static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx, + __Pyx_BufFmt_StackElem* stack, + __Pyx_TypeInfo* type) { + stack[0].field = &ctx->root; + stack[0].parent_offset = 0; + ctx->root.type = type; + ctx->root.name = "buffer dtype"; + ctx->root.offset = 0; + ctx->head = stack; + ctx->head->field = &ctx->root; + ctx->fmt_offset = 0; + ctx->head->parent_offset = 0; + ctx->new_packmode = '@'; + ctx->enc_packmode = '@'; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->is_complex = 0; + 
ctx->is_valid_array = 0; + ctx->struct_alignment = 0; + while (type->typegroup == 'S') { + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = 0; + type = type->fields->type; + } +} +static int __Pyx_BufFmt_ParseNumber(const char** ts) { + int count; + const char* t = *ts; + if (*t < '0' || *t > '9') { + return -1; + } else { + count = *t++ - '0'; + while (*t >= '0' && *t < '9') { + count *= 10; + count += *t++ - '0'; + } + } + *ts = t; + return count; +} +static int __Pyx_BufFmt_ExpectNumber(const char **ts) { + int number = __Pyx_BufFmt_ParseNumber(ts); + if (number == -1) /* First char was not a digit */ + PyErr_Format(PyExc_ValueError,\ + "Does not understand character buffer dtype format string ('%c')", **ts); + return number; +} +static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) { + PyErr_Format(PyExc_ValueError, + "Unexpected format string character: '%c'", ch); +} +static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) { + switch (ch) { + case 'c': return "'char'"; + case 'b': return "'signed char'"; + case 'B': return "'unsigned char'"; + case 'h': return "'short'"; + case 'H': return "'unsigned short'"; + case 'i': return "'int'"; + case 'I': return "'unsigned int'"; + case 'l': return "'long'"; + case 'L': return "'unsigned long'"; + case 'q': return "'long long'"; + case 'Q': return "'unsigned long long'"; + case 'f': return (is_complex ? "'complex float'" : "'float'"); + case 'd': return (is_complex ? "'complex double'" : "'double'"); + case 'g': return (is_complex ? 
"'complex long double'" : "'long double'"); + case 'T': return "a struct"; + case 'O': return "Python object"; + case 'P': return "a pointer"; + case 's': case 'p': return "a string"; + case 0: return "end"; + default: return "unparseable format string"; + } +} +static size_t __Pyx_BufFmt_TypeCharToStandardSize(char ch, int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return 2; + case 'i': case 'I': case 'l': case 'L': return 4; + case 'q': case 'Q': return 8; + case 'f': return (is_complex ? 8 : 4); + case 'd': return (is_complex ? 16 : 8); + case 'g': { + PyErr_SetString(PyExc_ValueError, "Python does not define a standard format string size for long double ('g').."); + return 0; + } + case 'O': case 'P': return sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +static size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) { + switch (ch) { + case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(short); + case 'i': case 'I': return sizeof(int); + case 'l': case 'L': return sizeof(long); + #ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(PY_LONG_LONG); + #endif + case 'f': return sizeof(float) * (is_complex ? 2 : 1); + case 'd': return sizeof(double) * (is_complex ? 2 : 1); + case 'g': return sizeof(long double) * (is_complex ? 
2 : 1); + case 'O': case 'P': return sizeof(void*); + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} +typedef struct { char c; short x; } __Pyx_st_short; +typedef struct { char c; int x; } __Pyx_st_int; +typedef struct { char c; long x; } __Pyx_st_long; +typedef struct { char c; float x; } __Pyx_st_float; +typedef struct { char c; double x; } __Pyx_st_double; +typedef struct { char c; long double x; } __Pyx_st_longdouble; +typedef struct { char c; void *x; } __Pyx_st_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong; +#endif +static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_st_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_st_float) - sizeof(float); + case 'd': return sizeof(__Pyx_st_double) - sizeof(double); + case 'g': return sizeof(__Pyx_st_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_st_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +/* These are for computing the padding at the end of the struct to align + on the first member of the struct. This will probably the same as above, + but we don't have any guarantees. 
+ */ +typedef struct { short x; char c; } __Pyx_pad_short; +typedef struct { int x; char c; } __Pyx_pad_int; +typedef struct { long x; char c; } __Pyx_pad_long; +typedef struct { float x; char c; } __Pyx_pad_float; +typedef struct { double x; char c; } __Pyx_pad_double; +typedef struct { long double x; char c; } __Pyx_pad_longdouble; +typedef struct { void *x; char c; } __Pyx_pad_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { PY_LONG_LONG x; char c; } __Pyx_pad_longlong; +#endif +static size_t __Pyx_BufFmt_TypeCharToPadding(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_pad_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_pad_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_pad_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_pad_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_pad_float) - sizeof(float); + case 'd': return sizeof(__Pyx_pad_double) - sizeof(double); + case 'g': return sizeof(__Pyx_pad_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_pad_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) { + switch (ch) { + case 'c': + return 'H'; + case 'b': case 'h': case 'i': + case 'l': case 'q': case 's': case 'p': + return 'I'; + case 'B': case 'H': case 'I': case 'L': case 'Q': + return 'U'; + case 'f': case 'd': case 'g': + return (is_complex ? 
'C' : 'R'); + case 'O': + return 'O'; + case 'P': + return 'P'; + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} +static void __Pyx_BufFmt_RaiseExpected(__Pyx_BufFmt_Context* ctx) { + if (ctx->head == NULL || ctx->head->field == &ctx->root) { + const char* expected; + const char* quote; + if (ctx->head == NULL) { + expected = "end"; + quote = ""; + } else { + expected = ctx->head->field->type->name; + quote = "'"; + } + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected %s%s%s but got %s", + quote, expected, quote, + __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex)); + } else { + __Pyx_StructField* field = ctx->head->field; + __Pyx_StructField* parent = (ctx->head - 1)->field; + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected '%s' but got %s in '%s.%s'", + field->type->name, __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex), + parent->type->name, field->name); + } +} +static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) { + char group; + size_t size, offset, arraysize = 1; + if (ctx->enc_type == 0) return 0; + if (ctx->head->field->type->arraysize[0]) { + int i, ndim = 0; + if (ctx->enc_type == 's' || ctx->enc_type == 'p') { + ctx->is_valid_array = ctx->head->field->type->ndim == 1; + ndim = 1; + if (ctx->enc_count != ctx->head->field->type->arraysize[0]) { + PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %zu", + ctx->head->field->type->arraysize[0], ctx->enc_count); + return -1; + } + } + if (!ctx->is_valid_array) { + PyErr_Format(PyExc_ValueError, "Expected %d dimensions, got %d", + ctx->head->field->type->ndim, ndim); + return -1; + } + for (i = 0; i < ctx->head->field->type->ndim; i++) { + arraysize *= ctx->head->field->type->arraysize[i]; + } + ctx->is_valid_array = 0; + ctx->enc_count = 1; + } + group = __Pyx_BufFmt_TypeCharToGroup(ctx->enc_type, ctx->is_complex); + do { + __Pyx_StructField* field = ctx->head->field; + __Pyx_TypeInfo* 
type = field->type; + if (ctx->enc_packmode == '@' || ctx->enc_packmode == '^') { + size = __Pyx_BufFmt_TypeCharToNativeSize(ctx->enc_type, ctx->is_complex); + } else { + size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex); + } + if (ctx->enc_packmode == '@') { + size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex); + size_t align_mod_offset; + if (align_at == 0) return -1; + align_mod_offset = ctx->fmt_offset % align_at; + if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset; + if (ctx->struct_alignment == 0) + ctx->struct_alignment = __Pyx_BufFmt_TypeCharToPadding(ctx->enc_type, + ctx->is_complex); + } + if (type->size != size || type->typegroup != group) { + if (type->typegroup == 'C' && type->fields != NULL) { + size_t parent_offset = ctx->head->parent_offset + field->offset; + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = parent_offset; + continue; + } + if ((type->typegroup == 'H' || group == 'H') && type->size == size) { + } else { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + } + offset = ctx->head->parent_offset + field->offset; + if (ctx->fmt_offset != offset) { + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch; next field is at offset %" CYTHON_FORMAT_SSIZE_T "d but %" CYTHON_FORMAT_SSIZE_T "d expected", + (Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset); + return -1; + } + ctx->fmt_offset += size; + if (arraysize) + ctx->fmt_offset += (arraysize - 1) * size; + --ctx->enc_count; /* Consume from buffer string */ + while (1) { + if (field == &ctx->root) { + ctx->head = NULL; + if (ctx->enc_count != 0) { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + break; /* breaks both loops as ctx->enc_count == 0 */ + } + ctx->head->field = ++field; + if (field->type == NULL) { + --ctx->head; + field = ctx->head->field; + continue; + } else if (field->type->typegroup == 'S') { + size_t parent_offset = ctx->head->parent_offset + field->offset; + 
if (field->type->fields->type == NULL) continue; /* empty struct */ + field = field->type->fields; + ++ctx->head; + ctx->head->field = field; + ctx->head->parent_offset = parent_offset; + break; + } else { + break; + } + } + } while (ctx->enc_count); + ctx->enc_type = 0; + ctx->is_complex = 0; + return 0; +} +static CYTHON_INLINE PyObject * +__pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp) +{ + const char *ts = *tsp; + int i = 0, number; + int ndim = ctx->head->field->type->ndim; +; + ++ts; + if (ctx->new_count != 1) { + PyErr_SetString(PyExc_ValueError, + "Cannot handle repeated arrays in format string"); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + while (*ts && *ts != ')') { + if (isspace(*ts)) + continue; + number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + if (i < ndim && (size_t) number != ctx->head->field->type->arraysize[i]) + return PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %d", + ctx->head->field->type->arraysize[i], number); + if (*ts != ',' && *ts != ')') + return PyErr_Format(PyExc_ValueError, + "Expected a comma in format string, got '%c'", *ts); + if (*ts == ',') ts++; + i++; + } + if (i != ndim) + return PyErr_Format(PyExc_ValueError, "Expected %d dimension(s), got %d", + ctx->head->field->type->ndim, i); + if (!*ts) { + PyErr_SetString(PyExc_ValueError, + "Unexpected end of format string, expected ')'"); + return NULL; + } + ctx->is_valid_array = 1; + ctx->new_count = 1; + *tsp = ++ts; + return Py_None; +} +static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts) { + int got_Z = 0; + while (1) { + switch(*ts) { + case 0: + if (ctx->enc_type != 0 && ctx->head == NULL) { + __Pyx_BufFmt_RaiseExpected(ctx); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + if (ctx->head != NULL) { + __Pyx_BufFmt_RaiseExpected(ctx); + return NULL; + } + return ts; + case ' ': + case 10: + case 13: 
+ ++ts; + break; + case '<': + if (!__Pyx_IsLittleEndian()) { + PyErr_SetString(PyExc_ValueError, "Little-endian buffer not supported on big-endian compiler"); + return NULL; + } + ctx->new_packmode = '='; + ++ts; + break; + case '>': + case '!': + if (__Pyx_IsLittleEndian()) { + PyErr_SetString(PyExc_ValueError, "Big-endian buffer not supported on little-endian compiler"); + return NULL; + } + ctx->new_packmode = '='; + ++ts; + break; + case '=': + case '@': + case '^': + ctx->new_packmode = *ts++; + break; + case 'T': /* substruct */ + { + const char* ts_after_sub; + size_t i, struct_count = ctx->new_count; + size_t struct_alignment = ctx->struct_alignment; + ctx->new_count = 1; + ++ts; + if (*ts != '{') { + PyErr_SetString(PyExc_ValueError, "Buffer acquisition: Expected '{' after 'T'"); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; /* Erase processed last struct element */ + ctx->enc_count = 0; + ctx->struct_alignment = 0; + ++ts; + ts_after_sub = ts; + for (i = 0; i != struct_count; ++i) { + ts_after_sub = __Pyx_BufFmt_CheckString(ctx, ts); + if (!ts_after_sub) return NULL; + } + ts = ts_after_sub; + if (struct_alignment) ctx->struct_alignment = struct_alignment; + } + break; + case '}': /* end of substruct; either repeat or move on */ + { + size_t alignment = ctx->struct_alignment; + ++ts; + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; /* Erase processed last struct element */ + if (alignment && ctx->fmt_offset % alignment) { + ctx->fmt_offset += alignment - (ctx->fmt_offset % alignment); + } + } + return ts; + case 'x': + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->fmt_offset += ctx->new_count; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->enc_packmode = ctx->new_packmode; + ++ts; + break; + case 'Z': + got_Z = 1; + ++ts; + if (*ts != 'f' && *ts != 'd' && *ts != 'g') { + __Pyx_BufFmt_RaiseUnexpectedChar('Z'); + return NULL; 
+ } /* fall through */ + case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I': + case 'l': case 'L': case 'q': case 'Q': + case 'f': case 'd': case 'g': + case 'O': case 's': case 'p': + if (ctx->enc_type == *ts && got_Z == ctx->is_complex && + ctx->enc_packmode == ctx->new_packmode) { + ctx->enc_count += ctx->new_count; + } else { + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_count = ctx->new_count; + ctx->enc_packmode = ctx->new_packmode; + ctx->enc_type = *ts; + ctx->is_complex = got_Z; + } + ++ts; + ctx->new_count = 1; + got_Z = 0; + break; + case ':': + ++ts; + while(*ts != ':') ++ts; + ++ts; + break; + case '(': + if (!__pyx_buffmt_parse_array(ctx, &ts)) return NULL; + break; + default: + { + int number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + ctx->new_count = (size_t)number; + } + } + } +} +static CYTHON_INLINE void __Pyx_ZeroBuffer(Py_buffer* buf) { + buf->buf = NULL; + buf->obj = NULL; + buf->strides = __Pyx_zeros; + buf->shape = __Pyx_zeros; + buf->suboffsets = __Pyx_minusones; +} +static CYTHON_INLINE int __Pyx_GetBufferAndValidate( + Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, + int nd, int cast, __Pyx_BufFmt_StackElem* stack) +{ + if (obj == Py_None || obj == NULL) { + __Pyx_ZeroBuffer(buf); + return 0; + } + buf->buf = NULL; + if (__Pyx_GetBuffer(obj, buf, flags) == -1) goto fail; + if (buf->ndim != nd) { + PyErr_Format(PyExc_ValueError, + "Buffer has wrong number of dimensions (expected %d, got %d)", + nd, buf->ndim); + goto fail; + } + if (!cast) { + __Pyx_BufFmt_Context ctx; + __Pyx_BufFmt_Init(&ctx, stack, dtype); + if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail; + } + if ((unsigned)buf->itemsize != dtype->size) { + PyErr_Format(PyExc_ValueError, + "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "d byte%s) does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "d byte%s)", + buf->itemsize, (buf->itemsize > 1) ? 
"s" : "", + dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : ""); + goto fail; + } + if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones; + return 0; +fail:; + __Pyx_ZeroBuffer(buf); + return -1; +} +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) { + if (info->buf == NULL) return; + if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL; + __Pyx_ReleaseBuffer(info); +} + +static CYTHON_INLINE long __Pyx_mod_long(long a, long b) { + long r = a % b; + r += ((r != 0) & ((r ^ b) < 0)) * b; + return r; +} + +static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb) { +#if CYTHON_COMPILING_IN_CPYTHON + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyThreadState *tstate = PyThreadState_GET(); + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_Restore(type, value, tb); +#endif +} +static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) { +#if CYTHON_COMPILING_IN_CPYTHON + PyThreadState *tstate = PyThreadState_GET(); + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(type, value, tb); +#endif +} + +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + #if PY_VERSION_HEX < 0x02050000 + if 
(PyClass_Check(type)) { + #else + if (PyType_Check(type)) { + #endif +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + #if PY_VERSION_HEX < 0x02050000 + if (PyInstance_Check(type)) { + type = (PyObject*) ((PyInstanceObject*)type)->in_class; + Py_INCREF(type); + } + else { + type = 0; + PyErr_SetString(PyExc_TypeError, + "raise: exception must be an old-style class or instance"); + goto raise_error; + } + #else + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + #endif + } + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else /* Python 3+ */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } + else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyEval_CallObject(type, args); + Py_DECREF(args); + if (!owned_instance) + goto 
bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause && cause != Py_None) { + PyObject *fixed_cause; + if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } + else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } + else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { + PyThreadState *tstate = PyThreadState_GET(); + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +static CYTHON_INLINE long __Pyx_div_long(long a, long b) { + long q = a / b; + long r = a - q*b; + q -= ((r != 0) & ((r ^ b) < 0)); + return q; +} + +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_Format(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(PyObject_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%s to unpack", + 
index, (index == 1) ? "" : "s"); +} + +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_COMPILING_IN_CPYTHON + PyThreadState *tstate = PyThreadState_GET(); + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +#if PY_MAJOR_VERSION < 3 +static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) { + CYTHON_UNUSED PyObject *getbuffer_cobj; + #if PY_VERSION_HEX >= 0x02060000 + if (PyObject_CheckBuffer(obj)) return PyObject_GetBuffer(obj, view, flags); + #endif + if (PyObject_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) return __pyx_pw_5numpy_7ndarray_1__getbuffer__(obj, view, flags); + #if PY_VERSION_HEX < 0x02060000 + if (obj->ob_type->tp_dict && + (getbuffer_cobj = PyMapping_GetItemString(obj->ob_type->tp_dict, + "__pyx_getbuffer"))) { + getbufferproc func; + #if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION == 0) + func = (getbufferproc) PyCapsule_GetPointer(getbuffer_cobj, "getbuffer(obj, view, flags)"); 
+ #else + func = (getbufferproc) PyCObject_AsVoidPtr(getbuffer_cobj); + #endif + Py_DECREF(getbuffer_cobj); + if (!func) + goto fail; + return func(obj, view, flags); + } else { + PyErr_Clear(); + } + #endif + PyErr_Format(PyExc_TypeError, "'%100s' does not have the buffer interface", Py_TYPE(obj)->tp_name); +#if PY_VERSION_HEX < 0x02060000 +fail: +#endif + return -1; +} +static void __Pyx_ReleaseBuffer(Py_buffer *view) { + PyObject *obj = view->obj; + CYTHON_UNUSED PyObject *releasebuffer_cobj; + if (!obj) return; + #if PY_VERSION_HEX >= 0x02060000 + if (PyObject_CheckBuffer(obj)) { + PyBuffer_Release(view); + return; + } + #endif + if (PyObject_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) { __pyx_pw_5numpy_7ndarray_3__releasebuffer__(obj, view); return; } + #if PY_VERSION_HEX < 0x02060000 + if (obj->ob_type->tp_dict && + (releasebuffer_cobj = PyMapping_GetItemString(obj->ob_type->tp_dict, + "__pyx_releasebuffer"))) { + releasebufferproc func; + #if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION == 0) + func = (releasebufferproc) PyCapsule_GetPointer(releasebuffer_cobj, "releasebuffer(obj, view)"); + #else + func = (releasebufferproc) PyCObject_AsVoidPtr(releasebuffer_cobj); + #endif + Py_DECREF(releasebuffer_cobj); + if (!func) + goto fail; + func(obj, view); + return; + } else { + PyErr_Clear(); + } + #endif + goto nofail; +#if PY_VERSION_HEX < 0x02060000 +fail: +#endif + PyErr_WriteUnraisable(obj); +nofail: + Py_DECREF(obj); + view->obj = NULL; +} +#endif /* PY_MAJOR_VERSION < 3 */ + + + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_import = 0; + py_import = __Pyx_GetAttrString(__pyx_b, "__import__"); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if 
(!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + #if PY_VERSION_HEX >= 0x02050000 + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(1); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + #endif + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; /* try absolute import on failure */ + } + #endif + if (!module) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } + #else + if (level>0) { + PyErr_SetString(PyExc_RuntimeError, "Relative import is not supported for Python <=2.4."); + goto bad; + } + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, NULL); + #endif +bad: + #if PY_VERSION_HEX < 0x03030000 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return ::std::complex< float >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return x + y*(__pyx_t_float_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_float_complex 
__pyx_t_float_complex_from_parts(float x, float y) { + __pyx_t_float_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +#if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eqf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sumf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_difff(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prodf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quotf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float denom = b.real * b.real + b.imag * b.imag; + z.real = (a.real * b.real + a.imag * b.imag) / denom; + z.imag = (a.imag * b.real - a.real * b.imag) / denom; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_negf(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zerof(__pyx_t_float_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conjf(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE float __Pyx_c_absf(__pyx_t_float_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrtf(z.real*z.real + z.imag*z.imag); + #else + return hypotf(z.real, z.imag); + #endif + } + static CYTHON_INLINE 
__pyx_t_float_complex __Pyx_c_powf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + float denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(a, a); + case 3: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(z, a); + case 4: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } + r = a.real; + theta = 0; + } else { + r = __Pyx_c_absf(a); + theta = atan2f(a.imag, a.real); + } + lnr = logf(r); + z_r = expf(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cosf(z_theta); + z.imag = z_r * sinf(z_theta); + return z; + } + #endif +#endif + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return ::std::complex< double >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return x + y*(__pyx_t_double_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + __pyx_t_double_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +#if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq(__pyx_t_double_complex a, __pyx_t_double_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE 
__pyx_t_double_complex __Pyx_c_diff(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double denom = b.real * b.real + b.imag * b.imag; + z.real = (a.real * b.real + a.imag * b.imag) / denom; + z.imag = (a.imag * b.real - a.real * b.imag) / denom; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero(__pyx_t_double_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE double __Pyx_c_abs(__pyx_t_double_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrt(z.real*z.real + z.imag*z.imag); + #else + return hypot(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + double denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod(a, a); + return __Pyx_c_prod(a, a); + case 3: + z = 
__Pyx_c_prod(a, a); + return __Pyx_c_prod(z, a); + case 4: + z = __Pyx_c_prod(a, a); + return __Pyx_c_prod(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } + r = a.real; + theta = 0; + } else { + r = __Pyx_c_abs(a); + theta = atan2(a.imag, a.real); + } + lnr = log(r); + z_r = exp(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cos(z_theta); + z.imag = z_r * sin(z_theta); + return z; + } + #endif +#endif + +static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject* x) { + const unsigned char neg_one = (unsigned char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to unsigned char" : + "value too large to convert to unsigned char"); + } + return (unsigned char)-1; + } + return (unsigned char)val; + } + return (unsigned char)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject* x) { + const unsigned short neg_one = (unsigned short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to unsigned short" : + "value too large to convert to unsigned short"); + } + return (unsigned short)-1; + } + return (unsigned short)val; + } + return (unsigned short)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject* x) { + const unsigned int neg_one = (unsigned int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to unsigned int" : + "value too large to convert to unsigned int"); + } + return (unsigned int)-1; + } + return (unsigned int)val; + } + return (unsigned int)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject* x) { + const char neg_one = (char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to char" : + "value too large to convert to char"); + } + return (char)-1; + } + return (char)val; + } + return (char)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject* x) { + const short neg_one = (short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to short" : + "value too large to convert to short"); + } + return (short)-1; + } + return (short)val; + } + return (short)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject* x) { + const int neg_one = (int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to int" : + "value too large to convert to int"); + } + return (int)-1; + } + return (int)val; + } + return (int)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject* x) { + const signed char neg_one = (signed char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to signed char" : + "value too large to convert to signed char"); + } + return (signed char)-1; + } + return (signed char)val; + } + return (signed char)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject* x) { + const signed short neg_one = (signed short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to signed short" : + "value too large to convert to signed short"); + } + return (signed short)-1; + } + return (signed short)val; + } + return (signed short)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject* x) { + const signed int neg_one = (signed int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to signed int" : + "value too large to convert to signed int"); + } + return (signed int)-1; + } + return (signed int)val; + } + return (signed int)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject* x) { + const int neg_one = (int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to int" : + "value too large to convert to int"); + } + return (int)-1; + } + return (int)val; + } + return (int)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject* x) { + const unsigned long neg_one = (unsigned long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned long"); + return (unsigned long)-1; + } + return (unsigned long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned long"); + return (unsigned long)-1; + } + return (unsigned long)PyLong_AsUnsignedLong(x); + } else { + return (unsigned long)PyLong_AsLong(x); + } + } else { + unsigned long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (unsigned long)-1; + val = __Pyx_PyInt_AsUnsignedLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject* x) { + const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned PY_LONG_LONG"); + return (unsigned PY_LONG_LONG)-1; + } + return (unsigned PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned PY_LONG_LONG"); + return (unsigned PY_LONG_LONG)-1; + } + return (unsigned 
PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return (unsigned PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + unsigned PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (unsigned PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsUnsignedLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject* x) { + const long neg_one = (long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long)-1; + } + return (long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long)-1; + } + return (long)PyLong_AsUnsignedLong(x); + } else { + return (long)PyLong_AsLong(x); + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (long)-1; + val = __Pyx_PyInt_AsLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject* x) { + const PY_LONG_LONG neg_one = (PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to PY_LONG_LONG"); + return (PY_LONG_LONG)-1; + } + return (PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to PY_LONG_LONG"); + return (PY_LONG_LONG)-1; + } + return (PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return 
(PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject* x) { + const signed long neg_one = (signed long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed long"); + return (signed long)-1; + } + return (signed long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed long"); + return (signed long)-1; + } + return (signed long)PyLong_AsUnsignedLong(x); + } else { + return (signed long)PyLong_AsLong(x); + } + } else { + signed long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (signed long)-1; + val = __Pyx_PyInt_AsSignedLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject* x) { + const signed PY_LONG_LONG neg_one = (signed PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed PY_LONG_LONG"); + return (signed PY_LONG_LONG)-1; + } + return (signed PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed PY_LONG_LONG"); + return (signed PY_LONG_LONG)-1; + } + return (signed 
PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return (signed PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + signed PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (signed PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsSignedLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + #if PY_VERSION_HEX < 0x02050000 + return PyErr_Warn(NULL, message); + #else + return PyErr_WarnEx(NULL, message, 1); + #endif + } + return 0; +} + +#ifndef __PYX_HAVE_RT_ImportModule +#define __PYX_HAVE_RT_ImportModule +static PyObject *__Pyx_ImportModule(const char *name) { + PyObject *py_name = 0; + PyObject *py_module = 0; + py_name = __Pyx_PyIdentifier_FromString(name); + if (!py_name) + goto bad; + py_module = PyImport_Import(py_name); + Py_DECREF(py_name); + return py_module; +bad: + Py_XDECREF(py_name); + return 0; +} +#endif + +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, + size_t size, int strict) +{ + PyObject *py_module = 0; + PyObject *result = 0; + PyObject *py_name = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + py_module = __Pyx_ImportModule(module_name); + if (!py_module) + goto bad; + py_name = __Pyx_PyIdentifier_FromString(class_name); + if (!py_name) + goto bad; + result = PyObject_GetAttr(py_module, py_name); + Py_DECREF(py_name); + py_name = 0; + Py_DECREF(py_module); + py_module = 0; + if 
(!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%s.%s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (!strict && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility", + module_name, class_name); + #if PY_VERSION_HEX < 0x02050000 + if (PyErr_Warn(NULL, warning) < 0) goto bad; + #else + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + #endif + } + else if ((size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%s.%s has the wrong size, try recompiling", + module_name, class_name); + goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(py_module); + Py_XDECREF(result); + return NULL; +} +#endif + +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = (start + end) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || 
unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + 
py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, /*int argcount,*/ + 0, /*int kwonlyargcount,*/ + 0, /*int nlocals,*/ + 0, /*int stacksize,*/ + 0, /*int flags,*/ + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, /*int firstlineno,*/ + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_globals = 0; + PyFrameObject *py_frame = 0; + py_code = __pyx_find_code_object(c_line ? c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
c_line : py_line, py_code); + } + py_globals = PyModule_GetDict(__pyx_m); + if (!py_globals) goto bad; + py_frame = PyFrame_New( + PyThreadState_GET(), /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + py_globals, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + py_frame->f_lineno = py_line; + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else /* Python 3+ has unicode identifiers */ + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) { + PyNumberMethods *m; + const char *name = NULL; + PyObject *res = NULL; +#if PY_VERSION_HEX < 0x03000000 + if (PyInt_Check(x) || PyLong_Check(x)) +#else + if (PyLong_Check(x)) +#endif + return Py_INCREF(x), x; + m = Py_TYPE(x)->tp_as_number; +#if PY_VERSION_HEX < 0x03000000 + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = PyNumber_Long(x); + } +#else + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Long(x); + } +#endif + if (res) { +#if PY_VERSION_HEX < 0x03000000 + 
if (!PyInt_Check(res) && !PyLong_Check(res)) { +#else + if (!PyLong_Check(res)) { +#endif + PyErr_Format(PyExc_TypeError, + "__%s__ returned non-%s (type %.200s)", + name, name, Py_TYPE(res)->tp_name); + Py_DECREF(res); + return NULL; + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject* x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { +#if PY_VERSION_HEX < 0x02050000 + if (ival <= LONG_MAX) + return PyInt_FromLong((long)ival); + else { + unsigned char *bytes = (unsigned char *) &ival; + int one = 1; int little = (int)*(unsigned char*)&one; + return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0); + } +#else + return PyInt_FromSize_t(ival); +#endif +} +static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject* x) { + unsigned PY_LONG_LONG val = __Pyx_PyInt_AsUnsignedLongLong(x); + if (unlikely(val == (unsigned PY_LONG_LONG)-1 && PyErr_Occurred())) { + return (size_t)-1; + } else if (unlikely(val != (unsigned PY_LONG_LONG)(size_t)val)) { + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to size_t"); + return (size_t)-1; + } + return (size_t)val; +} + + +#endif /* Py_PYTHON_H */ diff --git a/astropy/convolution/boundary_extend.pyx b/astropy/convolution/boundary_extend.pyx new file mode 100644 index 0000000..63e9032 --- /dev/null +++ b/astropy/convolution/boundary_extend.pyx @@ -0,0 +1,264 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import division +import numpy as np +cimport numpy as np + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + +cdef inline int int_max(int a, int b): return a if a >= b else b +cdef inline int int_min(int a, int b): return a if a <= b else b + +cdef extern from 
"numpy/npy_math.h": + bint npy_isnan(double x) + +cimport cython + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def convolve1d_boundary_extend(np.ndarray[DTYPE_t, ndim=1] f, + np.ndarray[DTYPE_t, ndim=1] g): + + if g.shape[0] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int nkx = g.shape[0] + cdef int wkx = nkx // 2 + cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) + cdef unsigned int i, iii + cdef int ii + + cdef int iimin, iimax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + if npy_isnan(f[i]): + top = 0. + bot = 0. + iimin = i - wkx + iimax = i + wkx + 1 + for ii in range(iimin, iimax): + iii = int_min(int_max(ii, 0), nx - 1) + val = f[iii] + if not npy_isnan(val): + ker = g[(wkx + ii - i)] + top += val * ker + bot += ker + + if bot != 0.: + fixed[i] = top / bot + else: + fixed[i] = f[i] + else: + fixed[i] = f[i] + + # Now run the proper convolution + for i in range(nx): + if not npy_isnan(fixed[i]): + top = 0. + bot = 0. 
+ iimin = i - wkx + iimax = i + wkx + 1 + for ii in range(iimin, iimax): + iii = int_min(int_max(ii, 0), nx - 1) + val = fixed[iii] + ker = g[(wkx + ii - i)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i] = top / bot + else: + conv[i] = fixed[i] + else: + conv[i] = fixed[i] + + return conv + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def convolve2d_boundary_extend(np.ndarray[DTYPE_t, ndim=2] f, + np.ndarray[DTYPE_t, ndim=2] g): + + if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int ny = f.shape[1] + cdef int nkx = g.shape[0] + cdef int nky = g.shape[1] + cdef int wkx = nkx // 2 + cdef int wky = nky // 2 + cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) + cdef unsigned int i, j, iii, jjj + cdef int ii, jj + + cdef int iimin, iimax, jjmin, jjmax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + for j in range(ny): + if npy_isnan(f[i, j]): + top = 0. + bot = 0. + iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + iii = int_min(int_max(ii, 0), nx - 1) + jjj = int_min(int_max(jj, 0), ny - 1) + val = f[iii, jjj] + if not npy_isnan(val): + ker = g[(wkx + ii - i), + (wky + jj - j)] + top += val * ker + bot += ker + + if bot != 0.: + fixed[i, j] = top / bot + else: + fixed[i, j] = f[i, j] + else: + fixed[i, j] = f[i, j] + + # Now run the proper convolution + for i in range(nx): + for j in range(ny): + if not npy_isnan(fixed[i, j]): + top = 0. + bot = 0. 
+ iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + iii = int_min(int_max(ii, 0), nx - 1) + jjj = int_min(int_max(jj, 0), ny - 1) + val = fixed[iii, jjj] + ker = g[(wkx + ii - i), + (wky + jj - j)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i, j] = top / bot + else: + conv[i, j] = fixed[i, j] + else: + conv[i, j] = fixed[i, j] + + return conv + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def convolve3d_boundary_extend(np.ndarray[DTYPE_t, ndim=3] f, + np.ndarray[DTYPE_t, ndim=3] g): + + if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int ny = f.shape[1] + cdef int nz = f.shape[2] + cdef int nkx = g.shape[0] + cdef int nky = g.shape[1] + cdef int nkz = g.shape[2] + cdef int wkx = nkx // 2 + cdef int wky = nky // 2 + cdef int wkz = nkz // 2 + cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) + cdef unsigned int i, j, k, iii, jjj, kkk + cdef int ii, jj, kk + + cdef int iimin, iimax, jjmin, jjmax, kkmin, kkmax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + for j in range(ny): + for k in range(nz): + if npy_isnan(f[i, j, k]): + top = 0. + bot = 0. 
+ iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + kkmin = k - wkz + kkmax = k + wkz + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + for kk in range(kkmin, kkmax): + iii = int_min(int_max(ii, 0), nx - 1) + jjj = int_min(int_max(jj, 0), ny - 1) + kkk = int_min(int_max(kk, 0), nz - 1) + val = f[iii, jjj, kkk] + if not npy_isnan(val): + ker = g[(wkx + ii - i), + (wky + jj - j), + (wkz + kk - k)] + top += val * ker + bot += ker + + if bot != 0.: + fixed[i, j, k] = top / bot + else: + fixed[i, j, k] = f[i, j, k] + else: + fixed[i, j, k] = f[i, j, k] + + # Now run the proper convolution + for i in range(nx): + for j in range(ny): + for k in range(nz): + if not npy_isnan(fixed[i, j, k]): + top = 0. + bot = 0. + iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + kkmin = k - wkz + kkmax = k + wkz + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + for kk in range(kkmin, kkmax): + iii = int_min(int_max(ii, 0), nx - 1) + jjj = int_min(int_max(jj, 0), ny - 1) + kkk = int_min(int_max(kk, 0), nz - 1) + val = fixed[iii, jjj, kkk] + ker = g[(wkx + ii - i), + (wky + jj - j), + (wkz + kk - k)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i, j, k] = top / bot + else: + conv[i, j, k] = fixed[i, j, k] + else: + conv[i, j, k] = fixed[i, j, k] + + return conv diff --git a/astropy/convolution/boundary_fill.c b/astropy/convolution/boundary_fill.c new file mode 100644 index 0000000..cc933e6 --- /dev/null +++ b/astropy/convolution/boundary_fill.c @@ -0,0 +1,8737 @@ +/* Generated by Cython 0.18 on Tue Sep 23 16:50:23 2014 */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02040000 + #error Cython requires Python 2.4+. 
+#else +#include /* For offsetof */ +#ifndef offsetof +#define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION +#define CYTHON_COMPILING_IN_PYPY 1 +#define CYTHON_COMPILING_IN_CPYTHON 0 +#else +#define CYTHON_COMPILING_IN_PYPY 0 +#define CYTHON_COMPILING_IN_CPYTHON 1 +#endif +#if PY_VERSION_HEX < 0x02050000 + typedef int Py_ssize_t; + #define PY_SSIZE_T_MAX INT_MAX + #define PY_SSIZE_T_MIN INT_MIN + #define PY_FORMAT_SIZE_T "" + #define CYTHON_FORMAT_SSIZE_T "" + #define PyInt_FromSsize_t(z) PyInt_FromLong(z) + #define PyInt_AsSsize_t(o) __Pyx_PyInt_AsInt(o) + #define PyNumber_Index(o) ((PyNumber_Check(o) && !PyFloat_Check(o)) ? 
PyNumber_Int(o) : \ + (PyErr_Format(PyExc_TypeError, \ + "expected index value, got %.200s", Py_TYPE(o)->tp_name), \ + (PyObject*)0)) + #define __Pyx_PyIndex_Check(o) (PyNumber_Check(o) && !PyFloat_Check(o) && \ + !PyComplex_Check(o)) + #define PyIndex_Check __Pyx_PyIndex_Check + #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message) + #define __PYX_BUILD_PY_SSIZE_T "i" +#else + #define __PYX_BUILD_PY_SSIZE_T "n" + #define CYTHON_FORMAT_SSIZE_T "z" + #define __Pyx_PyIndex_Check PyIndex_Check +#endif +#if PY_VERSION_HEX < 0x02060000 + #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt) + #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) + #define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size) + #define PyVarObject_HEAD_INIT(type, size) \ + PyObject_HEAD_INIT(type) size, + #define PyType_Modified(t) + typedef struct { + void *buf; + PyObject *obj; + Py_ssize_t len; + Py_ssize_t itemsize; + int readonly; + int ndim; + char *format; + Py_ssize_t *shape; + Py_ssize_t *strides; + Py_ssize_t *suboffsets; + void *internal; + } Py_buffer; + #define PyBUF_SIMPLE 0 + #define PyBUF_WRITABLE 0x0001 + #define PyBUF_FORMAT 0x0004 + #define PyBUF_ND 0x0008 + #define PyBUF_STRIDES (0x0010 | PyBUF_ND) + #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES) + #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES) + #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES) + #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES) + #define PyBUF_RECORDS (PyBUF_STRIDES | PyBUF_FORMAT | PyBUF_WRITABLE) + #define PyBUF_FULL (PyBUF_INDIRECT | PyBUF_FORMAT | PyBUF_WRITABLE) + typedef int (*getbufferproc)(PyObject *, Py_buffer *, int); + typedef void (*releasebufferproc)(PyObject *, Py_buffer *); +#endif +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ + PyCode_New(a, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_BUILTIN_MODULE_NAME 
"builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif +#if PY_MAJOR_VERSION < 3 && PY_MINOR_VERSION < 6 + #define PyUnicode_FromString(s) PyUnicode_Decode(s, strlen(s), "UTF-8", "strict") +#endif +#if PY_MAJOR_VERSION >= 3 + #define Py_TPFLAGS_CHECKTYPES 0 + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3) + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ? \ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) +#else + #define CYTHON_PEP393_ENABLED 0 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_READ(k, d, i) ((k=k), (Py_UCS4)(((Py_UNICODE*)d)[i])) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_VERSION_HEX < 0x02060000 + #define PyBytesObject PyStringObject + #define PyBytes_Type PyString_Type + #define PyBytes_Check PyString_Check + #define PyBytes_CheckExact PyString_CheckExact + #define PyBytes_FromString PyString_FromString + #define PyBytes_FromStringAndSize PyString_FromStringAndSize + #define PyBytes_FromFormat PyString_FromFormat + #define PyBytes_DecodeEscape PyString_DecodeEscape + #define PyBytes_AsString PyString_AsString + #define PyBytes_AsStringAndSize 
PyString_AsStringAndSize + #define PyBytes_Size PyString_Size + #define PyBytes_AS_STRING PyString_AS_STRING + #define PyBytes_GET_SIZE PyString_GET_SIZE + #define PyBytes_Repr PyString_Repr + #define PyBytes_Concat PyString_Concat + #define PyBytes_ConcatAndDel PyString_ConcatAndDel +#endif +#if PY_VERSION_HEX < 0x02060000 + #define PySet_Check(obj) PyObject_TypeCheck(obj, &PySet_Type) + #define PyFrozenSet_Check(obj) PyObject_TypeCheck(obj, &PyFrozenSet_Type) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_VERSION_HEX < 0x03020000 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300) + #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b) + #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value) + #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b) +#else + #define 
__Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0))) + #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1))) + #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1))) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) +#endif +#if PY_VERSION_HEX < 0x02050000 + #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),((char *)(n))) + #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a)) + #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),((char *)(n))) +#else + #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),(n)) + #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a)) + #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),(n)) +#endif +#if PY_VERSION_HEX < 0x02050000 + #define __Pyx_NAMESTR(n) ((char *)(n)) + #define __Pyx_DOCSTR(n) ((char *)(n)) +#else + #define __Pyx_NAMESTR(n) (n) + #define __Pyx_DOCSTR(n) (n) +#endif + + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#endif + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) +#define _USE_MATH_DEFINES +#endif +#include +#define __PYX_HAVE__astropy__convolution__boundary_fill +#define __PYX_HAVE_API__astropy__convolution__boundary_fill +#include "string.h" +#include "stdio.h" +#include "stdlib.h" +#include "numpy/arrayobject.h" +#include "numpy/ufuncobject.h" +#include "numpy/npy_math.h" +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#ifdef PYREX_WITHOUT_ASSERTIONS +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +#ifndef CYTHON_INLINE + #if defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif +#ifndef 
CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/ + +#define __Pyx_PyBytes_FromUString(s) PyBytes_FromString((char*)s) +#define __Pyx_PyBytes_AsUString(s) ((unsigned char*) PyBytes_AsString(s)) +#define __Pyx_Owned_Py_None(b) (Py_INCREF(Py_None), Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? (Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x); +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject*); +#if CYTHON_COMPILING_IN_CPYTHON +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) + + +#ifdef __GNUC__ + /* Test for GCC > 2.95 */ + #if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) + #else /* __GNUC__ > 2 ... */ + #define likely(x) (x) + #define unlikely(x) (x) + #endif /* __GNUC__ > 2 ... 
*/ +#else /* __GNUC__ */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ + +static PyObject *__pyx_m; +static PyObject *__pyx_b; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + +#if !defined(CYTHON_CCOMPLEX) + #if defined(__cplusplus) + #define CYTHON_CCOMPLEX 1 + #elif defined(_Complex_I) + #define CYTHON_CCOMPLEX 1 + #else + #define CYTHON_CCOMPLEX 0 + #endif +#endif +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #include + #else + #include + #endif +#endif +#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__) + #undef _Complex_I + #define _Complex_I 1.0fj +#endif + + +static const char *__pyx_f[] = { + "boundary_fill.pyx", + "numpy.pxd", + "type.pxd", +}; +#define IS_UNSIGNED(type) (((type) -1) > 0) +struct __Pyx_StructField_; +#define __PYX_BUF_FLAGS_PACKED_STRUCT (1 << 0) +typedef struct { + const char* name; /* for error messages only */ + struct __Pyx_StructField_* fields; + size_t size; /* sizeof(type) */ + size_t arraysize[8]; /* length of array in each dimension */ + int ndim; + char typegroup; /* _R_eal, _C_omplex, Signed _I_nt, _U_nsigned int, _S_truct, _P_ointer, _O_bject, c_H_ar */ + char is_unsigned; + int flags; +} __Pyx_TypeInfo; +typedef struct __Pyx_StructField_ { + __Pyx_TypeInfo* type; + const char* name; + size_t offset; +} __Pyx_StructField; +typedef struct { + __Pyx_StructField* field; + size_t parent_offset; +} __Pyx_BufFmt_StackElem; +typedef struct { + __Pyx_StructField root; + __Pyx_BufFmt_StackElem* head; + size_t fmt_offset; + size_t new_count, enc_count; + size_t struct_alignment; + int is_complex; + char enc_type; + char new_packmode; + char enc_packmode; + char is_valid_array; +} __Pyx_BufFmt_Context; + + +/* "numpy.pxd":723 + * # in Cython to enable them only on the right systems. 
+ * + * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + */ +typedef npy_int8 __pyx_t_5numpy_int8_t; + +/* "numpy.pxd":724 + * + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t + */ +typedef npy_int16 __pyx_t_5numpy_int16_t; + +/* "numpy.pxd":725 + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< + * ctypedef npy_int64 int64_t + * #ctypedef npy_int96 int96_t + */ +typedef npy_int32 __pyx_t_5numpy_int32_t; + +/* "numpy.pxd":726 + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< + * #ctypedef npy_int96 int96_t + * #ctypedef npy_int128 int128_t + */ +typedef npy_int64 __pyx_t_5numpy_int64_t; + +/* "numpy.pxd":730 + * #ctypedef npy_int128 int128_t + * + * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + */ +typedef npy_uint8 __pyx_t_5numpy_uint8_t; + +/* "numpy.pxd":731 + * + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t + */ +typedef npy_uint16 __pyx_t_5numpy_uint16_t; + +/* "numpy.pxd":732 + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< + * ctypedef npy_uint64 uint64_t + * #ctypedef npy_uint96 uint96_t + */ +typedef npy_uint32 __pyx_t_5numpy_uint32_t; + +/* "numpy.pxd":733 + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< + * #ctypedef npy_uint96 uint96_t + * #ctypedef npy_uint128 uint128_t + */ +typedef npy_uint64 __pyx_t_5numpy_uint64_t; + +/* "numpy.pxd":737 + * #ctypedef npy_uint128 uint128_t + * + * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< + * ctypedef npy_float64 float64_t + * #ctypedef npy_float80 float80_t 
+ */ +typedef npy_float32 __pyx_t_5numpy_float32_t; + +/* "numpy.pxd":738 + * + * ctypedef npy_float32 float32_t + * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< + * #ctypedef npy_float80 float80_t + * #ctypedef npy_float128 float128_t + */ +typedef npy_float64 __pyx_t_5numpy_float64_t; + +/* "numpy.pxd":747 + * # The int types are mapped a bit surprising -- + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t + */ +typedef npy_long __pyx_t_5numpy_int_t; + +/* "numpy.pxd":748 + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong longlong_t + * + */ +typedef npy_longlong __pyx_t_5numpy_long_t; + +/* "numpy.pxd":749 + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_ulong uint_t + */ +typedef npy_longlong __pyx_t_5numpy_longlong_t; + +/* "numpy.pxd":751 + * ctypedef npy_longlong longlong_t + * + * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t + */ +typedef npy_ulong __pyx_t_5numpy_uint_t; + +/* "numpy.pxd":752 + * + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulonglong_t + * + */ +typedef npy_ulonglong __pyx_t_5numpy_ulong_t; + +/* "numpy.pxd":753 + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_intp intp_t + */ +typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; + +/* "numpy.pxd":755 + * ctypedef npy_ulonglong ulonglong_t + * + * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< + * ctypedef npy_uintp uintp_t + * + */ +typedef npy_intp __pyx_t_5numpy_intp_t; + +/* "numpy.pxd":756 + * + * ctypedef npy_intp intp_t + * ctypedef npy_uintp uintp_t # 
<<<<<<<<<<<<<< + * + * ctypedef npy_double float_t + */ +typedef npy_uintp __pyx_t_5numpy_uintp_t; + +/* "numpy.pxd":758 + * ctypedef npy_uintp uintp_t + * + * ctypedef npy_double float_t # <<<<<<<<<<<<<< + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t + */ +typedef npy_double __pyx_t_5numpy_float_t; + +/* "numpy.pxd":759 + * + * ctypedef npy_double float_t + * ctypedef npy_double double_t # <<<<<<<<<<<<<< + * ctypedef npy_longdouble longdouble_t + * + */ +typedef npy_double __pyx_t_5numpy_double_t; + +/* "numpy.pxd":760 + * ctypedef npy_double float_t + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cfloat cfloat_t + */ +typedef npy_longdouble __pyx_t_5numpy_longdouble_t; + +/* "astropy/convolution/boundary_fill.pyx":7 + * + * DTYPE = np.float + * ctypedef np.float_t DTYPE_t # <<<<<<<<<<<<<< + * + * cdef extern from "numpy/npy_math.h": + */ +typedef __pyx_t_5numpy_float_t __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t; +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< float > __pyx_t_float_complex; + #else + typedef float _Complex __pyx_t_float_complex; + #endif +#else + typedef struct { float real, imag; } __pyx_t_float_complex; +#endif + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< double > __pyx_t_double_complex; + #else + typedef double _Complex __pyx_t_double_complex; + #endif +#else + typedef struct { double real, imag; } __pyx_t_double_complex; +#endif + + +/*--- Type declarations ---*/ + +/* "numpy.pxd":762 + * ctypedef npy_longdouble longdouble_t + * + * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t + */ +typedef npy_cfloat __pyx_t_5numpy_cfloat_t; + +/* "numpy.pxd":763 + * + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< + * ctypedef npy_clongdouble clongdouble_t + * + */ +typedef npy_cdouble 
__pyx_t_5numpy_cdouble_t; + +/* "numpy.pxd":764 + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cdouble complex_t + */ +typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; + +/* "numpy.pxd":766 + * ctypedef npy_clongdouble clongdouble_t + * + * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew1(a): + */ +typedef npy_cdouble __pyx_t_5numpy_complex_t; +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/ + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil) \ + if (acquire_gil) { \ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure(); \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \ + PyGILState_Release(__pyx_gilstate_save); \ + } else { \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil) \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext() \ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) 
+ #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif /* CYTHON_REFNANNY */ +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/ + +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/ + +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /*proto*/ + +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[], \ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, \ + const char* function_name); /*proto*/ + +static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact); /*proto*/ + +static CYTHON_INLINE int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* obj, + __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack); +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info); + +static CYTHON_INLINE long 
__Pyx_mod_long(long, long); /* proto */ + +static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/ +static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ + +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /*proto*/ + +static CYTHON_INLINE long __Pyx_div_long(long, long); /* proto */ + +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/ + +#define __Pyx_BufPtrStrided1d(type, buf, i0, s0) (type)((char*)buf + i0 * s0) +#define __Pyx_BufPtrStrided2d(type, buf, i0, s0, i1, s1) (type)((char*)buf + i0 * s0 + i1 * s1) +#define __Pyx_BufPtrStrided3d(type, buf, i0, s0, i1, s1, i2, s2) (type)((char*)buf + i0 * s0 + i1 * s1 + i2 * s2) +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +static CYTHON_INLINE int __Pyx_IterFinish(void); /*proto*/ + +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /*proto*/ + +typedef struct { + Py_ssize_t shape, strides, suboffsets; +} __Pyx_Buf_DimInfo; +typedef struct { + size_t refcount; + Py_buffer pybuffer; +} __Pyx_Buffer; +typedef struct { + __Pyx_Buffer *rcbuffer; + char *data; + __Pyx_Buf_DimInfo diminfo[8]; +} __Pyx_LocalBuf_ND; + +#if PY_MAJOR_VERSION < 3 + static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags); + static void __Pyx_ReleaseBuffer(Py_buffer *view); +#else + #define __Pyx_GetBuffer PyObject_GetBuffer + #define __Pyx_ReleaseBuffer PyBuffer_Release +#endif + + +static Py_ssize_t __Pyx_zeros[] = {0, 0, 0, 0, 0, 0, 0, 0}; +static Py_ssize_t __Pyx_minusones[] = {-1, -1, -1, -1, -1, -1, -1, -1}; + +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); /*proto*/ + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #define 
__Pyx_CREAL(z) ((z).real()) + #define __Pyx_CIMAG(z) ((z).imag()) + #else + #define __Pyx_CREAL(z) (__real__(z)) + #define __Pyx_CIMAG(z) (__imag__(z)) + #endif +#else + #define __Pyx_CREAL(z) ((z).real) + #define __Pyx_CIMAG(z) ((z).imag) +#endif +#if defined(_WIN32) && defined(__cplusplus) && CYTHON_CCOMPLEX + #define __Pyx_SET_CREAL(z,x) ((z).real(x)) + #define __Pyx_SET_CIMAG(z,y) ((z).imag(y)) +#else + #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x) + #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y) +#endif + +static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float); + +#if CYTHON_CCOMPLEX + #define __Pyx_c_eqf(a, b) ((a)==(b)) + #define __Pyx_c_sumf(a, b) ((a)+(b)) + #define __Pyx_c_difff(a, b) ((a)-(b)) + #define __Pyx_c_prodf(a, b) ((a)*(b)) + #define __Pyx_c_quotf(a, b) ((a)/(b)) + #define __Pyx_c_negf(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zerof(z) ((z)==(float)0) + #define __Pyx_c_conjf(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_absf(z) (::std::abs(z)) + #define __Pyx_c_powf(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zerof(z) ((z)==0) + #define __Pyx_c_conjf(z) (conjf(z)) + #if 1 + #define __Pyx_c_absf(z) (cabsf(z)) + #define __Pyx_c_powf(a, b) (cpowf(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eqf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sumf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_difff(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prodf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quotf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_negf(__pyx_t_float_complex); + static CYTHON_INLINE int __Pyx_c_is_zerof(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex 
__Pyx_c_conjf(__pyx_t_float_complex); + #if 1 + static CYTHON_INLINE float __Pyx_c_absf(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_powf(__pyx_t_float_complex, __pyx_t_float_complex); + #endif +#endif + +static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double); + +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq(a, b) ((a)==(b)) + #define __Pyx_c_sum(a, b) ((a)+(b)) + #define __Pyx_c_diff(a, b) ((a)-(b)) + #define __Pyx_c_prod(a, b) ((a)*(b)) + #define __Pyx_c_quot(a, b) ((a)/(b)) + #define __Pyx_c_neg(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero(z) ((z)==(double)0) + #define __Pyx_c_conj(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs(z) (::std::abs(z)) + #define __Pyx_c_pow(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero(z) ((z)==0) + #define __Pyx_c_conj(z) (conj(z)) + #if 1 + #define __Pyx_c_abs(z) (cabs(z)) + #define __Pyx_c_pow(a, b) (cpow(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg(__pyx_t_double_complex); + static CYTHON_INLINE int __Pyx_c_is_zero(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj(__pyx_t_double_complex); + #if 1 + static CYTHON_INLINE double __Pyx_c_abs(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow(__pyx_t_double_complex, __pyx_t_double_complex); + #endif +#endif + +static CYTHON_INLINE unsigned char 
__Pyx_PyInt_AsUnsignedChar(PyObject *); + +static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject *); + +static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject *); + +static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject *); + +static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject *); + +static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject *); + +static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject *); + +static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject *); + +static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject *); + +static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject *); + +static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject *); + +static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject *); + +static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject *); + +static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject *); + +static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject *); + +static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject *); + +static int __Pyx_check_binary_version(void); + +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif +#endif + +static PyObject *__Pyx_ImportModule(const char *name); /*proto*/ + +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); /*proto*/ + +typedef struct { + int code_line; + PyCodeObject* code_object; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject 
*__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); /*proto*/ + +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/ + + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'libc.stdlib' */ + +/* Module declarations from 'numpy' */ + +/* Module declarations from 'numpy' */ +static PyTypeObject *__pyx_ptype_5numpy_dtype = 0; +static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0; +static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0; +static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0; +static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0; +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/ + +/* Module declarations from 'cython' */ + +/* Module declarations from 'astropy.convolution.boundary_fill' */ +static __Pyx_TypeInfo __Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t = { "DTYPE_t", NULL, sizeof(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t), { 0 }, 0, 'R', 0, 0 }; +#define __Pyx_MODULE_NAME "astropy.convolution.boundary_fill" +int __pyx_module_is_main_astropy__convolution__boundary_fill = 0; + +/* Implementation of 'astropy.convolution.boundary_fill' */ +static PyObject *__pyx_builtin_ValueError; +static PyObject *__pyx_builtin_range; +static PyObject *__pyx_builtin_RuntimeError; +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_fill_convolve1d_boundary_fill(CYTHON_UNUSED PyObject *__pyx_self, 
PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g, float __pyx_v_fill_value); /* proto */ +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_fill_2convolve2d_boundary_fill(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g, float __pyx_v_fill_value); /* proto */ +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_fill_4convolve3d_boundary_fill(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g, float __pyx_v_fill_value); /* proto */ +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */ +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */ +static char __pyx_k_1[] = "Convolution kernel must have odd dimensions"; +static char __pyx_k_5[] = "ndarray is not C contiguous"; +static char __pyx_k_7[] = "ndarray is not Fortran contiguous"; +static char __pyx_k_9[] = "Non-native byte order not supported"; +static char __pyx_k_11[] = "unknown dtype code in numpy.pxd (%d)"; +static char __pyx_k_12[] = "Format string allocated too short, see comment in numpy.pxd"; +static char __pyx_k_15[] = "Format string allocated too short."; +static char __pyx_k_19[] = "convolve1d_boundary_fill"; +static char __pyx_k_20[] = "/internal/1/root/src/astropy/astropy/astropy/convolution/boundary_fill.pyx"; +static char __pyx_k_21[] = "astropy.convolution.boundary_fill"; +static char __pyx_k_24[] = "convolve2d_boundary_fill"; +static char __pyx_k_27[] = "convolve3d_boundary_fill"; +static char __pyx_k__B[] = "B"; +static char __pyx_k__H[] = "H"; +static char __pyx_k__I[] = "I"; +static char __pyx_k__L[] = "L"; +static char __pyx_k__O[] = "O"; +static char __pyx_k__Q[] = "Q"; +static char __pyx_k__b[] = "b"; +static char __pyx_k__d[] = "d"; +static char __pyx_k__f[] = "f"; +static char __pyx_k__g[] = "g"; +static char __pyx_k__h[] = "h"; +static char 
__pyx_k__i[] = "i"; +static char __pyx_k__j[] = "j"; +static char __pyx_k__k[] = "k"; +static char __pyx_k__l[] = "l"; +static char __pyx_k__q[] = "q"; +static char __pyx_k__Zd[] = "Zd"; +static char __pyx_k__Zf[] = "Zf"; +static char __pyx_k__Zg[] = "Zg"; +static char __pyx_k__ii[] = "ii"; +static char __pyx_k__jj[] = "jj"; +static char __pyx_k__kk[] = "kk"; +static char __pyx_k__np[] = "np"; +static char __pyx_k__nx[] = "nx"; +static char __pyx_k__ny[] = "ny"; +static char __pyx_k__nz[] = "nz"; +static char __pyx_k__bot[] = "bot"; +static char __pyx_k__iii[] = "iii"; +static char __pyx_k__jjj[] = "jjj"; +static char __pyx_k__ker[] = "ker"; +static char __pyx_k__kkk[] = "kkk"; +static char __pyx_k__nkx[] = "nkx"; +static char __pyx_k__nky[] = "nky"; +static char __pyx_k__nkz[] = "nkz"; +static char __pyx_k__top[] = "top"; +static char __pyx_k__val[] = "val"; +static char __pyx_k__wkx[] = "wkx"; +static char __pyx_k__wky[] = "wky"; +static char __pyx_k__wkz[] = "wkz"; +static char __pyx_k__conv[] = "conv"; +static char __pyx_k__DTYPE[] = "DTYPE"; +static char __pyx_k__dtype[] = "dtype"; +static char __pyx_k__empty[] = "empty"; +static char __pyx_k__fixed[] = "fixed"; +static char __pyx_k__float[] = "float"; +static char __pyx_k__iimax[] = "iimax"; +static char __pyx_k__iimin[] = "iimin"; +static char __pyx_k__jjmax[] = "jjmax"; +static char __pyx_k__jjmin[] = "jjmin"; +static char __pyx_k__kkmax[] = "kkmax"; +static char __pyx_k__kkmin[] = "kkmin"; +static char __pyx_k__numpy[] = "numpy"; +static char __pyx_k__range[] = "range"; +static char __pyx_k____main__[] = "__main__"; +static char __pyx_k____test__[] = "__test__"; +static char __pyx_k__ValueError[] = "ValueError"; +static char __pyx_k__fill_value[] = "fill_value"; +static char __pyx_k__RuntimeError[] = "RuntimeError"; +static PyObject *__pyx_kp_s_1; +static PyObject *__pyx_kp_u_11; +static PyObject *__pyx_kp_u_12; +static PyObject *__pyx_kp_u_15; +static PyObject *__pyx_n_s_19; +static PyObject 
*__pyx_kp_s_20; +static PyObject *__pyx_n_s_21; +static PyObject *__pyx_n_s_24; +static PyObject *__pyx_n_s_27; +static PyObject *__pyx_kp_u_5; +static PyObject *__pyx_kp_u_7; +static PyObject *__pyx_kp_u_9; +static PyObject *__pyx_n_s__DTYPE; +static PyObject *__pyx_n_s__RuntimeError; +static PyObject *__pyx_n_s__ValueError; +static PyObject *__pyx_n_s____main__; +static PyObject *__pyx_n_s____test__; +static PyObject *__pyx_n_s__bot; +static PyObject *__pyx_n_s__conv; +static PyObject *__pyx_n_s__dtype; +static PyObject *__pyx_n_s__empty; +static PyObject *__pyx_n_s__f; +static PyObject *__pyx_n_s__fill_value; +static PyObject *__pyx_n_s__fixed; +static PyObject *__pyx_n_s__float; +static PyObject *__pyx_n_s__g; +static PyObject *__pyx_n_s__i; +static PyObject *__pyx_n_s__ii; +static PyObject *__pyx_n_s__iii; +static PyObject *__pyx_n_s__iimax; +static PyObject *__pyx_n_s__iimin; +static PyObject *__pyx_n_s__j; +static PyObject *__pyx_n_s__jj; +static PyObject *__pyx_n_s__jjj; +static PyObject *__pyx_n_s__jjmax; +static PyObject *__pyx_n_s__jjmin; +static PyObject *__pyx_n_s__k; +static PyObject *__pyx_n_s__ker; +static PyObject *__pyx_n_s__kk; +static PyObject *__pyx_n_s__kkk; +static PyObject *__pyx_n_s__kkmax; +static PyObject *__pyx_n_s__kkmin; +static PyObject *__pyx_n_s__nkx; +static PyObject *__pyx_n_s__nky; +static PyObject *__pyx_n_s__nkz; +static PyObject *__pyx_n_s__np; +static PyObject *__pyx_n_s__numpy; +static PyObject *__pyx_n_s__nx; +static PyObject *__pyx_n_s__ny; +static PyObject *__pyx_n_s__nz; +static PyObject *__pyx_n_s__range; +static PyObject *__pyx_n_s__top; +static PyObject *__pyx_n_s__val; +static PyObject *__pyx_n_s__wkx; +static PyObject *__pyx_n_s__wky; +static PyObject *__pyx_n_s__wkz; +static PyObject *__pyx_int_15; +static PyObject *__pyx_k_tuple_2; +static PyObject *__pyx_k_tuple_3; +static PyObject *__pyx_k_tuple_4; +static PyObject *__pyx_k_tuple_6; +static PyObject *__pyx_k_tuple_8; +static PyObject *__pyx_k_tuple_10; +static 
PyObject *__pyx_k_tuple_13; +static PyObject *__pyx_k_tuple_14; +static PyObject *__pyx_k_tuple_16; +static PyObject *__pyx_k_tuple_17; +static PyObject *__pyx_k_tuple_22; +static PyObject *__pyx_k_tuple_25; +static PyObject *__pyx_k_codeobj_18; +static PyObject *__pyx_k_codeobj_23; +static PyObject *__pyx_k_codeobj_26; + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_fill_1convolve1d_boundary_fill(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_13boundary_fill_1convolve1d_boundary_fill = {__Pyx_NAMESTR("convolve1d_boundary_fill"), (PyCFunction)__pyx_pw_7astropy_11convolution_13boundary_fill_1convolve1d_boundary_fill, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_fill_1convolve1d_boundary_fill(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + float __pyx_v_fill_value; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve1d_boundary_fill (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,&__pyx_n_s__fill_value,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + 
__Pyx_RaiseArgtupleInvalid("convolve1d_boundary_fill", 1, 3, 3, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__fill_value)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve1d_boundary_fill", 1, 3, 3, 2); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve1d_boundary_fill") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + __pyx_v_fill_value = __pyx_PyFloat_AsFloat(values[2]); if (unlikely((__pyx_v_fill_value == (float)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 18; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve1d_boundary_fill", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_fill.convolve1d_boundary_fill", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if 
(unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 17; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_13boundary_fill_convolve1d_boundary_fill(__pyx_self, __pyx_v_f, __pyx_v_g, __pyx_v_fill_value); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_fill.pyx":16 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_fill(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=1] g, + * float fill_value): + */ + +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_fill_convolve1d_boundary_fill(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g, float __pyx_v_fill_value) { + int __pyx_v_nx; + int __pyx_v_nkx; + int __pyx_v_wkx; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + int __pyx_v_ii; + int __pyx_v_iimin; + int __pyx_v_iimax; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyArrayObject *__pyx_t_8 = NULL; 
+ PyArrayObject *__pyx_t_9 = NULL; + int __pyx_t_10; + unsigned int __pyx_t_11; + unsigned int __pyx_t_12; + int __pyx_t_13; + int __pyx_t_14; + int __pyx_t_15; + unsigned int __pyx_t_16; + unsigned int __pyx_t_17; + unsigned int __pyx_t_18; + unsigned int __pyx_t_19; + unsigned int __pyx_t_20; + unsigned int __pyx_t_21; + unsigned int __pyx_t_22; + int __pyx_t_23; + unsigned int __pyx_t_24; + unsigned int __pyx_t_25; + unsigned int __pyx_t_26; + unsigned int __pyx_t_27; + unsigned int __pyx_t_28; + unsigned int __pyx_t_29; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve1d_boundary_fill", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + __pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if 
(unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; + + /* "astropy/convolution/boundary_fill.pyx":20 + * float fill_value): + * + * if g.shape[0] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_fill.pyx":21 + * + * if g.shape[0] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_2 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_2), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __Pyx_Raise(__pyx_t_2, 0, 0, 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_fill.pyx":23 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_2 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_GetName(__pyx_m, 
__pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_2, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_1) { + __pyx_t_4 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = PyObject_RichCompare(__pyx_t_4, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_5 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_6 = __pyx_t_5; + } else { + __pyx_t_6 = __pyx_t_1; + } + if (unlikely(!__pyx_t_6)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_fill.pyx":25 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * 
cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_fill.pyx":26 + * + * cdef int nx = f.shape[0] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_fill.pyx":27 + * cdef int nx = f.shape[0] + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_fill.pyx":28 + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) + * cdef unsigned int i, iii + */ + __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__empty); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = PyList_New(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_4, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __pyx_t_2 = 0; 
+ __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_t_4)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_4)); + __pyx_t_4 = 0; + __pyx_t_4 = PyDict_New(); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_4)); + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_t_4, ((PyObject *)__pyx_n_s__dtype), __pyx_t_7) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), ((PyObject *)__pyx_t_4)); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; + if (!(likely(((__pyx_t_7) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_7, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_8 = ((PyArrayObject *)__pyx_t_7); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_8, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 1, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; + } + } + __pyx_t_8 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "astropy/convolution/boundary_fill.pyx":29 + * cdef int wkx = nkx // 2 + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef unsigned int i, iii + * cdef int ii + */ + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = PyObject_GetAttr(__pyx_t_7, __pyx_n_s__empty); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + __pyx_t_7 = 0; + __pyx_t_7 = PyTuple_New(1); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_7, 0, ((PyObject *)__pyx_t_2)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_2)); + __pyx_t_2 = 0; + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_2)); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_2, ((PyObject *)__pyx_n_s__dtype), __pyx_t_3) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_7), ((PyObject *)__pyx_t_2)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_7)); __pyx_t_7 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_9 = ((PyArrayObject *)__pyx_t_3); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_9, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 1, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; + } + } + __pyx_t_9 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_3); + __pyx_t_3 = 0; + + /* 
"astropy/convolution/boundary_fill.pyx":39 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * if npy_isnan(f[i]): + * top = 0. + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_fill.pyx":40 + * # neighboring values + * for i in range(nx): + * if npy_isnan(f[i]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_12 = __pyx_v_i; + __pyx_t_6 = npy_isnan((*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_12, __pyx_pybuffernd_f.diminfo[0].strides))); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_fill.pyx":41 + * for i in range(nx): + * if npy_isnan(f[i]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_fill.pyx":42 + * if npy_isnan(f[i]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_fill.pyx":43 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_fill.pyx":44 + * bot = 0. 
+ * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * if ii < 0 or ii > nx - 1: + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_fill.pyx":45 + * iimin = i - wkx + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * if ii < 0 or ii > nx - 1: + * val = fill_value + */ + __pyx_t_13 = __pyx_v_iimax; + for (__pyx_t_14 = __pyx_v_iimin; __pyx_t_14 < __pyx_t_13; __pyx_t_14+=1) { + __pyx_v_ii = __pyx_t_14; + + /* "astropy/convolution/boundary_fill.pyx":46 + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + * if ii < 0 or ii > nx - 1: # <<<<<<<<<<<<<< + * val = fill_value + * else: + */ + __pyx_t_6 = (__pyx_v_ii < 0); + if (!__pyx_t_6) { + __pyx_t_1 = (__pyx_v_ii > (__pyx_v_nx - 1)); + __pyx_t_5 = __pyx_t_1; + } else { + __pyx_t_5 = __pyx_t_6; + } + if (__pyx_t_5) { + + /* "astropy/convolution/boundary_fill.pyx":47 + * for ii in range(iimin, iimax): + * if ii < 0 or ii > nx - 1: + * val = fill_value # <<<<<<<<<<<<<< + * else: + * val = f[ii] + */ + __pyx_v_val = __pyx_v_fill_value; + goto __pyx_L9; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":49 + * val = fill_value + * else: + * val = f[ii] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] + */ + __pyx_t_15 = __pyx_v_ii; + if (__pyx_t_15 < 0) __pyx_t_15 += __pyx_pybuffernd_f.diminfo[0].shape; + __pyx_v_val = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_15, __pyx_pybuffernd_f.diminfo[0].strides)); + } + __pyx_L9:; + + /* "astropy/convolution/boundary_fill.pyx":50 + * else: + * val = f[ii] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i)] + * top += val * ker + */ + __pyx_t_5 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_5) { + + /* "astropy/convolution/boundary_fill.pyx":51 + * val = f[ii] + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] # 
<<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_16 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_v_ker = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_16, __pyx_pybuffernd_g.diminfo[0].strides)); + + /* "astropy/convolution/boundary_fill.pyx":52 + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0.: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_fill.pyx":53 + * ker = g[(wkx + ii - i)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0.: + * fixed[i] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L10; + } + __pyx_L10:; + } + + /* "astropy/convolution/boundary_fill.pyx":54 + * top += val * ker + * bot += ker + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i] = top / bot + * else: + */ + __pyx_t_5 = (__pyx_v_bot != 0.); + if (__pyx_t_5) { + + /* "astropy/convolution/boundary_fill.pyx":55 + * bot += ker + * if bot != 0.: + * fixed[i] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i] = f[i] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 55; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_17 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_17, __pyx_pybuffernd_fixed.diminfo[0].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L11; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":57 + * fixed[i] = top / bot + * else: + * fixed[i] = f[i] # <<<<<<<<<<<<<< + * else: + * fixed[i] = f[i] + */ + __pyx_t_18 = __pyx_v_i; + __pyx_t_19 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, 
__pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_19, __pyx_pybuffernd_fixed.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_18, __pyx_pybuffernd_f.diminfo[0].strides)); + } + __pyx_L11:; + goto __pyx_L6; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":59 + * fixed[i] = f[i] + * else: + * fixed[i] = f[i] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_20 = __pyx_v_i; + __pyx_t_21 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_21, __pyx_pybuffernd_fixed.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_20, __pyx_pybuffernd_f.diminfo[0].strides)); + } + __pyx_L6:; + } + + /* "astropy/convolution/boundary_fill.pyx":62 + * + * # Now run the proper convolution + * for i in range(nx): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i]): + * top = 0. + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_fill.pyx":63 + * # Now run the proper convolution + * for i in range(nx): + * if not npy_isnan(fixed[i]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_22 = __pyx_v_i; + __pyx_t_5 = (!npy_isnan((*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_22, __pyx_pybuffernd_fixed.diminfo[0].strides)))); + if (__pyx_t_5) { + + /* "astropy/convolution/boundary_fill.pyx":64 + * for i in range(nx): + * if not npy_isnan(fixed[i]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_fill.pyx":65 + * if not npy_isnan(fixed[i]): + * top = 0. + * bot = 0. 
# <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_fill.pyx":66 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_fill.pyx":67 + * bot = 0. + * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * if ii < 0 or ii > nx - 1: + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_fill.pyx":68 + * iimin = i - wkx + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * if ii < 0 or ii > nx - 1: + * val = fill_value + */ + __pyx_t_13 = __pyx_v_iimax; + for (__pyx_t_14 = __pyx_v_iimin; __pyx_t_14 < __pyx_t_13; __pyx_t_14+=1) { + __pyx_v_ii = __pyx_t_14; + + /* "astropy/convolution/boundary_fill.pyx":69 + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + * if ii < 0 or ii > nx - 1: # <<<<<<<<<<<<<< + * val = fill_value + * else: + */ + __pyx_t_5 = (__pyx_v_ii < 0); + if (!__pyx_t_5) { + __pyx_t_6 = (__pyx_v_ii > (__pyx_v_nx - 1)); + __pyx_t_1 = __pyx_t_6; + } else { + __pyx_t_1 = __pyx_t_5; + } + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_fill.pyx":70 + * for ii in range(iimin, iimax): + * if ii < 0 or ii > nx - 1: + * val = fill_value # <<<<<<<<<<<<<< + * else: + * val = fixed[ii] + */ + __pyx_v_val = __pyx_v_fill_value; + goto __pyx_L17; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":72 + * val = fill_value + * else: + * val = fixed[ii] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): + */ + __pyx_t_23 = __pyx_v_ii; + if (__pyx_t_23 < 0) __pyx_t_23 += __pyx_pybuffernd_fixed.diminfo[0].shape; + __pyx_v_val = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_23, 
__pyx_pybuffernd_fixed.diminfo[0].strides)); + } + __pyx_L17:; + + /* "astropy/convolution/boundary_fill.pyx":73 + * else: + * val = fixed[ii] + * ker = g[(wkx + ii - i)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_24 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_v_ker = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_24, __pyx_pybuffernd_g.diminfo[0].strides)); + + /* "astropy/convolution/boundary_fill.pyx":74 + * val = fixed[ii] + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_1 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_fill.pyx":75 + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_fill.pyx":76 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + * conv[i] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L18; + } + __pyx_L18:; + } + + /* "astropy/convolution/boundary_fill.pyx":77 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i] = top / bot + * else: + */ + __pyx_t_1 = (__pyx_v_bot != 0.0); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_fill.pyx":78 + * bot += ker + * if bot != 0: + * conv[i] = top / bot # <<<<<<<<<<<<<< + * else: + * conv[i] = fixed[i] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 78; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_25 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_25, 
__pyx_pybuffernd_conv.diminfo[0].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L19; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":80 + * conv[i] = top / bot + * else: + * conv[i] = fixed[i] # <<<<<<<<<<<<<< + * else: + * conv[i] = fixed[i] + */ + __pyx_t_26 = __pyx_v_i; + __pyx_t_27 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_27, __pyx_pybuffernd_conv.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_26, __pyx_pybuffernd_fixed.diminfo[0].strides)); + } + __pyx_L19:; + goto __pyx_L14; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":82 + * conv[i] = fixed[i] + * else: + * conv[i] = fixed[i] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_28 = __pyx_v_i; + __pyx_t_29 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_29, __pyx_pybuffernd_conv.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_28, __pyx_pybuffernd_fixed.diminfo[0].strides)); + } + __pyx_L14:; + } + + /* "astropy/convolution/boundary_fill.pyx":84 + * conv[i] = fixed[i] + * + * return conv # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + 
__Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_fill.convolve1d_boundary_fill", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_fill_3convolve2d_boundary_fill(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_13boundary_fill_3convolve2d_boundary_fill = {__Pyx_NAMESTR("convolve2d_boundary_fill"), (PyCFunction)__pyx_pw_7astropy_11convolution_13boundary_fill_3convolve2d_boundary_fill, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_fill_3convolve2d_boundary_fill(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + float __pyx_v_fill_value; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve2d_boundary_fill (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,&__pyx_n_s__fill_value,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch 
(pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve2d_boundary_fill", 1, 3, 3, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__fill_value)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve2d_boundary_fill", 1, 3, 3, 2); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve2d_boundary_fill") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + __pyx_v_fill_value = __pyx_PyFloat_AsFloat(values[2]); if (unlikely((__pyx_v_fill_value == (float)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 90; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve2d_boundary_fill", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = 
__pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_fill.convolve2d_boundary_fill", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 89; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_13boundary_fill_2convolve2d_boundary_fill(__pyx_self, __pyx_v_f, __pyx_v_g, __pyx_v_fill_value); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_fill.pyx":88 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_fill(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g, + * float fill_value): + */ + +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_fill_2convolve2d_boundary_fill(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g, float __pyx_v_fill_value) { + int __pyx_v_nx; + int __pyx_v_ny; + int __pyx_v_nkx; + int __pyx_v_nky; + int __pyx_v_wkx; + int __pyx_v_wky; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_j; + int __pyx_v_ii; + int __pyx_v_jj; + int __pyx_v_iimin; + int __pyx_v_iimax; + int __pyx_v_jjmin; + int __pyx_v_jjmax; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_bot; + 
__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyArrayObject *__pyx_t_8 = NULL; + PyArrayObject *__pyx_t_9 = NULL; + int __pyx_t_10; + unsigned int __pyx_t_11; + int __pyx_t_12; + unsigned int __pyx_t_13; + unsigned int __pyx_t_14; + unsigned int __pyx_t_15; + int __pyx_t_16; + int __pyx_t_17; + int __pyx_t_18; + int __pyx_t_19; + int __pyx_t_20; + int __pyx_t_21; + int __pyx_t_22; + int __pyx_t_23; + unsigned int __pyx_t_24; + unsigned int __pyx_t_25; + unsigned int __pyx_t_26; + unsigned int __pyx_t_27; + unsigned int __pyx_t_28; + unsigned int __pyx_t_29; + unsigned int __pyx_t_30; + unsigned int __pyx_t_31; + unsigned int __pyx_t_32; + unsigned int __pyx_t_33; + unsigned int __pyx_t_34; + unsigned int __pyx_t_35; + unsigned int __pyx_t_36; + unsigned int __pyx_t_37; + int __pyx_t_38; + int __pyx_t_39; + unsigned int __pyx_t_40; + unsigned int __pyx_t_41; + unsigned int __pyx_t_42; + unsigned int __pyx_t_43; + unsigned int __pyx_t_44; + unsigned int __pyx_t_45; + unsigned int __pyx_t_46; + unsigned int __pyx_t_47; + unsigned int __pyx_t_48; + unsigned int __pyx_t_49; + unsigned int __pyx_t_50; + unsigned int __pyx_t_51; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve2d_boundary_fill", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + 
__pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + __pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_f.diminfo[1].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_f.diminfo[1].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_g.diminfo[1].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[1]; 
__pyx_pybuffernd_g.diminfo[1].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[1]; + + /* "astropy/convolution/boundary_fill.pyx":92 + * float fill_value): + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (!__pyx_t_1) { + __pyx_t_2 = (__Pyx_mod_long((__pyx_v_g->dimensions[1]), 2) != 1); + __pyx_t_3 = __pyx_t_2; + } else { + __pyx_t_3 = __pyx_t_1; + } + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_fill.pyx":93 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_3), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_fill.pyx":95 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_4 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 95; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 95; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyObject_RichCompare(__pyx_t_4, __pyx_t_5, 
Py_EQ); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 95; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 95; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_3) { + __pyx_t_6 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 95; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 95; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_6, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 95; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 95; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_3; + } + if (unlikely(!__pyx_t_2)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 95; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_fill.pyx":97 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_fill.pyx":98 + * + * cdef int nx = 
f.shape[0] + * cdef int ny = f.shape[1] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + */ + __pyx_v_ny = (__pyx_v_f->dimensions[1]); + + /* "astropy/convolution/boundary_fill.pyx":99 + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_fill.pyx":100 + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + */ + __pyx_v_nky = (__pyx_v_g->dimensions[1]); + + /* "astropy/convolution/boundary_fill.pyx":101 + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef int wky = nky // 2 + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_fill.pyx":102 + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) + */ + __pyx_v_wky = __Pyx_div_long(__pyx_v_nky, 2); + + /* "astropy/convolution/boundary_fill.pyx":103 + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) + * cdef unsigned int i, j, iii, jjj + */ + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = PyObject_GetAttr(__pyx_t_4, __pyx_n_s__empty); if (unlikely(!__pyx_t_5)) {__pyx_filename = 
__pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PyList_New(2); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_7, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_7, 1, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_4 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_7)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_7)); + __pyx_t_7 = 0; + __pyx_t_7 = PyDict_New(); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_7)); + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + if (PyDict_SetItem(__pyx_t_7, ((PyObject *)__pyx_n_s__dtype), __pyx_t_4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyObject_Call(__pyx_t_5, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_7)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = 
__LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_7)); __pyx_t_7 = 0; + if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_8 = ((PyArrayObject *)__pyx_t_4); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_8, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 2, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_fixed.diminfo[1].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_fixed.diminfo[1].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[1]; + } + } + __pyx_t_8 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "astropy/convolution/boundary_fill.pyx":104 + * cdef int wky = nky // 2 + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef unsigned int i, j, iii, jjj + * cdef int ii, jj + */ + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 
= PyObject_GetAttr(__pyx_t_4, __pyx_n_s__empty); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyList_New(2); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_5, 1, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_4 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + if (PyDict_SetItem(__pyx_t_5, ((PyObject *)__pyx_n_s__dtype), __pyx_t_4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyObject_Call(__pyx_t_7, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_5)); if 
(unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0; + if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_9 = ((PyArrayObject *)__pyx_t_4); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_9, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 2, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 104; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_conv.diminfo[1].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_conv.diminfo[1].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[1]; + } + } + __pyx_t_9 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "astropy/convolution/boundary_fill.pyx":114 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * if npy_isnan(f[i, j]): + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_fill.pyx":115 + * # neighboring values + * for i 
in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * if npy_isnan(f[i, j]): + * top = 0. + */ + __pyx_t_12 = __pyx_v_ny; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_j = __pyx_t_13; + + /* "astropy/convolution/boundary_fill.pyx":116 + * for i in range(nx): + * for j in range(ny): + * if npy_isnan(f[i, j]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_14 = __pyx_v_i; + __pyx_t_15 = __pyx_v_j; + __pyx_t_2 = npy_isnan((*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_14, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_15, __pyx_pybuffernd_f.diminfo[1].strides))); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_fill.pyx":117 + * for j in range(ny): + * if npy_isnan(f[i, j]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_fill.pyx":118 + * if npy_isnan(f[i, j]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_fill.pyx":119 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_fill.pyx":120 + * bot = 0. 
+ * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_fill.pyx":121 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_fill.pyx":122 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_fill.pyx":123 + * jjmin = j - wky + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1: + */ + __pyx_t_16 = __pyx_v_iimax; + for (__pyx_t_17 = __pyx_v_iimin; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_ii = __pyx_t_17; + + /* "astropy/convolution/boundary_fill.pyx":124 + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1: + * val = fill_value + */ + __pyx_t_18 = __pyx_v_jjmax; + for (__pyx_t_19 = __pyx_v_jjmin; __pyx_t_19 < __pyx_t_18; __pyx_t_19+=1) { + __pyx_v_jj = __pyx_t_19; + + /* "astropy/convolution/boundary_fill.pyx":125 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1: # <<<<<<<<<<<<<< + * val = fill_value + * else: + */ + __pyx_t_2 = (__pyx_v_ii < 0); + if (!__pyx_t_2) { + __pyx_t_3 = (__pyx_v_ii > (__pyx_v_nx - 1)); + if (!__pyx_t_3) { + __pyx_t_1 = (__pyx_v_jj < 0); + if (!__pyx_t_1) { + __pyx_t_20 = (__pyx_v_jj > (__pyx_v_ny - 1)); + __pyx_t_21 = __pyx_t_20; + } else { + __pyx_t_21 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_21; + } else { + __pyx_t_1 = __pyx_t_3; + } 
+ __pyx_t_3 = __pyx_t_1; + } else { + __pyx_t_3 = __pyx_t_2; + } + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_fill.pyx":126 + * for jj in range(jjmin, jjmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1: + * val = fill_value # <<<<<<<<<<<<<< + * else: + * val = f[ii, jj] + */ + __pyx_v_val = __pyx_v_fill_value; + goto __pyx_L13; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":128 + * val = fill_value + * else: + * val = f[ii, jj] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + */ + __pyx_t_22 = __pyx_v_ii; + __pyx_t_23 = __pyx_v_jj; + if (__pyx_t_22 < 0) __pyx_t_22 += __pyx_pybuffernd_f.diminfo[0].shape; + if (__pyx_t_23 < 0) __pyx_t_23 += __pyx_pybuffernd_f.diminfo[1].shape; + __pyx_v_val = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_22, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_23, __pyx_pybuffernd_f.diminfo[1].strides)); + } + __pyx_L13:; + + /* "astropy/convolution/boundary_fill.pyx":129 + * else: + * val = f[ii, jj] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + */ + __pyx_t_3 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_fill.pyx":131 + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + * (wky + jj - j)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_24 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_25 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_v_ker = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_24, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_25, __pyx_pybuffernd_g.diminfo[1].strides)); + + /* "astropy/convolution/boundary_fill.pyx":132 + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0.: + */ 
+ __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_fill.pyx":133 + * (wky + jj - j)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0.: + * fixed[i, j] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L14; + } + __pyx_L14:; + } + } + + /* "astropy/convolution/boundary_fill.pyx":134 + * top += val * ker + * bot += ker + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i, j] = top / bot + * else: + */ + __pyx_t_3 = (__pyx_v_bot != 0.); + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_fill.pyx":135 + * bot += ker + * if bot != 0.: + * fixed[i, j] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i, j] = f[i, j] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 135; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_26 = __pyx_v_i; + __pyx_t_27 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_26, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_27, __pyx_pybuffernd_fixed.diminfo[1].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L15; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":137 + * fixed[i, j] = top / bot + * else: + * fixed[i, j] = f[i, j] # <<<<<<<<<<<<<< + * else: + * fixed[i, j] = f[i, j] + */ + __pyx_t_28 = __pyx_v_i; + __pyx_t_29 = __pyx_v_j; + __pyx_t_30 = __pyx_v_i; + __pyx_t_31 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_30, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_31, __pyx_pybuffernd_fixed.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_28, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_29, 
__pyx_pybuffernd_f.diminfo[1].strides)); + } + __pyx_L15:; + goto __pyx_L8; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":139 + * fixed[i, j] = f[i, j] + * else: + * fixed[i, j] = f[i, j] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_32 = __pyx_v_i; + __pyx_t_33 = __pyx_v_j; + __pyx_t_34 = __pyx_v_i; + __pyx_t_35 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_34, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_35, __pyx_pybuffernd_fixed.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_32, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_33, __pyx_pybuffernd_f.diminfo[1].strides)); + } + __pyx_L8:; + } + } + + /* "astropy/convolution/boundary_fill.pyx":142 + * + * # Now run the proper convolution + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * if not npy_isnan(fixed[i, j]): + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_fill.pyx":143 + * # Now run the proper convolution + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i, j]): + * top = 0. + */ + __pyx_t_12 = __pyx_v_ny; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_j = __pyx_t_13; + + /* "astropy/convolution/boundary_fill.pyx":144 + * for i in range(nx): + * for j in range(ny): + * if not npy_isnan(fixed[i, j]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. 
+ */ + __pyx_t_36 = __pyx_v_i; + __pyx_t_37 = __pyx_v_j; + __pyx_t_3 = (!npy_isnan((*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_36, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_37, __pyx_pybuffernd_fixed.diminfo[1].strides)))); + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_fill.pyx":145 + * for j in range(ny): + * if not npy_isnan(fixed[i, j]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_fill.pyx":146 + * if not npy_isnan(fixed[i, j]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_fill.pyx":147 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_fill.pyx":148 + * bot = 0. 
+ * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_fill.pyx":149 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_fill.pyx":150 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_fill.pyx":151 + * jjmin = j - wky + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1: + */ + __pyx_t_16 = __pyx_v_iimax; + for (__pyx_t_17 = __pyx_v_iimin; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_ii = __pyx_t_17; + + /* "astropy/convolution/boundary_fill.pyx":152 + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1: + * val = fill_value + */ + __pyx_t_18 = __pyx_v_jjmax; + for (__pyx_t_19 = __pyx_v_jjmin; __pyx_t_19 < __pyx_t_18; __pyx_t_19+=1) { + __pyx_v_jj = __pyx_t_19; + + /* "astropy/convolution/boundary_fill.pyx":153 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1: # <<<<<<<<<<<<<< + * val = fill_value + * else: + */ + __pyx_t_3 = (__pyx_v_ii < 0); + if (!__pyx_t_3) { + __pyx_t_2 = (__pyx_v_ii > (__pyx_v_nx - 1)); + if (!__pyx_t_2) { + __pyx_t_1 = (__pyx_v_jj < 0); + if (!__pyx_t_1) { + __pyx_t_21 = (__pyx_v_jj > (__pyx_v_ny - 1)); + __pyx_t_20 = __pyx_t_21; + } else { + __pyx_t_20 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_20; + } else { + __pyx_t_1 = __pyx_t_2; + } 
+ __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_3; + } + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_fill.pyx":154 + * for jj in range(jjmin, jjmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1: + * val = fill_value # <<<<<<<<<<<<<< + * else: + * val = fixed[ii, jj] + */ + __pyx_v_val = __pyx_v_fill_value; + goto __pyx_L25; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":156 + * val = fill_value + * else: + * val = fixed[ii, jj] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + */ + __pyx_t_38 = __pyx_v_ii; + __pyx_t_39 = __pyx_v_jj; + if (__pyx_t_38 < 0) __pyx_t_38 += __pyx_pybuffernd_fixed.diminfo[0].shape; + if (__pyx_t_39 < 0) __pyx_t_39 += __pyx_pybuffernd_fixed.diminfo[1].shape; + __pyx_v_val = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_38, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_39, __pyx_pybuffernd_fixed.diminfo[1].strides)); + } + __pyx_L25:; + + /* "astropy/convolution/boundary_fill.pyx":158 + * val = fixed[ii, jj] + * ker = g[(wkx + ii - i), + * (wky + jj - j)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_40 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_41 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_v_ker = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_40, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_41, __pyx_pybuffernd_g.diminfo[1].strides)); + + /* "astropy/convolution/boundary_fill.pyx":159 + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_2 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_fill.pyx":160 + * (wky + jj - j)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * 
bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_fill.pyx":161 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + * conv[i, j] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L26; + } + __pyx_L26:; + } + } + + /* "astropy/convolution/boundary_fill.pyx":162 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i, j] = top / bot + * else: + */ + __pyx_t_2 = (__pyx_v_bot != 0.0); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_fill.pyx":163 + * bot += ker + * if bot != 0: + * conv[i, j] = top / bot # <<<<<<<<<<<<<< + * else: + * conv[i, j] = fixed[i, j] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 163; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_42 = __pyx_v_i; + __pyx_t_43 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_42, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_43, __pyx_pybuffernd_conv.diminfo[1].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L27; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":165 + * conv[i, j] = top / bot + * else: + * conv[i, j] = fixed[i, j] # <<<<<<<<<<<<<< + * else: + * conv[i, j] = fixed[i, j] + */ + __pyx_t_44 = __pyx_v_i; + __pyx_t_45 = __pyx_v_j; + __pyx_t_46 = __pyx_v_i; + __pyx_t_47 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_46, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_47, __pyx_pybuffernd_conv.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_44, 
__pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_45, __pyx_pybuffernd_fixed.diminfo[1].strides)); + } + __pyx_L27:; + goto __pyx_L20; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":167 + * conv[i, j] = fixed[i, j] + * else: + * conv[i, j] = fixed[i, j] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_48 = __pyx_v_i; + __pyx_t_49 = __pyx_v_j; + __pyx_t_50 = __pyx_v_i; + __pyx_t_51 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_50, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_51, __pyx_pybuffernd_conv.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_48, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_49, __pyx_pybuffernd_fixed.diminfo[1].strides)); + } + __pyx_L20:; + } + } + + /* "astropy/convolution/boundary_fill.pyx":169 + * conv[i, j] = fixed[i, j] + * + * return conv # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_fill.convolve2d_boundary_fill", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + 
__Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_fill_5convolve3d_boundary_fill(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_13boundary_fill_5convolve3d_boundary_fill = {__Pyx_NAMESTR("convolve3d_boundary_fill"), (PyCFunction)__pyx_pw_7astropy_11convolution_13boundary_fill_5convolve3d_boundary_fill, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_fill_5convolve3d_boundary_fill(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + float __pyx_v_fill_value; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve3d_boundary_fill (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,&__pyx_n_s__fill_value,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + 
if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve3d_boundary_fill", 1, 3, 3, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__fill_value)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve3d_boundary_fill", 1, 3, 3, 2); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve3d_boundary_fill") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + __pyx_v_fill_value = __pyx_PyFloat_AsFloat(values[2]); if (unlikely((__pyx_v_fill_value == (float)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve3d_boundary_fill", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_fill.convolve3d_boundary_fill", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; 
__pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_13boundary_fill_4convolve3d_boundary_fill(__pyx_self, __pyx_v_f, __pyx_v_g, __pyx_v_fill_value); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_fill.pyx":173 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_fill(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g, + * float fill_value): + */ + +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_fill_4convolve3d_boundary_fill(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g, float __pyx_v_fill_value) { + int __pyx_v_nx; + int __pyx_v_ny; + int __pyx_v_nz; + int __pyx_v_nkx; + int __pyx_v_nky; + int __pyx_v_nkz; + int __pyx_v_wkx; + int __pyx_v_wky; + int __pyx_v_wkz; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_j; + unsigned int __pyx_v_k; + int __pyx_v_ii; + int __pyx_v_jj; + int __pyx_v_kk; + int __pyx_v_iimin; + int __pyx_v_iimax; + int __pyx_v_jjmin; + int __pyx_v_jjmax; + int __pyx_v_kkmin; + int __pyx_v_kkmax; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + 
__Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyArrayObject *__pyx_t_10 = NULL; + PyArrayObject *__pyx_t_11 = NULL; + int __pyx_t_12; + unsigned int __pyx_t_13; + int __pyx_t_14; + unsigned int __pyx_t_15; + int __pyx_t_16; + unsigned int __pyx_t_17; + unsigned int __pyx_t_18; + unsigned int __pyx_t_19; + unsigned int __pyx_t_20; + int __pyx_t_21; + int __pyx_t_22; + int __pyx_t_23; + int __pyx_t_24; + int __pyx_t_25; + int __pyx_t_26; + int __pyx_t_27; + int __pyx_t_28; + int __pyx_t_29; + int __pyx_t_30; + int __pyx_t_31; + int __pyx_t_32; + unsigned int __pyx_t_33; + unsigned int __pyx_t_34; + unsigned int __pyx_t_35; + unsigned int __pyx_t_36; + unsigned int __pyx_t_37; + unsigned int __pyx_t_38; + unsigned int __pyx_t_39; + unsigned int __pyx_t_40; + unsigned int __pyx_t_41; + unsigned int __pyx_t_42; + unsigned int __pyx_t_43; + unsigned int __pyx_t_44; + unsigned int __pyx_t_45; + unsigned int __pyx_t_46; + unsigned int __pyx_t_47; + unsigned int __pyx_t_48; + unsigned int __pyx_t_49; + unsigned int __pyx_t_50; + unsigned int __pyx_t_51; + unsigned int __pyx_t_52; + unsigned int __pyx_t_53; + int __pyx_t_54; + int __pyx_t_55; + int __pyx_t_56; + unsigned int __pyx_t_57; + unsigned int __pyx_t_58; + unsigned int __pyx_t_59; + unsigned int __pyx_t_60; + unsigned int __pyx_t_61; + unsigned int __pyx_t_62; + unsigned int __pyx_t_63; + unsigned int __pyx_t_64; + unsigned int __pyx_t_65; + unsigned int __pyx_t_66; + unsigned int __pyx_t_67; + unsigned int __pyx_t_68; + unsigned int __pyx_t_69; + unsigned int __pyx_t_70; + unsigned int __pyx_t_71; + unsigned int __pyx_t_72; + unsigned int __pyx_t_73; + unsigned int __pyx_t_74; + int 
__pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve3d_boundary_fill", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + __pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 3, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_f.diminfo[1].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_f.diminfo[1].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_f.diminfo[2].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_f.diminfo[2].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[2]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 3, 0, __pyx_stack) == -1)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_g.diminfo[1].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_g.diminfo[1].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_g.diminfo[2].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_g.diminfo[2].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[2]; + + /* "astropy/convolution/boundary_fill.pyx":177 + * float fill_value): + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (!__pyx_t_1) { + __pyx_t_2 = (__Pyx_mod_long((__pyx_v_g->dimensions[1]), 2) != 1); + if (!__pyx_t_2) { + __pyx_t_3 = (__Pyx_mod_long((__pyx_v_g->dimensions[2]), 2) != 1); + __pyx_t_4 = __pyx_t_3; + } else { + __pyx_t_4 = __pyx_t_2; + } + __pyx_t_2 = __pyx_t_4; + } else { + __pyx_t_2 = __pyx_t_1; + } + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_fill.pyx":178 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_4), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 178; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 178; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + 
+ /* "astropy/convolution/boundary_fill.pyx":180 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_5 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 180; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 180; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PyObject_RichCompare(__pyx_t_5, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 180; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 180; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_2) { + __pyx_t_7 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 180; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 180; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyObject_RichCompare(__pyx_t_7, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 180; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_1 = 
__Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 180; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_4 = __pyx_t_1; + } else { + __pyx_t_4 = __pyx_t_2; + } + if (unlikely(!__pyx_t_4)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 180; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_fill.pyx":182 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_fill.pyx":183 + * + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] # <<<<<<<<<<<<<< + * cdef int nz = f.shape[2] + * cdef int nkx = g.shape[0] + */ + __pyx_v_ny = (__pyx_v_f->dimensions[1]); + + /* "astropy/convolution/boundary_fill.pyx":184 + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + */ + __pyx_v_nz = (__pyx_v_f->dimensions[2]); + + /* "astropy/convolution/boundary_fill.pyx":185 + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_fill.pyx":186 + * cdef int nz = f.shape[2] + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] # <<<<<<<<<<<<<< + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nky = (__pyx_v_g->dimensions[1]); + + /* "astropy/convolution/boundary_fill.pyx":187 + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + */ + __pyx_v_nkz = (__pyx_v_g->dimensions[2]); + 
+ /* "astropy/convolution/boundary_fill.pyx":188 + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef int wky = nky // 2 + * cdef int wkz = nkz // 2 + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_fill.pyx":189 + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 # <<<<<<<<<<<<<< + * cdef int wkz = nkz // 2 + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + */ + __pyx_v_wky = __Pyx_div_long(__pyx_v_nky, 2); + + /* "astropy/convolution/boundary_fill.pyx":190 + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + * cdef int wkz = nkz // 2 # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) + */ + __pyx_v_wkz = __Pyx_div_long(__pyx_v_nkz, 2); + + /* "astropy/convolution/boundary_fill.pyx":191 + * cdef int wky = nky // 2 + * cdef int wkz = nkz // 2 + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) + * cdef unsigned int i, j, k, iii, jjj, kkk + */ + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyObject_GetAttr(__pyx_t_5, __pyx_n_s__empty); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_7)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyInt_FromLong(__pyx_v_nz); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = PyList_New(3); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_9); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_9, 1, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_9, 2, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_8); + __pyx_t_5 = 0; + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_8 = PyTuple_New(1); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_8, 0, ((PyObject *)__pyx_t_9)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_9)); + __pyx_t_9 = 0; + __pyx_t_9 = PyDict_New(); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_9)); + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_t_9, ((PyObject *)__pyx_n_s__dtype), __pyx_t_7) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyObject_Call(__pyx_t_6, ((PyObject *)__pyx_t_8), ((PyObject *)__pyx_t_9)); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject 
*)__pyx_t_8)); __pyx_t_8 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_9)); __pyx_t_9 = 0; + if (!(likely(((__pyx_t_7) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_7, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_10 = ((PyArrayObject *)__pyx_t_7); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_10, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 3, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_fixed.diminfo[1].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_fixed.diminfo[1].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_fixed.diminfo[2].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_fixed.diminfo[2].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[2]; + } + } + __pyx_t_10 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "astropy/convolution/boundary_fill.pyx":192 + * cdef int wkz = nkz // 2 + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef unsigned int i, j, k, iii, jjj, kkk + * cdef int ii, jj, kk + */ + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = PyObject_GetAttr(__pyx_t_7, __pyx_n_s__empty); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = PyInt_FromLong(__pyx_v_nz); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyList_New(3); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_5, 0, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_5, 1, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_8); + PyList_SET_ITEM(__pyx_t_5, 2, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_8 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = 
__LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + if (PyDict_SetItem(__pyx_t_5, ((PyObject *)__pyx_n_s__dtype), __pyx_t_8) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = PyObject_Call(__pyx_t_9, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_5)); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0; + if (!(likely(((__pyx_t_8) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_8, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_11 = ((PyArrayObject *)__pyx_t_8); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_11, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 3, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_conv.diminfo[1].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_conv.diminfo[1].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_conv.diminfo[2].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_conv.diminfo[2].shape = 
__pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[2]; + } + } + __pyx_t_11 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_8); + __pyx_t_8 = 0; + + /* "astropy/convolution/boundary_fill.pyx":202 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * for k in range(nz): + */ + __pyx_t_12 = __pyx_v_nx; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_i = __pyx_t_13; + + /* "astropy/convolution/boundary_fill.pyx":203 + * # neighboring values + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * for k in range(nz): + * if npy_isnan(f[i, j, k]): + */ + __pyx_t_14 = __pyx_v_ny; + for (__pyx_t_15 = 0; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_j = __pyx_t_15; + + /* "astropy/convolution/boundary_fill.pyx":204 + * for i in range(nx): + * for j in range(ny): + * for k in range(nz): # <<<<<<<<<<<<<< + * if npy_isnan(f[i, j, k]): + * top = 0. + */ + __pyx_t_16 = __pyx_v_nz; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_k = __pyx_t_17; + + /* "astropy/convolution/boundary_fill.pyx":205 + * for j in range(ny): + * for k in range(nz): + * if npy_isnan(f[i, j, k]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_18 = __pyx_v_i; + __pyx_t_19 = __pyx_v_j; + __pyx_t_20 = __pyx_v_k; + __pyx_t_4 = npy_isnan((*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_18, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_19, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_20, __pyx_pybuffernd_f.diminfo[2].strides))); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_fill.pyx":206 + * for k in range(nz): + * if npy_isnan(f[i, j, k]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_fill.pyx":207 + * if npy_isnan(f[i, j, k]): + * top = 0. 
+ * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_fill.pyx":208 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_fill.pyx":209 + * bot = 0. + * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_fill.pyx":210 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * kkmin = k - wkz + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_fill.pyx":211 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * kkmin = k - wkz + * kkmax = k + wkz + 1 + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_fill.pyx":212 + * jjmin = j - wky + * jjmax = j + wky + 1 + * kkmin = k - wkz # <<<<<<<<<<<<<< + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_kkmin = (__pyx_v_k - __pyx_v_wkz); + + /* "astropy/convolution/boundary_fill.pyx":213 + * jjmax = j + wky + 1 + * kkmin = k - wkz + * kkmax = k + wkz + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_kkmax = ((__pyx_v_k + __pyx_v_wkz) + 1); + + /* "astropy/convolution/boundary_fill.pyx":214 + * kkmin = k - wkz + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + */ + __pyx_t_21 = __pyx_v_iimax; + for (__pyx_t_22 = __pyx_v_iimin; __pyx_t_22 < __pyx_t_21; __pyx_t_22+=1) { + __pyx_v_ii = __pyx_t_22; + + /* "astropy/convolution/boundary_fill.pyx":215 + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # 
<<<<<<<<<<<<<< + * for kk in range(kkmin, kkmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1 or kk < 0 or kk > nz - 1: + */ + __pyx_t_23 = __pyx_v_jjmax; + for (__pyx_t_24 = __pyx_v_jjmin; __pyx_t_24 < __pyx_t_23; __pyx_t_24+=1) { + __pyx_v_jj = __pyx_t_24; + + /* "astropy/convolution/boundary_fill.pyx":216 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): # <<<<<<<<<<<<<< + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1 or kk < 0 or kk > nz - 1: + * val = fill_value + */ + __pyx_t_25 = __pyx_v_kkmax; + for (__pyx_t_26 = __pyx_v_kkmin; __pyx_t_26 < __pyx_t_25; __pyx_t_26+=1) { + __pyx_v_kk = __pyx_t_26; + + /* "astropy/convolution/boundary_fill.pyx":217 + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1 or kk < 0 or kk > nz - 1: # <<<<<<<<<<<<<< + * val = fill_value + * else: + */ + __pyx_t_4 = (__pyx_v_ii < 0); + if (!__pyx_t_4) { + __pyx_t_2 = (__pyx_v_ii > (__pyx_v_nx - 1)); + if (!__pyx_t_2) { + __pyx_t_1 = (__pyx_v_jj < 0); + if (!__pyx_t_1) { + __pyx_t_3 = (__pyx_v_jj > (__pyx_v_ny - 1)); + if (!__pyx_t_3) { + __pyx_t_27 = (__pyx_v_kk < 0); + if (!__pyx_t_27) { + __pyx_t_28 = (__pyx_v_kk > (__pyx_v_nz - 1)); + __pyx_t_29 = __pyx_t_28; + } else { + __pyx_t_29 = __pyx_t_27; + } + __pyx_t_27 = __pyx_t_29; + } else { + __pyx_t_27 = __pyx_t_3; + } + __pyx_t_3 = __pyx_t_27; + } else { + __pyx_t_3 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_3; + } else { + __pyx_t_1 = __pyx_t_2; + } + __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_4; + } + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_fill.pyx":218 + * for kk in range(kkmin, kkmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1 or kk < 0 or kk > nz - 1: + * val = fill_value # <<<<<<<<<<<<<< + * else: + * val = f[ii, jj, kk] + */ + __pyx_v_val = __pyx_v_fill_value; + goto __pyx_L17; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":220 + * 
val = fill_value + * else: + * val = f[ii, jj, kk] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + */ + __pyx_t_30 = __pyx_v_ii; + __pyx_t_31 = __pyx_v_jj; + __pyx_t_32 = __pyx_v_kk; + if (__pyx_t_30 < 0) __pyx_t_30 += __pyx_pybuffernd_f.diminfo[0].shape; + if (__pyx_t_31 < 0) __pyx_t_31 += __pyx_pybuffernd_f.diminfo[1].shape; + if (__pyx_t_32 < 0) __pyx_t_32 += __pyx_pybuffernd_f.diminfo[2].shape; + __pyx_v_val = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_30, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_31, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_32, __pyx_pybuffernd_f.diminfo[2].strides)); + } + __pyx_L17:; + + /* "astropy/convolution/boundary_fill.pyx":221 + * else: + * val = f[ii, jj, kk] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j), + */ + __pyx_t_2 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_fill.pyx":224 + * ker = g[(wkx + ii - i), + * (wky + jj - j), + * (wkz + kk - k)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_33 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_34 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_t_35 = ((unsigned int)((__pyx_v_wkz + __pyx_v_kk) - __pyx_v_k)); + __pyx_v_ker = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_33, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_34, __pyx_pybuffernd_g.diminfo[1].strides, __pyx_t_35, __pyx_pybuffernd_g.diminfo[2].strides)); + + /* "astropy/convolution/boundary_fill.pyx":225 + * (wky + jj - j), + * (wkz + kk - k)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0.: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_fill.pyx":226 + * (wkz + kk - k)] + * top += val * ker + * 
bot += ker # <<<<<<<<<<<<<< + * if bot != 0.: + * fixed[i, j, k] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L18; + } + __pyx_L18:; + } + } + } + + /* "astropy/convolution/boundary_fill.pyx":227 + * top += val * ker + * bot += ker + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i, j, k] = top / bot + * else: + */ + __pyx_t_2 = (__pyx_v_bot != 0.); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_fill.pyx":228 + * bot += ker + * if bot != 0.: + * fixed[i, j, k] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i, j, k] = f[i, j, k] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 228; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_36 = __pyx_v_i; + __pyx_t_37 = __pyx_v_j; + __pyx_t_38 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_36, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_37, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_38, __pyx_pybuffernd_fixed.diminfo[2].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L19; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":230 + * fixed[i, j, k] = top / bot + * else: + * fixed[i, j, k] = f[i, j, k] # <<<<<<<<<<<<<< + * else: + * fixed[i, j, k] = f[i, j, k] + */ + __pyx_t_39 = __pyx_v_i; + __pyx_t_40 = __pyx_v_j; + __pyx_t_41 = __pyx_v_k; + __pyx_t_42 = __pyx_v_i; + __pyx_t_43 = __pyx_v_j; + __pyx_t_44 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_42, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_43, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_44, __pyx_pybuffernd_fixed.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_39, 
__pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_40, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_41, __pyx_pybuffernd_f.diminfo[2].strides)); + } + __pyx_L19:; + goto __pyx_L10; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":232 + * fixed[i, j, k] = f[i, j, k] + * else: + * fixed[i, j, k] = f[i, j, k] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_45 = __pyx_v_i; + __pyx_t_46 = __pyx_v_j; + __pyx_t_47 = __pyx_v_k; + __pyx_t_48 = __pyx_v_i; + __pyx_t_49 = __pyx_v_j; + __pyx_t_50 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_48, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_49, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_50, __pyx_pybuffernd_fixed.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_45, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_46, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_47, __pyx_pybuffernd_f.diminfo[2].strides)); + } + __pyx_L10:; + } + } + } + + /* "astropy/convolution/boundary_fill.pyx":235 + * + * # Now run the proper convolution + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * for k in range(nz): + */ + __pyx_t_12 = __pyx_v_nx; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_i = __pyx_t_13; + + /* "astropy/convolution/boundary_fill.pyx":236 + * # Now run the proper convolution + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * for k in range(nz): + * if not npy_isnan(fixed[i, j, k]): + */ + __pyx_t_14 = __pyx_v_ny; + for (__pyx_t_15 = 0; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_j = __pyx_t_15; + + /* "astropy/convolution/boundary_fill.pyx":237 + * for i in range(nx): + * for j in range(ny): + * for k in range(nz): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i, j, k]): + * top = 0. 
+ */ + __pyx_t_16 = __pyx_v_nz; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_k = __pyx_t_17; + + /* "astropy/convolution/boundary_fill.pyx":238 + * for j in range(ny): + * for k in range(nz): + * if not npy_isnan(fixed[i, j, k]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_51 = __pyx_v_i; + __pyx_t_52 = __pyx_v_j; + __pyx_t_53 = __pyx_v_k; + __pyx_t_2 = (!npy_isnan((*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_51, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_52, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_53, __pyx_pybuffernd_fixed.diminfo[2].strides)))); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_fill.pyx":239 + * for k in range(nz): + * if not npy_isnan(fixed[i, j, k]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_fill.pyx":240 + * if not npy_isnan(fixed[i, j, k]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_fill.pyx":241 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_fill.pyx":242 + * bot = 0. 
+ * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_fill.pyx":243 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * kkmin = k - wkz + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_fill.pyx":244 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * kkmin = k - wkz + * kkmax = k + wkz + 1 + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_fill.pyx":245 + * jjmin = j - wky + * jjmax = j + wky + 1 + * kkmin = k - wkz # <<<<<<<<<<<<<< + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_kkmin = (__pyx_v_k - __pyx_v_wkz); + + /* "astropy/convolution/boundary_fill.pyx":246 + * jjmax = j + wky + 1 + * kkmin = k - wkz + * kkmax = k + wkz + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_kkmax = ((__pyx_v_k + __pyx_v_wkz) + 1); + + /* "astropy/convolution/boundary_fill.pyx":247 + * kkmin = k - wkz + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + */ + __pyx_t_21 = __pyx_v_iimax; + for (__pyx_t_22 = __pyx_v_iimin; __pyx_t_22 < __pyx_t_21; __pyx_t_22+=1) { + __pyx_v_ii = __pyx_t_22; + + /* "astropy/convolution/boundary_fill.pyx":248 + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * for kk in range(kkmin, kkmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1 or kk < 0 or kk > nz - 1: + */ + __pyx_t_23 = __pyx_v_jjmax; + for (__pyx_t_24 = __pyx_v_jjmin; __pyx_t_24 < __pyx_t_23; __pyx_t_24+=1) { + __pyx_v_jj = __pyx_t_24; + + /* "astropy/convolution/boundary_fill.pyx":249 + * for ii in range(iimin, iimax): + * for jj in 
range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): # <<<<<<<<<<<<<< + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1 or kk < 0 or kk > nz - 1: + * val = fill_value + */ + __pyx_t_25 = __pyx_v_kkmax; + for (__pyx_t_26 = __pyx_v_kkmin; __pyx_t_26 < __pyx_t_25; __pyx_t_26+=1) { + __pyx_v_kk = __pyx_t_26; + + /* "astropy/convolution/boundary_fill.pyx":250 + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1 or kk < 0 or kk > nz - 1: # <<<<<<<<<<<<<< + * val = fill_value + * else: + */ + __pyx_t_2 = (__pyx_v_ii < 0); + if (!__pyx_t_2) { + __pyx_t_4 = (__pyx_v_ii > (__pyx_v_nx - 1)); + if (!__pyx_t_4) { + __pyx_t_1 = (__pyx_v_jj < 0); + if (!__pyx_t_1) { + __pyx_t_3 = (__pyx_v_jj > (__pyx_v_ny - 1)); + if (!__pyx_t_3) { + __pyx_t_27 = (__pyx_v_kk < 0); + if (!__pyx_t_27) { + __pyx_t_29 = (__pyx_v_kk > (__pyx_v_nz - 1)); + __pyx_t_28 = __pyx_t_29; + } else { + __pyx_t_28 = __pyx_t_27; + } + __pyx_t_27 = __pyx_t_28; + } else { + __pyx_t_27 = __pyx_t_3; + } + __pyx_t_3 = __pyx_t_27; + } else { + __pyx_t_3 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_3; + } else { + __pyx_t_1 = __pyx_t_4; + } + __pyx_t_4 = __pyx_t_1; + } else { + __pyx_t_4 = __pyx_t_2; + } + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_fill.pyx":251 + * for kk in range(kkmin, kkmax): + * if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1 or kk < 0 or kk > nz - 1: + * val = fill_value # <<<<<<<<<<<<<< + * else: + * val = fixed[ii, jj, kk] + */ + __pyx_v_val = __pyx_v_fill_value; + goto __pyx_L33; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":253 + * val = fill_value + * else: + * val = fixed[ii, jj, kk] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j), + */ + __pyx_t_54 = __pyx_v_ii; + __pyx_t_55 = __pyx_v_jj; + __pyx_t_56 = __pyx_v_kk; + if (__pyx_t_54 < 0) __pyx_t_54 += __pyx_pybuffernd_fixed.diminfo[0].shape; + if (__pyx_t_55 < 0) __pyx_t_55 += __pyx_pybuffernd_fixed.diminfo[1].shape; + if 
(__pyx_t_56 < 0) __pyx_t_56 += __pyx_pybuffernd_fixed.diminfo[2].shape; + __pyx_v_val = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_54, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_55, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_56, __pyx_pybuffernd_fixed.diminfo[2].strides)); + } + __pyx_L33:; + + /* "astropy/convolution/boundary_fill.pyx":256 + * ker = g[(wkx + ii - i), + * (wky + jj - j), + * (wkz + kk - k)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_57 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_58 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_t_59 = ((unsigned int)((__pyx_v_wkz + __pyx_v_kk) - __pyx_v_k)); + __pyx_v_ker = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_57, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_58, __pyx_pybuffernd_g.diminfo[1].strides, __pyx_t_59, __pyx_pybuffernd_g.diminfo[2].strides)); + + /* "astropy/convolution/boundary_fill.pyx":257 + * (wky + jj - j), + * (wkz + kk - k)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_4 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_fill.pyx":258 + * (wkz + kk - k)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_fill.pyx":259 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + * conv[i, j, k] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L34; + } + __pyx_L34:; + } + } + } + + /* "astropy/convolution/boundary_fill.pyx":260 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i, j, k] = top / 
bot + * else: + */ + __pyx_t_4 = (__pyx_v_bot != 0.0); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_fill.pyx":261 + * bot += ker + * if bot != 0: + * conv[i, j, k] = top / bot # <<<<<<<<<<<<<< + * else: + * conv[i, j, k] = fixed[i, j, k] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 261; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_60 = __pyx_v_i; + __pyx_t_61 = __pyx_v_j; + __pyx_t_62 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_60, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_61, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_62, __pyx_pybuffernd_conv.diminfo[2].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L35; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":263 + * conv[i, j, k] = top / bot + * else: + * conv[i, j, k] = fixed[i, j, k] # <<<<<<<<<<<<<< + * else: + * conv[i, j, k] = fixed[i, j, k] + */ + __pyx_t_63 = __pyx_v_i; + __pyx_t_64 = __pyx_v_j; + __pyx_t_65 = __pyx_v_k; + __pyx_t_66 = __pyx_v_i; + __pyx_t_67 = __pyx_v_j; + __pyx_t_68 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_66, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_67, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_68, __pyx_pybuffernd_conv.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_63, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_64, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_65, __pyx_pybuffernd_fixed.diminfo[2].strides)); + } + __pyx_L35:; + goto __pyx_L26; + } + /*else*/ { + + /* "astropy/convolution/boundary_fill.pyx":265 + * conv[i, j, k] = fixed[i, j, k] + * else: + * conv[i, j, k] = 
fixed[i, j, k] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_69 = __pyx_v_i; + __pyx_t_70 = __pyx_v_j; + __pyx_t_71 = __pyx_v_k; + __pyx_t_72 = __pyx_v_i; + __pyx_t_73 = __pyx_v_j; + __pyx_t_74 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_72, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_73, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_74, __pyx_pybuffernd_conv.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_fill_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_69, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_70, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_71, __pyx_pybuffernd_fixed.diminfo[2].strides)); + } + __pyx_L26:; + } + } + } + + /* "astropy/convolution/boundary_fill.pyx":267 + * conv[i, j, k] = fixed[i, j, k] + * + * return conv # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_fill.convolve3d_boundary_fill", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + 
__Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0); + __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags)); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":194 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fullfill the PEP. 
+ */ + +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_v_copy_shape; + int __pyx_v_i; + int __pyx_v_ndim; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + int __pyx_v_t; + char *__pyx_v_f; + PyArray_Descr *__pyx_v_descr = 0; + int __pyx_v_offset; + int __pyx_v_hasfields; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + char *__pyx_t_9; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__getbuffer__", 0); + if (__pyx_v_info != NULL) { + __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(__pyx_v_info->obj); + } + + /* "numpy.pxd":200 + * # of flags + * + * if info == NULL: return # <<<<<<<<<<<<<< + * + * cdef int copy_shape, i, ndim + */ + __pyx_t_1 = (__pyx_v_info == NULL); + if (__pyx_t_1) { + __pyx_r = 0; + goto __pyx_L0; + goto __pyx_L3; + } + __pyx_L3:; + + /* "numpy.pxd":203 + * + * cdef int copy_shape, i, ndim + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + */ + __pyx_v_endian_detector = 1; + + /* "numpy.pxd":204 + * cdef int copy_shape, i, ndim + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * + * ndim = PyArray_NDIM(self) + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "numpy.pxd":206 + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); + + /* "numpy.pxd":208 + * ndim = PyArray_NDIM(self) + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * copy_shape = 1 + * else: + */ + 
__pyx_t_1 = ((sizeof(npy_intp)) != (sizeof(Py_ssize_t))); + if (__pyx_t_1) { + + /* "numpy.pxd":209 + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * copy_shape = 1 # <<<<<<<<<<<<<< + * else: + * copy_shape = 0 + */ + __pyx_v_copy_shape = 1; + goto __pyx_L4; + } + /*else*/ { + + /* "numpy.pxd":211 + * copy_shape = 1 + * else: + * copy_shape = 0 # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + */ + __pyx_v_copy_shape = 0; + } + __pyx_L4:; + + /* "numpy.pxd":213 + * copy_shape = 0 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + __pyx_t_1 = ((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS); + if (__pyx_t_1) { + + /* "numpy.pxd":214 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not C contiguous") + * + */ + __pyx_t_2 = (!PyArray_CHKFLAGS(__pyx_v_self, NPY_C_CONTIGUOUS)); + __pyx_t_3 = __pyx_t_2; + } else { + __pyx_t_3 = __pyx_t_1; + } + if (__pyx_t_3) { + + /* "numpy.pxd":215 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_6), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L5; + } + __pyx_L5:; + + /* "numpy.pxd":217 + * 
raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + __pyx_t_3 = ((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS); + if (__pyx_t_3) { + + /* "numpy.pxd":218 + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not Fortran contiguous") + * + */ + __pyx_t_1 = (!PyArray_CHKFLAGS(__pyx_v_self, NPY_F_CONTIGUOUS)); + __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_3; + } + if (__pyx_t_2) { + + /* "numpy.pxd":219 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_8), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L6; + } + __pyx_L6:; + + /* "numpy.pxd":221 + * raise ValueError(u"ndarray is not Fortran contiguous") + * + * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< + * info.ndim = ndim + * if copy_shape: + */ + __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); + + /* "numpy.pxd":222 + * + * info.buf = PyArray_DATA(self) + * info.ndim = ndim # <<<<<<<<<<<<<< + * if copy_shape: + * # Allocate new buffer for strides and shape info. 
+ */ + __pyx_v_info->ndim = __pyx_v_ndim; + + /* "numpy.pxd":223 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if copy_shape: # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + */ + if (__pyx_v_copy_shape) { + + /* "numpy.pxd":226 + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) # <<<<<<<<<<<<<< + * info.shape = info.strides + ndim + * for i in range(ndim): + */ + __pyx_v_info->strides = ((Py_ssize_t *)malloc((((sizeof(Py_ssize_t)) * ((size_t)__pyx_v_ndim)) * 2))); + + /* "numpy.pxd":227 + * # This is allocated as one block, strides first. + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) + * info.shape = info.strides + ndim # <<<<<<<<<<<<<< + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + */ + __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); + + /* "numpy.pxd":228 + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) + * info.shape = info.strides + ndim + * for i in range(ndim): # <<<<<<<<<<<<<< + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] + */ + __pyx_t_5 = __pyx_v_ndim; + for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { + __pyx_v_i = __pyx_t_6; + + /* "numpy.pxd":229 + * info.shape = info.strides + ndim + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + */ + (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); + + /* "numpy.pxd":230 + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< + * else: + * info.strides = PyArray_STRIDES(self) + */ + (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); + } + goto __pyx_L7; + } + 
/*else*/ { + + /* "numpy.pxd":232 + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + */ + __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); + + /* "numpy.pxd":233 + * else: + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + */ + __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self)); + } + __pyx_L7:; + + /* "numpy.pxd":234 + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL # <<<<<<<<<<<<<< + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) + */ + __pyx_v_info->suboffsets = NULL; + + /* "numpy.pxd":235 + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< + * info.readonly = not PyArray_ISWRITEABLE(self) + * + */ + __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); + + /* "numpy.pxd":236 + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< + * + * cdef int t + */ + __pyx_v_info->readonly = (!PyArray_ISWRITEABLE(__pyx_v_self)); + + /* "numpy.pxd":239 + * + * cdef int t + * cdef char* f = NULL # <<<<<<<<<<<<<< + * cdef dtype descr = self.descr + * cdef list stack + */ + __pyx_v_f = NULL; + + /* "numpy.pxd":240 + * cdef int t + * cdef char* f = NULL + * cdef dtype descr = self.descr # <<<<<<<<<<<<<< + * cdef list stack + * cdef int offset + */ + __pyx_t_4 = ((PyObject *)__pyx_v_self->descr); + __Pyx_INCREF(__pyx_t_4); + __pyx_v_descr = ((PyArray_Descr *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "numpy.pxd":244 + * cdef int offset + * + * cdef bint hasfields = PyDataType_HASFIELDS(descr) # <<<<<<<<<<<<<< + * + * if not hasfields and not copy_shape: + */ + 
__pyx_v_hasfields = PyDataType_HASFIELDS(__pyx_v_descr); + + /* "numpy.pxd":246 + * cdef bint hasfields = PyDataType_HASFIELDS(descr) + * + * if not hasfields and not copy_shape: # <<<<<<<<<<<<<< + * # do not call releasebuffer + * info.obj = None + */ + __pyx_t_2 = (!__pyx_v_hasfields); + if (__pyx_t_2) { + __pyx_t_3 = (!__pyx_v_copy_shape); + __pyx_t_1 = __pyx_t_3; + } else { + __pyx_t_1 = __pyx_t_2; + } + if (__pyx_t_1) { + + /* "numpy.pxd":248 + * if not hasfields and not copy_shape: + * # do not call releasebuffer + * info.obj = None # <<<<<<<<<<<<<< + * else: + * # need to call releasebuffer + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = Py_None; + goto __pyx_L10; + } + /*else*/ { + + /* "numpy.pxd":251 + * else: + * # need to call releasebuffer + * info.obj = self # <<<<<<<<<<<<<< + * + * if not hasfields: + */ + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = ((PyObject *)__pyx_v_self); + } + __pyx_L10:; + + /* "numpy.pxd":253 + * info.obj = self + * + * if not hasfields: # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + __pyx_t_1 = (!__pyx_v_hasfields); + if (__pyx_t_1) { + + /* "numpy.pxd":254 + * + * if not hasfields: + * t = descr.type_num # <<<<<<<<<<<<<< + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + */ + __pyx_t_5 = __pyx_v_descr->type_num; + __pyx_v_t = __pyx_t_5; + + /* "numpy.pxd":255 + * if not hasfields: + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_1 = (__pyx_v_descr->byteorder == '>'); + if (__pyx_t_1) { + __pyx_t_2 = 
__pyx_v_little_endian; + } else { + __pyx_t_2 = __pyx_t_1; + } + if (!__pyx_t_2) { + + /* "numpy.pxd":256 + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + */ + __pyx_t_1 = (__pyx_v_descr->byteorder == '<'); + if (__pyx_t_1) { + __pyx_t_3 = (!__pyx_v_little_endian); + __pyx_t_7 = __pyx_t_3; + } else { + __pyx_t_7 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_7; + } else { + __pyx_t_1 = __pyx_t_2; + } + if (__pyx_t_1) { + + /* "numpy.pxd":257 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_10), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L12; + } + __pyx_L12:; + + /* "numpy.pxd":258 + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + */ + __pyx_t_1 = (__pyx_v_t == NPY_BYTE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__b; + goto __pyx_L13; + } + + /* "numpy.pxd":259 + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + */ + __pyx_t_1 = (__pyx_v_t == NPY_UBYTE); + if (__pyx_t_1) { 
+ __pyx_v_f = __pyx_k__B; + goto __pyx_L13; + } + + /* "numpy.pxd":260 + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + */ + __pyx_t_1 = (__pyx_v_t == NPY_SHORT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__h; + goto __pyx_L13; + } + + /* "numpy.pxd":261 + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + */ + __pyx_t_1 = (__pyx_v_t == NPY_USHORT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__H; + goto __pyx_L13; + } + + /* "numpy.pxd":262 + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + */ + __pyx_t_1 = (__pyx_v_t == NPY_INT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__i; + goto __pyx_L13; + } + + /* "numpy.pxd":263 + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + */ + __pyx_t_1 = (__pyx_v_t == NPY_UINT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__I; + goto __pyx_L13; + } + + /* "numpy.pxd":264 + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__l; + goto __pyx_L13; + } + + /* "numpy.pxd":265 + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + */ + __pyx_t_1 = (__pyx_v_t == NPY_ULONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__L; + goto __pyx_L13; + } + + /* "numpy.pxd":266 + * elif t == NPY_LONG: f = "l" + * 
elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONGLONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__q; + goto __pyx_L13; + } + + /* "numpy.pxd":267 + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + */ + __pyx_t_1 = (__pyx_v_t == NPY_ULONGLONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Q; + goto __pyx_L13; + } + + /* "numpy.pxd":268 + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + */ + __pyx_t_1 = (__pyx_v_t == NPY_FLOAT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__f; + goto __pyx_L13; + } + + /* "numpy.pxd":269 + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + */ + __pyx_t_1 = (__pyx_v_t == NPY_DOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__d; + goto __pyx_L13; + } + + /* "numpy.pxd":270 + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONGDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__g; + goto __pyx_L13; + } + + /* "numpy.pxd":271 + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + */ + __pyx_t_1 = (__pyx_v_t == NPY_CFLOAT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zf; + goto __pyx_L13; + } + + /* "numpy.pxd":272 + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t 
== NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" + */ + __pyx_t_1 = (__pyx_v_t == NPY_CDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zd; + goto __pyx_L13; + } + + /* "numpy.pxd":273 + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f = "O" + * else: + */ + __pyx_t_1 = (__pyx_v_t == NPY_CLONGDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zg; + goto __pyx_L13; + } + + /* "numpy.pxd":274 + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_1 = (__pyx_v_t == NPY_OBJECT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__O; + goto __pyx_L13; + } + /*else*/ { + + /* "numpy.pxd":276 + * elif t == NPY_OBJECT: f = "O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * info.format = f + * return + */ + __pyx_t_4 = PyInt_FromLong(__pyx_v_t); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = PyNumber_Remainder(((PyObject *)__pyx_kp_u_11), __pyx_t_4); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_8)); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)__pyx_t_8)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_8)); + __pyx_t_8 = 0; + __pyx_t_8 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_t_4), NULL); if 
(unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_L13:; + + /* "numpy.pxd":277 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f # <<<<<<<<<<<<<< + * return + * else: + */ + __pyx_v_info->format = __pyx_v_f; + + /* "numpy.pxd":278 + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f + * return # <<<<<<<<<<<<<< + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) + */ + __pyx_r = 0; + goto __pyx_L0; + goto __pyx_L11; + } + /*else*/ { + + /* "numpy.pxd":280 + * return + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + */ + __pyx_v_info->format = ((char *)malloc(255)); + + /* "numpy.pxd":281 + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, + */ + (__pyx_v_info->format[0]) = '^'; + + /* "numpy.pxd":282 + * info.format = stdlib.malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 # <<<<<<<<<<<<<< + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + */ + __pyx_v_offset = 0; + + /* "numpy.pxd":285 + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + * &offset) # <<<<<<<<<<<<<< + * f[0] = c'\0' # Terminate format string + * + */ + __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), 
(__pyx_v_info->format + 255), (&__pyx_v_offset)); if (unlikely(__pyx_t_9 == NULL)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 283; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_v_f = __pyx_t_9; + + /* "numpy.pxd":286 + * info.format + _buffer_format_string_len, + * &offset) + * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + */ + (__pyx_v_f[0]) = '\x00'; + } + __pyx_L11:; + + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + if (__pyx_v_info != NULL && __pyx_v_info->obj != NULL) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = NULL; + } + goto __pyx_L2; + __pyx_L0:; + if (__pyx_v_info != NULL && __pyx_v_info->obj == Py_None) { + __Pyx_GOTREF(Py_None); + __Pyx_DECREF(Py_None); __pyx_v_info->obj = NULL; + } + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_descr); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0); + __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info)); + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":288 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) + */ + +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + int 
__pyx_t_1; + __Pyx_RefNannySetupContext("__releasebuffer__", 0); + + /* "numpy.pxd":289 + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_t_1 = PyArray_HASFIELDS(__pyx_v_self); + if (__pyx_t_1) { + + /* "numpy.pxd":290 + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * stdlib.free(info.strides) + */ + free(__pyx_v_info->format); + goto __pyx_L3; + } + __pyx_L3:; + + /* "numpy.pxd":291 + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * stdlib.free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + __pyx_t_1 = ((sizeof(npy_intp)) != (sizeof(Py_ssize_t))); + if (__pyx_t_1) { + + /* "numpy.pxd":292 + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * stdlib.free(info.strides) # <<<<<<<<<<<<<< + * # info.shape was stored after info.strides in the same block + * + */ + free(__pyx_v_info->strides); + goto __pyx_L4; + } + __pyx_L4:; + + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":768 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); + + /* "numpy.pxd":769 + * + * cdef inline object PyArray_MultiIterNew1(a): + * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew2(a, b): + */ + 
__Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 769; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":771 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); + + /* "numpy.pxd":772 + * + * cdef inline object PyArray_MultiIterNew2(a, b): + * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 772; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":774 + * 
return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); + + /* "numpy.pxd":775 + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 775; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":777 + * return PyArray_MultiIterNew(3, a, b, c) + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); + + /* 
"numpy.pxd":778 + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 778; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":780 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); + + /* "numpy.pxd":781 + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; 
__pyx_lineno = 781; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":783 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. + */ + +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) { + PyArray_Descr *__pyx_v_child = 0; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + PyObject *__pyx_v_fields = 0; + PyObject *__pyx_v_childname = NULL; + PyObject *__pyx_v_new_offset = NULL; + PyObject *__pyx_v_t = NULL; + char *__pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *(*__pyx_t_6)(PyObject *); + int __pyx_t_7; + int __pyx_t_8; + int __pyx_t_9; + int __pyx_t_10; + long __pyx_t_11; + char *__pyx_t_12; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_util_dtypestring", 0); + + /* "numpy.pxd":790 + * cdef int delta_offset + * cdef tuple i + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * cdef tuple fields + */ + __pyx_v_endian_detector = 1; + + /* "numpy.pxd":791 + * cdef tuple i + * cdef int endian_detector = 1 + * cdef bint little_endian = 
((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * cdef tuple fields + * + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "numpy.pxd":794 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + if (unlikely(((PyObject *)__pyx_v_descr->names) == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_1 = ((PyObject *)__pyx_v_descr->names); __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_COMPILING_IN_CPYTHON + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + __Pyx_XDECREF(__pyx_v_childname); + __pyx_v_childname = __pyx_t_3; + __pyx_t_3 = 0; + + /* "numpy.pxd":795 + * + * for childname in descr.names: + * fields = descr.fields[childname] # <<<<<<<<<<<<<< + * child, new_offset = fields + * + */ + __pyx_t_3 = PyObject_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (!__pyx_t_3) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 795; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected tuple, got %.200s", Py_TYPE(__pyx_t_3)->tp_name), 0))) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 795; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_XDECREF(((PyObject *)__pyx_v_fields)); + __pyx_v_fields = ((PyObject*)__pyx_t_3); + 
__pyx_t_3 = 0; + + /* "numpy.pxd":796 + * for childname in descr.names: + * fields = descr.fields[childname] + * child, new_offset = fields # <<<<<<<<<<<<<< + * + * if (end - f) - (new_offset - offset[0]) < 15: + */ + if (likely(PyTuple_CheckExact(((PyObject *)__pyx_v_fields)))) { + PyObject* sequence = ((PyObject *)__pyx_v_fields); + #if CYTHON_COMPILING_IN_CPYTHON + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #if CYTHON_COMPILING_IN_CPYTHON + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + #endif + } else if (1) { + __Pyx_RaiseNoneNotIterableError(); {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else + { + Py_ssize_t index = -1; + __pyx_t_5 = PyObject_GetIter(((PyObject *)__pyx_v_fields)); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = Py_TYPE(__pyx_t_5)->tp_iternext; + index = 0; __pyx_t_3 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_3)) goto __pyx_L5_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 1; __pyx_t_4 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_4)) goto __pyx_L5_unpacking_failed; + 
__Pyx_GOTREF(__pyx_t_4); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_5), 2) < 0) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_6 = NULL; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L6_unpacking_done; + __pyx_L5_unpacking_failed:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_6 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_L6_unpacking_done:; + } + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_XDECREF(((PyObject *)__pyx_v_child)); + __pyx_v_child = ((PyArray_Descr *)__pyx_t_3); + __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_v_new_offset); + __pyx_v_new_offset = __pyx_t_4; + __pyx_t_4 = 0; + + /* "numpy.pxd":798 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + __pyx_t_4 = PyInt_FromLong((__pyx_v_end - __pyx_v_f)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyInt_FromLong((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_3); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyNumber_Subtract(__pyx_t_4, __pyx_t_5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = PyObject_RichCompare(__pyx_t_3, __pyx_int_15, Py_LT); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + + /* "numpy.pxd":799 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_RuntimeError, ((PyObject *)__pyx_k_tuple_13), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L7; + } + __pyx_L7:; + + /* "numpy.pxd":801 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_7 = (__pyx_v_child->byteorder == '>'); + if (__pyx_t_7) { + __pyx_t_8 = __pyx_v_little_endian; + } else { + __pyx_t_8 = __pyx_t_7; + } + if (!__pyx_t_8) { + + /* "numpy.pxd":802 + * + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise 
ValueError(u"Non-native byte order not supported") + * # One could encode it in the format string and have Cython + */ + __pyx_t_7 = (__pyx_v_child->byteorder == '<'); + if (__pyx_t_7) { + __pyx_t_9 = (!__pyx_v_little_endian); + __pyx_t_10 = __pyx_t_9; + } else { + __pyx_t_10 = __pyx_t_7; + } + __pyx_t_7 = __pyx_t_10; + } else { + __pyx_t_7 = __pyx_t_8; + } + if (__pyx_t_7) { + + /* "numpy.pxd":803 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_14), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L8; + } + __pyx_L8:; + + /* "numpy.pxd":813 + * + * # Output padding bytes + * while offset[0] < new_offset: # <<<<<<<<<<<<<< + * f[0] = 120 # "x"; pad byte + * f += 1 + */ + while (1) { + __pyx_t_5 = PyInt_FromLong((__pyx_v_offset[0])); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_5, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!__pyx_t_7) break; + + /* "numpy.pxd":814 + * # Output padding bytes + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< + * f += 1 + * offset[0] += 1 + */ + (__pyx_v_f[0]) = 120; + + /* "numpy.pxd":815 + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte + * f += 1 # <<<<<<<<<<<<<< + * offset[0] += 1 + * + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "numpy.pxd":816 + * f[0] = 120 # "x"; pad byte + * f += 1 + * offset[0] += 1 # <<<<<<<<<<<<<< + * + * offset[0] += child.itemsize + */ + __pyx_t_11 = 0; + (__pyx_v_offset[__pyx_t_11]) = ((__pyx_v_offset[__pyx_t_11]) + 1); + } + + /* "numpy.pxd":818 + * offset[0] += 1 + * + * offset[0] += child.itemsize # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(child): + */ + __pyx_t_11 = 0; + (__pyx_v_offset[__pyx_t_11]) = ((__pyx_v_offset[__pyx_t_11]) + __pyx_v_child->elsize); + + /* "numpy.pxd":820 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + __pyx_t_7 = (!PyDataType_HASFIELDS(__pyx_v_child)); + if (__pyx_t_7) { + + /* "numpy.pxd":821 + * + * if not PyDataType_HASFIELDS(child): + * t = child.type_num # <<<<<<<<<<<<<< + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") + */ + __pyx_t_3 = PyInt_FromLong(__pyx_v_child->type_num); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 821; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_v_t); + __pyx_v_t = __pyx_t_3; + __pyx_t_3 = 0; + + /* "numpy.pxd":822 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + __pyx_t_7 = ((__pyx_v_end - __pyx_v_f) < 5); + if (__pyx_t_7) { + + /* "numpy.pxd":823 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated 
too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_t_3 = PyObject_Call(__pyx_builtin_RuntimeError, ((PyObject *)__pyx_k_tuple_16), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L12; + } + __pyx_L12:; + + /* "numpy.pxd":826 + * + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + */ + __pyx_t_3 = PyInt_FromLong(NPY_BYTE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 98; + goto __pyx_L13; + } + + /* "numpy.pxd":827 + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + */ + __pyx_t_5 = PyInt_FromLong(NPY_UBYTE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = 
PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 66; + goto __pyx_L13; + } + + /* "numpy.pxd":828 + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + */ + __pyx_t_3 = PyInt_FromLong(NPY_SHORT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 104; + goto __pyx_L13; + } + + /* "numpy.pxd":829 + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + */ + __pyx_t_5 = PyInt_FromLong(NPY_USHORT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, 
Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 72; + goto __pyx_L13; + } + + /* "numpy.pxd":830 + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + */ + __pyx_t_3 = PyInt_FromLong(NPY_INT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 105; + goto __pyx_L13; + } + + /* "numpy.pxd":831 + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + */ + __pyx_t_5 = PyInt_FromLong(NPY_UINT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if 
(unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 73; + goto __pyx_L13; + } + + /* "numpy.pxd":832 + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONG); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 108; + goto __pyx_L13; + } + + /* "numpy.pxd":833 + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + */ + __pyx_t_5 = PyInt_FromLong(NPY_ULONG); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = 
__pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 76; + goto __pyx_L13; + } + + /* "numpy.pxd":834 + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONGLONG); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 113; + goto __pyx_L13; + } + + /* "numpy.pxd":835 + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + */ + __pyx_t_5 = PyInt_FromLong(NPY_ULONGLONG); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; 
__pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 81; + goto __pyx_L13; + } + + /* "numpy.pxd":836 + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + */ + __pyx_t_3 = PyInt_FromLong(NPY_FLOAT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 102; + goto __pyx_L13; + } + + /* "numpy.pxd":837 + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + */ + __pyx_t_5 = PyInt_FromLong(NPY_DOUBLE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = 
__pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 100; + goto __pyx_L13; + } + + /* "numpy.pxd":838 + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 103; + goto __pyx_L13; + } + + /* "numpy.pxd":839 + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + */ + __pyx_t_5 = PyInt_FromLong(NPY_CFLOAT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, 
__pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 102; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":840 + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" + */ + __pyx_t_3 = PyInt_FromLong(NPY_CDOUBLE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 100; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":841 + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + */ + __pyx_t_5 = PyInt_FromLong(NPY_CLONGDOUBLE); if 
(unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 103; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":842 + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_3 = PyInt_FromLong(NPY_OBJECT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 79; + goto __pyx_L13; + } + /*else*/ { + + /* "numpy.pxd":844 + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * f += 1 + * else: + */ + __pyx_t_5 = 
PyNumber_Remainder(((PyObject *)__pyx_kp_u_11), __pyx_v_t); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_L13:; + + /* "numpy.pxd":845 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * f += 1 # <<<<<<<<<<<<<< + * else: + * # Cython ignores struct boundary information ("T{...}"), + */ + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L11; + } + /*else*/ { + + /* "numpy.pxd":849 + * # Cython ignores struct boundary information ("T{...}"), + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< + * return f + * + */ + __pyx_t_12 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_12 == NULL)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 849; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_v_f = __pyx_t_12; + } + __pyx_L11:; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "numpy.pxd":850 + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) + * return f # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_v_f; + goto __pyx_L0; + + __pyx_r = 
0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_child); + __Pyx_XDECREF(__pyx_v_fields); + __Pyx_XDECREF(__pyx_v_childname); + __Pyx_XDECREF(__pyx_v_new_offset); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":965 + * + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * cdef PyObject* baseptr + * if base is None: + */ + +static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_arr, PyObject *__pyx_v_base) { + PyObject *__pyx_v_baseptr; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("set_array_base", 0); + + /* "numpy.pxd":967 + * cdef inline void set_array_base(ndarray arr, object base): + * cdef PyObject* baseptr + * if base is None: # <<<<<<<<<<<<<< + * baseptr = NULL + * else: + */ + __pyx_t_1 = (__pyx_v_base == Py_None); + if (__pyx_t_1) { + + /* "numpy.pxd":968 + * cdef PyObject* baseptr + * if base is None: + * baseptr = NULL # <<<<<<<<<<<<<< + * else: + * Py_INCREF(base) # important to do this before decref below! + */ + __pyx_v_baseptr = NULL; + goto __pyx_L3; + } + /*else*/ { + + /* "numpy.pxd":970 + * baseptr = NULL + * else: + * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< + * baseptr = base + * Py_XDECREF(arr.base) + */ + Py_INCREF(__pyx_v_base); + + /* "numpy.pxd":971 + * else: + * Py_INCREF(base) # important to do this before decref below! + * baseptr = base # <<<<<<<<<<<<<< + * Py_XDECREF(arr.base) + * arr.base = baseptr + */ + __pyx_v_baseptr = ((PyObject *)__pyx_v_base); + } + __pyx_L3:; + + /* "numpy.pxd":972 + * Py_INCREF(base) # important to do this before decref below! 
+ * baseptr = base + * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< + * arr.base = baseptr + * + */ + Py_XDECREF(__pyx_v_arr->base); + + /* "numpy.pxd":973 + * baseptr = base + * Py_XDECREF(arr.base) + * arr.base = baseptr # <<<<<<<<<<<<<< + * + * cdef inline object get_array_base(ndarray arr): + */ + __pyx_v_arr->base = __pyx_v_baseptr; + + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":975 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("get_array_base", 0); + + /* "numpy.pxd":976 + * + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: # <<<<<<<<<<<<<< + * return None + * else: + */ + __pyx_t_1 = (__pyx_v_arr->base == NULL); + if (__pyx_t_1) { + + /* "numpy.pxd":977 + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: + * return None # <<<<<<<<<<<<<< + * else: + * return arr.base + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L0; + goto __pyx_L3; + } + /*else*/ { + + /* "numpy.pxd":979 + * return None + * else: + * return arr.base # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_arr->base)); + __pyx_r = ((PyObject *)__pyx_v_arr->base); + goto __pyx_L0; + } + __pyx_L3:; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef __pyx_moduledef = { + #if PY_VERSION_HEX < 0x03020000 + { PyObject_HEAD_INIT(NULL) NULL, 0, NULL }, + #else + PyModuleDef_HEAD_INIT, + #endif + __Pyx_NAMESTR("boundary_fill"), + 0, /* m_doc */ + -1, /* m_size */ + __pyx_methods /* 
m_methods */, + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_1, __pyx_k_1, sizeof(__pyx_k_1), 0, 0, 1, 0}, + {&__pyx_kp_u_11, __pyx_k_11, sizeof(__pyx_k_11), 0, 1, 0, 0}, + {&__pyx_kp_u_12, __pyx_k_12, sizeof(__pyx_k_12), 0, 1, 0, 0}, + {&__pyx_kp_u_15, __pyx_k_15, sizeof(__pyx_k_15), 0, 1, 0, 0}, + {&__pyx_n_s_19, __pyx_k_19, sizeof(__pyx_k_19), 0, 0, 1, 1}, + {&__pyx_kp_s_20, __pyx_k_20, sizeof(__pyx_k_20), 0, 0, 1, 0}, + {&__pyx_n_s_21, __pyx_k_21, sizeof(__pyx_k_21), 0, 0, 1, 1}, + {&__pyx_n_s_24, __pyx_k_24, sizeof(__pyx_k_24), 0, 0, 1, 1}, + {&__pyx_n_s_27, __pyx_k_27, sizeof(__pyx_k_27), 0, 0, 1, 1}, + {&__pyx_kp_u_5, __pyx_k_5, sizeof(__pyx_k_5), 0, 1, 0, 0}, + {&__pyx_kp_u_7, __pyx_k_7, sizeof(__pyx_k_7), 0, 1, 0, 0}, + {&__pyx_kp_u_9, __pyx_k_9, sizeof(__pyx_k_9), 0, 1, 0, 0}, + {&__pyx_n_s__DTYPE, __pyx_k__DTYPE, sizeof(__pyx_k__DTYPE), 0, 0, 1, 1}, + {&__pyx_n_s__RuntimeError, __pyx_k__RuntimeError, sizeof(__pyx_k__RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s__ValueError, __pyx_k__ValueError, sizeof(__pyx_k__ValueError), 0, 0, 1, 1}, + {&__pyx_n_s____main__, __pyx_k____main__, sizeof(__pyx_k____main__), 0, 0, 1, 1}, + {&__pyx_n_s____test__, __pyx_k____test__, sizeof(__pyx_k____test__), 0, 0, 1, 1}, + {&__pyx_n_s__bot, __pyx_k__bot, sizeof(__pyx_k__bot), 0, 0, 1, 1}, + {&__pyx_n_s__conv, __pyx_k__conv, sizeof(__pyx_k__conv), 0, 0, 1, 1}, + {&__pyx_n_s__dtype, __pyx_k__dtype, sizeof(__pyx_k__dtype), 0, 0, 1, 1}, + {&__pyx_n_s__empty, __pyx_k__empty, sizeof(__pyx_k__empty), 0, 0, 1, 1}, + {&__pyx_n_s__f, __pyx_k__f, sizeof(__pyx_k__f), 0, 0, 1, 1}, + {&__pyx_n_s__fill_value, __pyx_k__fill_value, sizeof(__pyx_k__fill_value), 0, 0, 1, 1}, + {&__pyx_n_s__fixed, __pyx_k__fixed, sizeof(__pyx_k__fixed), 0, 0, 1, 1}, + {&__pyx_n_s__float, __pyx_k__float, sizeof(__pyx_k__float), 0, 0, 1, 1}, + {&__pyx_n_s__g, __pyx_k__g, sizeof(__pyx_k__g), 0, 
0, 1, 1}, + {&__pyx_n_s__i, __pyx_k__i, sizeof(__pyx_k__i), 0, 0, 1, 1}, + {&__pyx_n_s__ii, __pyx_k__ii, sizeof(__pyx_k__ii), 0, 0, 1, 1}, + {&__pyx_n_s__iii, __pyx_k__iii, sizeof(__pyx_k__iii), 0, 0, 1, 1}, + {&__pyx_n_s__iimax, __pyx_k__iimax, sizeof(__pyx_k__iimax), 0, 0, 1, 1}, + {&__pyx_n_s__iimin, __pyx_k__iimin, sizeof(__pyx_k__iimin), 0, 0, 1, 1}, + {&__pyx_n_s__j, __pyx_k__j, sizeof(__pyx_k__j), 0, 0, 1, 1}, + {&__pyx_n_s__jj, __pyx_k__jj, sizeof(__pyx_k__jj), 0, 0, 1, 1}, + {&__pyx_n_s__jjj, __pyx_k__jjj, sizeof(__pyx_k__jjj), 0, 0, 1, 1}, + {&__pyx_n_s__jjmax, __pyx_k__jjmax, sizeof(__pyx_k__jjmax), 0, 0, 1, 1}, + {&__pyx_n_s__jjmin, __pyx_k__jjmin, sizeof(__pyx_k__jjmin), 0, 0, 1, 1}, + {&__pyx_n_s__k, __pyx_k__k, sizeof(__pyx_k__k), 0, 0, 1, 1}, + {&__pyx_n_s__ker, __pyx_k__ker, sizeof(__pyx_k__ker), 0, 0, 1, 1}, + {&__pyx_n_s__kk, __pyx_k__kk, sizeof(__pyx_k__kk), 0, 0, 1, 1}, + {&__pyx_n_s__kkk, __pyx_k__kkk, sizeof(__pyx_k__kkk), 0, 0, 1, 1}, + {&__pyx_n_s__kkmax, __pyx_k__kkmax, sizeof(__pyx_k__kkmax), 0, 0, 1, 1}, + {&__pyx_n_s__kkmin, __pyx_k__kkmin, sizeof(__pyx_k__kkmin), 0, 0, 1, 1}, + {&__pyx_n_s__nkx, __pyx_k__nkx, sizeof(__pyx_k__nkx), 0, 0, 1, 1}, + {&__pyx_n_s__nky, __pyx_k__nky, sizeof(__pyx_k__nky), 0, 0, 1, 1}, + {&__pyx_n_s__nkz, __pyx_k__nkz, sizeof(__pyx_k__nkz), 0, 0, 1, 1}, + {&__pyx_n_s__np, __pyx_k__np, sizeof(__pyx_k__np), 0, 0, 1, 1}, + {&__pyx_n_s__numpy, __pyx_k__numpy, sizeof(__pyx_k__numpy), 0, 0, 1, 1}, + {&__pyx_n_s__nx, __pyx_k__nx, sizeof(__pyx_k__nx), 0, 0, 1, 1}, + {&__pyx_n_s__ny, __pyx_k__ny, sizeof(__pyx_k__ny), 0, 0, 1, 1}, + {&__pyx_n_s__nz, __pyx_k__nz, sizeof(__pyx_k__nz), 0, 0, 1, 1}, + {&__pyx_n_s__range, __pyx_k__range, sizeof(__pyx_k__range), 0, 0, 1, 1}, + {&__pyx_n_s__top, __pyx_k__top, sizeof(__pyx_k__top), 0, 0, 1, 1}, + {&__pyx_n_s__val, __pyx_k__val, sizeof(__pyx_k__val), 0, 0, 1, 1}, + {&__pyx_n_s__wkx, __pyx_k__wkx, sizeof(__pyx_k__wkx), 0, 0, 1, 1}, + {&__pyx_n_s__wky, __pyx_k__wky, 
sizeof(__pyx_k__wky), 0, 0, 1, 1}, + {&__pyx_n_s__wkz, __pyx_k__wkz, sizeof(__pyx_k__wkz), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_ValueError = __Pyx_GetName(__pyx_b, __pyx_n_s__ValueError); if (!__pyx_builtin_ValueError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_builtin_range = __Pyx_GetName(__pyx_b, __pyx_n_s__range); if (!__pyx_builtin_range) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 39; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_builtin_RuntimeError = __Pyx_GetName(__pyx_b, __pyx_n_s__RuntimeError); if (!__pyx_builtin_RuntimeError) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "astropy/convolution/boundary_fill.pyx":21 + * + * if g.shape[0] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_2 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_2); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_2)); + + /* "astropy/convolution/boundary_fill.pyx":93 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_3 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 93; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_3); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_3)); + + /* 
"astropy/convolution/boundary_fill.pyx":178 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_4 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 178; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_4); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_4)); + + /* "numpy.pxd":215 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_k_tuple_6 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_5)); if (unlikely(!__pyx_k_tuple_6)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_6); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_6)); + + /* "numpy.pxd":219 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_k_tuple_8 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_7)); if (unlikely(!__pyx_k_tuple_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_8); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_8)); + + /* "numpy.pxd":257 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_k_tuple_10 = PyTuple_Pack(1, ((PyObject 
*)__pyx_kp_u_9)); if (unlikely(!__pyx_k_tuple_10)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_10); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_10)); + + /* "numpy.pxd":799 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_k_tuple_13 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_12)); if (unlikely(!__pyx_k_tuple_13)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_13); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_13)); + + /* "numpy.pxd":803 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_k_tuple_14 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_9)); if (unlikely(!__pyx_k_tuple_14)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_14); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_14)); + + /* "numpy.pxd":823 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_k_tuple_16 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_15)); if (unlikely(!__pyx_k_tuple_16)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_16); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_16)); + + /* "astropy/convolution/boundary_fill.pyx":16 + * + * @cython.boundscheck(False) # turn off bounds-checking for 
entire function + * def convolve1d_boundary_fill(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=1] g, + * float fill_value): + */ + __pyx_k_tuple_17 = PyTuple_Pack(17, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__fill_value), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__iii), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_17)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_17); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_17)); + __pyx_k_codeobj_18 = (PyObject*)__Pyx_PyCode_New(3, 0, 17, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_19, 16, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_18)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + + /* "astropy/convolution/boundary_fill.pyx":88 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_fill(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g, + * float fill_value): + */ + __pyx_k_tuple_22 = PyTuple_Pack(25, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__fill_value), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__ny), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__nky), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__wky), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__j), 
((PyObject *)__pyx_n_s__iii), ((PyObject *)__pyx_n_s__jjj), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__jj), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__jjmin), ((PyObject *)__pyx_n_s__jjmax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_22)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_22); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_22)); + __pyx_k_codeobj_23 = (PyObject*)__Pyx_PyCode_New(3, 0, 25, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_24, 88, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_23)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + + /* "astropy/convolution/boundary_fill.pyx":173 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_fill(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g, + * float fill_value): + */ + __pyx_k_tuple_25 = PyTuple_Pack(33, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__fill_value), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__ny), ((PyObject *)__pyx_n_s__nz), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__nky), ((PyObject *)__pyx_n_s__nkz), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__wky), ((PyObject *)__pyx_n_s__wkz), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__j), ((PyObject *)__pyx_n_s__k), ((PyObject *)__pyx_n_s__iii), ((PyObject *)__pyx_n_s__jjj), ((PyObject *)__pyx_n_s__kkk), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__jj), ((PyObject *)__pyx_n_s__kk), ((PyObject *)__pyx_n_s__iimin), ((PyObject 
*)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__jjmin), ((PyObject *)__pyx_n_s__jjmax), ((PyObject *)__pyx_n_s__kkmin), ((PyObject *)__pyx_n_s__kkmax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_25)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_25); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_25)); + __pyx_k_codeobj_26 = (PyObject*)__Pyx_PyCode_New(3, 0, 33, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_25, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_27, 173, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_26)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + __pyx_int_15 = PyInt_FromLong(15); if (unlikely(!__pyx_int_15)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + return 0; + __pyx_L1_error:; + return -1; +} + +#if PY_MAJOR_VERSION < 3 +PyMODINIT_FUNC initboundary_fill(void); /*proto*/ +PyMODINIT_FUNC initboundary_fill(void) +#else +PyMODINIT_FUNC PyInit_boundary_fill(void); /*proto*/ +PyMODINIT_FUNC PyInit_boundary_fill(void) +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_REFNANNY + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); + if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); + } + #endif + 
__Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit_boundary_fill(void)", 0); + if ( __Pyx_check_binary_version() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #ifdef __Pyx_CyFunction_USED + if (__Pyx_CyFunction_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? 
*/ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4(__Pyx_NAMESTR("boundary_fill"), __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (!PyDict_GetItemString(modules, "astropy.convolution.boundary_fill")) { + if (unlikely(PyDict_SetItemString(modules, "astropy.convolution.boundary_fill", __pyx_m) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + } + #endif + __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME)); if (unlikely(!__pyx_b)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + /*--- Initialize various global constants etc. 
---*/ + if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (__pyx_module_is_main_astropy__convolution__boundary_fill) { + if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s____main__) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + } + /*--- Builtin init code ---*/ + if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Constants init code ---*/ + if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Global init code ---*/ + /*--- Variable export code ---*/ + /*--- Function export code ---*/ + /*--- Type init code ---*/ + /*--- Type import code ---*/ + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", + #if CYTHON_COMPILING_IN_PYPY + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 9; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_dtype = __Pyx_ImportType("numpy", "dtype", sizeof(PyArray_Descr), 0); if (unlikely(!__pyx_ptype_5numpy_dtype)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 155; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_flatiter = __Pyx_ImportType("numpy", "flatiter", sizeof(PyArrayIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_flatiter)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 165; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_broadcast = __Pyx_ImportType("numpy", "broadcast", sizeof(PyArrayMultiIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_broadcast)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 169; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_ndarray = 
__Pyx_ImportType("numpy", "ndarray", sizeof(PyArrayObject), 0); if (unlikely(!__pyx_ptype_5numpy_ndarray)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 178; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_ufunc = __Pyx_ImportType("numpy", "ufunc", sizeof(PyUFuncObject), 0); if (unlikely(!__pyx_ptype_5numpy_ufunc)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 861; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Variable import code ---*/ + /*--- Function import code ---*/ + /*--- Execution code ---*/ + + /* "astropy/convolution/boundary_fill.pyx":3 + * # Licensed under a 3-clause BSD style license - see LICENSE.rst + * from __future__ import division + * import numpy as np # <<<<<<<<<<<<<< + * cimport numpy as np + * + */ + __pyx_t_1 = __Pyx_Import(((PyObject *)__pyx_n_s__numpy), 0, -1); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s__np, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "astropy/convolution/boundary_fill.pyx":6 + * cimport numpy as np + * + * DTYPE = np.float # <<<<<<<<<<<<<< + * ctypedef np.float_t DTYPE_t + * + */ + __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__float); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyObject_SetAttr(__pyx_m, __pyx_n_s__DTYPE, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* 
"astropy/convolution/boundary_fill.pyx":16 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_fill(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=1] g, + * float fill_value): + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_13boundary_fill_1convolve1d_boundary_fill, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_19, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_fill.pyx":88 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_fill(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g, + * float fill_value): + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_13boundary_fill_3convolve2d_boundary_fill, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_24, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_fill.pyx":173 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_fill(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g, + * float fill_value): + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_13boundary_fill_5convolve3d_boundary_fill, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_27, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_fill.pyx":1 + * # Licensed under a 3-clause BSD style license - see LICENSE.rst # <<<<<<<<<<<<<< + * from __future__ import division + * import numpy as np + */ + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_2)); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s____test__, ((PyObject *)__pyx_t_2)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + + /* "numpy.pxd":975 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + if (__pyx_m) { + __Pyx_AddTraceback("init astropy.convolution.boundary_fill", __pyx_clineno, __pyx_lineno, __pyx_filename); + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init astropy.convolution.boundary_fill"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if PY_MAJOR_VERSION < 3 + return; + #else + return __pyx_m; + #endif +} + +/* Runtime support code */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif 
/* CYTHON_REFNANNY */ + +static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name) { + PyObject *result; + result = PyObject_GetAttr(dict, name); + if (!result) { + if (dict != __pyx_b) { + PyErr_Clear(); + result = PyObject_GetAttr(__pyx_b, name); + } + if (!result) { + PyErr_SetObject(PyExc_NameError, name); + } + } + return result; +} + +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%s() takes %s %" CYTHON_FORMAT_SSIZE_T "d positional argument%s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? "" : "s", num_found); +} + +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY 
|| PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%s() got an unexpected keyword argument '%s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact) +{ + if (!type) { + PyErr_Format(PyExc_SystemError, "Missing type object"); + return 0; + } + if (none_allowed && obj == Py_None) return 1; + else if (exact) { + if (Py_TYPE(obj) == type) return 1; + } + else { + if (PyObject_TypeCheck(obj, type)) return 1; + } + PyErr_Format(PyExc_TypeError, + "Argument '%s' has incorrect type (expected %s, got %s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); + return 0; +} + +static CYTHON_INLINE int __Pyx_IsLittleEndian(void) { + unsigned int n = 1; + return *(unsigned char*)(&n) != 0; +} +static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx, + __Pyx_BufFmt_StackElem* stack, + __Pyx_TypeInfo* type) { + stack[0].field = &ctx->root; + stack[0].parent_offset = 0; + ctx->root.type = type; + ctx->root.name = "buffer dtype"; + ctx->root.offset = 0; + ctx->head = stack; + ctx->head->field = &ctx->root; + ctx->fmt_offset = 0; + ctx->head->parent_offset = 0; + ctx->new_packmode = '@'; + ctx->enc_packmode = '@'; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->is_complex = 0; + 
ctx->is_valid_array = 0; + ctx->struct_alignment = 0; + while (type->typegroup == 'S') { + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = 0; + type = type->fields->type; + } +} +static int __Pyx_BufFmt_ParseNumber(const char** ts) { + int count; + const char* t = *ts; + if (*t < '0' || *t > '9') { + return -1; + } else { + count = *t++ - '0'; + while (*t >= '0' && *t < '9') { + count *= 10; + count += *t++ - '0'; + } + } + *ts = t; + return count; +} +static int __Pyx_BufFmt_ExpectNumber(const char **ts) { + int number = __Pyx_BufFmt_ParseNumber(ts); + if (number == -1) /* First char was not a digit */ + PyErr_Format(PyExc_ValueError,\ + "Does not understand character buffer dtype format string ('%c')", **ts); + return number; +} +static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) { + PyErr_Format(PyExc_ValueError, + "Unexpected format string character: '%c'", ch); +} +static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) { + switch (ch) { + case 'c': return "'char'"; + case 'b': return "'signed char'"; + case 'B': return "'unsigned char'"; + case 'h': return "'short'"; + case 'H': return "'unsigned short'"; + case 'i': return "'int'"; + case 'I': return "'unsigned int'"; + case 'l': return "'long'"; + case 'L': return "'unsigned long'"; + case 'q': return "'long long'"; + case 'Q': return "'unsigned long long'"; + case 'f': return (is_complex ? "'complex float'" : "'float'"); + case 'd': return (is_complex ? "'complex double'" : "'double'"); + case 'g': return (is_complex ? 
"'complex long double'" : "'long double'"); + case 'T': return "a struct"; + case 'O': return "Python object"; + case 'P': return "a pointer"; + case 's': case 'p': return "a string"; + case 0: return "end"; + default: return "unparseable format string"; + } +} +static size_t __Pyx_BufFmt_TypeCharToStandardSize(char ch, int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return 2; + case 'i': case 'I': case 'l': case 'L': return 4; + case 'q': case 'Q': return 8; + case 'f': return (is_complex ? 8 : 4); + case 'd': return (is_complex ? 16 : 8); + case 'g': { + PyErr_SetString(PyExc_ValueError, "Python does not define a standard format string size for long double ('g').."); + return 0; + } + case 'O': case 'P': return sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +static size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) { + switch (ch) { + case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(short); + case 'i': case 'I': return sizeof(int); + case 'l': case 'L': return sizeof(long); + #ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(PY_LONG_LONG); + #endif + case 'f': return sizeof(float) * (is_complex ? 2 : 1); + case 'd': return sizeof(double) * (is_complex ? 2 : 1); + case 'g': return sizeof(long double) * (is_complex ? 
2 : 1); + case 'O': case 'P': return sizeof(void*); + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} +typedef struct { char c; short x; } __Pyx_st_short; +typedef struct { char c; int x; } __Pyx_st_int; +typedef struct { char c; long x; } __Pyx_st_long; +typedef struct { char c; float x; } __Pyx_st_float; +typedef struct { char c; double x; } __Pyx_st_double; +typedef struct { char c; long double x; } __Pyx_st_longdouble; +typedef struct { char c; void *x; } __Pyx_st_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong; +#endif +static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_st_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_st_float) - sizeof(float); + case 'd': return sizeof(__Pyx_st_double) - sizeof(double); + case 'g': return sizeof(__Pyx_st_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_st_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +/* These are for computing the padding at the end of the struct to align + on the first member of the struct. This will probably the same as above, + but we don't have any guarantees. 
+ */ +typedef struct { short x; char c; } __Pyx_pad_short; +typedef struct { int x; char c; } __Pyx_pad_int; +typedef struct { long x; char c; } __Pyx_pad_long; +typedef struct { float x; char c; } __Pyx_pad_float; +typedef struct { double x; char c; } __Pyx_pad_double; +typedef struct { long double x; char c; } __Pyx_pad_longdouble; +typedef struct { void *x; char c; } __Pyx_pad_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { PY_LONG_LONG x; char c; } __Pyx_pad_longlong; +#endif +static size_t __Pyx_BufFmt_TypeCharToPadding(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_pad_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_pad_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_pad_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_pad_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_pad_float) - sizeof(float); + case 'd': return sizeof(__Pyx_pad_double) - sizeof(double); + case 'g': return sizeof(__Pyx_pad_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_pad_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) { + switch (ch) { + case 'c': + return 'H'; + case 'b': case 'h': case 'i': + case 'l': case 'q': case 's': case 'p': + return 'I'; + case 'B': case 'H': case 'I': case 'L': case 'Q': + return 'U'; + case 'f': case 'd': case 'g': + return (is_complex ? 
'C' : 'R'); + case 'O': + return 'O'; + case 'P': + return 'P'; + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} +static void __Pyx_BufFmt_RaiseExpected(__Pyx_BufFmt_Context* ctx) { + if (ctx->head == NULL || ctx->head->field == &ctx->root) { + const char* expected; + const char* quote; + if (ctx->head == NULL) { + expected = "end"; + quote = ""; + } else { + expected = ctx->head->field->type->name; + quote = "'"; + } + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected %s%s%s but got %s", + quote, expected, quote, + __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex)); + } else { + __Pyx_StructField* field = ctx->head->field; + __Pyx_StructField* parent = (ctx->head - 1)->field; + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected '%s' but got %s in '%s.%s'", + field->type->name, __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex), + parent->type->name, field->name); + } +} +static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) { + char group; + size_t size, offset, arraysize = 1; + if (ctx->enc_type == 0) return 0; + if (ctx->head->field->type->arraysize[0]) { + int i, ndim = 0; + if (ctx->enc_type == 's' || ctx->enc_type == 'p') { + ctx->is_valid_array = ctx->head->field->type->ndim == 1; + ndim = 1; + if (ctx->enc_count != ctx->head->field->type->arraysize[0]) { + PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %zu", + ctx->head->field->type->arraysize[0], ctx->enc_count); + return -1; + } + } + if (!ctx->is_valid_array) { + PyErr_Format(PyExc_ValueError, "Expected %d dimensions, got %d", + ctx->head->field->type->ndim, ndim); + return -1; + } + for (i = 0; i < ctx->head->field->type->ndim; i++) { + arraysize *= ctx->head->field->type->arraysize[i]; + } + ctx->is_valid_array = 0; + ctx->enc_count = 1; + } + group = __Pyx_BufFmt_TypeCharToGroup(ctx->enc_type, ctx->is_complex); + do { + __Pyx_StructField* field = ctx->head->field; + __Pyx_TypeInfo* 
type = field->type; + if (ctx->enc_packmode == '@' || ctx->enc_packmode == '^') { + size = __Pyx_BufFmt_TypeCharToNativeSize(ctx->enc_type, ctx->is_complex); + } else { + size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex); + } + if (ctx->enc_packmode == '@') { + size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex); + size_t align_mod_offset; + if (align_at == 0) return -1; + align_mod_offset = ctx->fmt_offset % align_at; + if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset; + if (ctx->struct_alignment == 0) + ctx->struct_alignment = __Pyx_BufFmt_TypeCharToPadding(ctx->enc_type, + ctx->is_complex); + } + if (type->size != size || type->typegroup != group) { + if (type->typegroup == 'C' && type->fields != NULL) { + size_t parent_offset = ctx->head->parent_offset + field->offset; + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = parent_offset; + continue; + } + if ((type->typegroup == 'H' || group == 'H') && type->size == size) { + } else { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + } + offset = ctx->head->parent_offset + field->offset; + if (ctx->fmt_offset != offset) { + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch; next field is at offset %" CYTHON_FORMAT_SSIZE_T "d but %" CYTHON_FORMAT_SSIZE_T "d expected", + (Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset); + return -1; + } + ctx->fmt_offset += size; + if (arraysize) + ctx->fmt_offset += (arraysize - 1) * size; + --ctx->enc_count; /* Consume from buffer string */ + while (1) { + if (field == &ctx->root) { + ctx->head = NULL; + if (ctx->enc_count != 0) { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + break; /* breaks both loops as ctx->enc_count == 0 */ + } + ctx->head->field = ++field; + if (field->type == NULL) { + --ctx->head; + field = ctx->head->field; + continue; + } else if (field->type->typegroup == 'S') { + size_t parent_offset = ctx->head->parent_offset + field->offset; + 
if (field->type->fields->type == NULL) continue; /* empty struct */ + field = field->type->fields; + ++ctx->head; + ctx->head->field = field; + ctx->head->parent_offset = parent_offset; + break; + } else { + break; + } + } + } while (ctx->enc_count); + ctx->enc_type = 0; + ctx->is_complex = 0; + return 0; +} +static CYTHON_INLINE PyObject * +__pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp) +{ + const char *ts = *tsp; + int i = 0, number; + int ndim = ctx->head->field->type->ndim; +; + ++ts; + if (ctx->new_count != 1) { + PyErr_SetString(PyExc_ValueError, + "Cannot handle repeated arrays in format string"); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + while (*ts && *ts != ')') { + if (isspace(*ts)) + continue; + number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + if (i < ndim && (size_t) number != ctx->head->field->type->arraysize[i]) + return PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %d", + ctx->head->field->type->arraysize[i], number); + if (*ts != ',' && *ts != ')') + return PyErr_Format(PyExc_ValueError, + "Expected a comma in format string, got '%c'", *ts); + if (*ts == ',') ts++; + i++; + } + if (i != ndim) + return PyErr_Format(PyExc_ValueError, "Expected %d dimension(s), got %d", + ctx->head->field->type->ndim, i); + if (!*ts) { + PyErr_SetString(PyExc_ValueError, + "Unexpected end of format string, expected ')'"); + return NULL; + } + ctx->is_valid_array = 1; + ctx->new_count = 1; + *tsp = ++ts; + return Py_None; +} +static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts) { + int got_Z = 0; + while (1) { + switch(*ts) { + case 0: + if (ctx->enc_type != 0 && ctx->head == NULL) { + __Pyx_BufFmt_RaiseExpected(ctx); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + if (ctx->head != NULL) { + __Pyx_BufFmt_RaiseExpected(ctx); + return NULL; + } + return ts; + case ' ': + case 10: + case 13: 
+ ++ts; + break; + case '<': + if (!__Pyx_IsLittleEndian()) { + PyErr_SetString(PyExc_ValueError, "Little-endian buffer not supported on big-endian compiler"); + return NULL; + } + ctx->new_packmode = '='; + ++ts; + break; + case '>': + case '!': + if (__Pyx_IsLittleEndian()) { + PyErr_SetString(PyExc_ValueError, "Big-endian buffer not supported on little-endian compiler"); + return NULL; + } + ctx->new_packmode = '='; + ++ts; + break; + case '=': + case '@': + case '^': + ctx->new_packmode = *ts++; + break; + case 'T': /* substruct */ + { + const char* ts_after_sub; + size_t i, struct_count = ctx->new_count; + size_t struct_alignment = ctx->struct_alignment; + ctx->new_count = 1; + ++ts; + if (*ts != '{') { + PyErr_SetString(PyExc_ValueError, "Buffer acquisition: Expected '{' after 'T'"); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; /* Erase processed last struct element */ + ctx->enc_count = 0; + ctx->struct_alignment = 0; + ++ts; + ts_after_sub = ts; + for (i = 0; i != struct_count; ++i) { + ts_after_sub = __Pyx_BufFmt_CheckString(ctx, ts); + if (!ts_after_sub) return NULL; + } + ts = ts_after_sub; + if (struct_alignment) ctx->struct_alignment = struct_alignment; + } + break; + case '}': /* end of substruct; either repeat or move on */ + { + size_t alignment = ctx->struct_alignment; + ++ts; + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; /* Erase processed last struct element */ + if (alignment && ctx->fmt_offset % alignment) { + ctx->fmt_offset += alignment - (ctx->fmt_offset % alignment); + } + } + return ts; + case 'x': + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->fmt_offset += ctx->new_count; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->enc_packmode = ctx->new_packmode; + ++ts; + break; + case 'Z': + got_Z = 1; + ++ts; + if (*ts != 'f' && *ts != 'd' && *ts != 'g') { + __Pyx_BufFmt_RaiseUnexpectedChar('Z'); + return NULL; 
+ } /* fall through */ + case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I': + case 'l': case 'L': case 'q': case 'Q': + case 'f': case 'd': case 'g': + case 'O': case 's': case 'p': + if (ctx->enc_type == *ts && got_Z == ctx->is_complex && + ctx->enc_packmode == ctx->new_packmode) { + ctx->enc_count += ctx->new_count; + } else { + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_count = ctx->new_count; + ctx->enc_packmode = ctx->new_packmode; + ctx->enc_type = *ts; + ctx->is_complex = got_Z; + } + ++ts; + ctx->new_count = 1; + got_Z = 0; + break; + case ':': + ++ts; + while(*ts != ':') ++ts; + ++ts; + break; + case '(': + if (!__pyx_buffmt_parse_array(ctx, &ts)) return NULL; + break; + default: + { + int number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + ctx->new_count = (size_t)number; + } + } + } +} +static CYTHON_INLINE void __Pyx_ZeroBuffer(Py_buffer* buf) { + buf->buf = NULL; + buf->obj = NULL; + buf->strides = __Pyx_zeros; + buf->shape = __Pyx_zeros; + buf->suboffsets = __Pyx_minusones; +} +static CYTHON_INLINE int __Pyx_GetBufferAndValidate( + Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, + int nd, int cast, __Pyx_BufFmt_StackElem* stack) +{ + if (obj == Py_None || obj == NULL) { + __Pyx_ZeroBuffer(buf); + return 0; + } + buf->buf = NULL; + if (__Pyx_GetBuffer(obj, buf, flags) == -1) goto fail; + if (buf->ndim != nd) { + PyErr_Format(PyExc_ValueError, + "Buffer has wrong number of dimensions (expected %d, got %d)", + nd, buf->ndim); + goto fail; + } + if (!cast) { + __Pyx_BufFmt_Context ctx; + __Pyx_BufFmt_Init(&ctx, stack, dtype); + if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail; + } + if ((unsigned)buf->itemsize != dtype->size) { + PyErr_Format(PyExc_ValueError, + "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "d byte%s) does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "d byte%s)", + buf->itemsize, (buf->itemsize > 1) ? 
"s" : "", + dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : ""); + goto fail; + } + if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones; + return 0; +fail:; + __Pyx_ZeroBuffer(buf); + return -1; +} +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) { + if (info->buf == NULL) return; + if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL; + __Pyx_ReleaseBuffer(info); +} + +static CYTHON_INLINE long __Pyx_mod_long(long a, long b) { + long r = a % b; + r += ((r != 0) & ((r ^ b) < 0)) * b; + return r; +} + +static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb) { +#if CYTHON_COMPILING_IN_CPYTHON + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyThreadState *tstate = PyThreadState_GET(); + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_Restore(type, value, tb); +#endif +} +static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) { +#if CYTHON_COMPILING_IN_CPYTHON + PyThreadState *tstate = PyThreadState_GET(); + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(type, value, tb); +#endif +} + +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + #if PY_VERSION_HEX < 0x02050000 + if 
(PyClass_Check(type)) { + #else + if (PyType_Check(type)) { + #endif +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + #if PY_VERSION_HEX < 0x02050000 + if (PyInstance_Check(type)) { + type = (PyObject*) ((PyInstanceObject*)type)->in_class; + Py_INCREF(type); + } + else { + type = 0; + PyErr_SetString(PyExc_TypeError, + "raise: exception must be an old-style class or instance"); + goto raise_error; + } + #else + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + #endif + } + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else /* Python 3+ */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } + else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyEval_CallObject(type, args); + Py_DECREF(args); + if (!owned_instance) + goto 
bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause && cause != Py_None) { + PyObject *fixed_cause; + if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } + else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } + else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { + PyThreadState *tstate = PyThreadState_GET(); + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +static CYTHON_INLINE long __Pyx_div_long(long a, long b) { + long q = a / b; + long r = a - q*b; + q -= ((r != 0) & ((r ^ b) < 0)); + return q; +} + +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_Format(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(PyObject_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%s to unpack", + 
index, (index == 1) ? "" : "s"); +} + +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_COMPILING_IN_CPYTHON + PyThreadState *tstate = PyThreadState_GET(); + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +#if PY_MAJOR_VERSION < 3 +static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) { + CYTHON_UNUSED PyObject *getbuffer_cobj; + #if PY_VERSION_HEX >= 0x02060000 + if (PyObject_CheckBuffer(obj)) return PyObject_GetBuffer(obj, view, flags); + #endif + if (PyObject_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) return __pyx_pw_5numpy_7ndarray_1__getbuffer__(obj, view, flags); + #if PY_VERSION_HEX < 0x02060000 + if (obj->ob_type->tp_dict && + (getbuffer_cobj = PyMapping_GetItemString(obj->ob_type->tp_dict, + "__pyx_getbuffer"))) { + getbufferproc func; + #if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION == 0) + func = (getbufferproc) PyCapsule_GetPointer(getbuffer_cobj, "getbuffer(obj, view, flags)"); 
+ #else + func = (getbufferproc) PyCObject_AsVoidPtr(getbuffer_cobj); + #endif + Py_DECREF(getbuffer_cobj); + if (!func) + goto fail; + return func(obj, view, flags); + } else { + PyErr_Clear(); + } + #endif + PyErr_Format(PyExc_TypeError, "'%100s' does not have the buffer interface", Py_TYPE(obj)->tp_name); +#if PY_VERSION_HEX < 0x02060000 +fail: +#endif + return -1; +} +static void __Pyx_ReleaseBuffer(Py_buffer *view) { + PyObject *obj = view->obj; + CYTHON_UNUSED PyObject *releasebuffer_cobj; + if (!obj) return; + #if PY_VERSION_HEX >= 0x02060000 + if (PyObject_CheckBuffer(obj)) { + PyBuffer_Release(view); + return; + } + #endif + if (PyObject_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) { __pyx_pw_5numpy_7ndarray_3__releasebuffer__(obj, view); return; } + #if PY_VERSION_HEX < 0x02060000 + if (obj->ob_type->tp_dict && + (releasebuffer_cobj = PyMapping_GetItemString(obj->ob_type->tp_dict, + "__pyx_releasebuffer"))) { + releasebufferproc func; + #if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION == 0) + func = (releasebufferproc) PyCapsule_GetPointer(releasebuffer_cobj, "releasebuffer(obj, view)"); + #else + func = (releasebufferproc) PyCObject_AsVoidPtr(releasebuffer_cobj); + #endif + Py_DECREF(releasebuffer_cobj); + if (!func) + goto fail; + func(obj, view); + return; + } else { + PyErr_Clear(); + } + #endif + goto nofail; +#if PY_VERSION_HEX < 0x02060000 +fail: +#endif + PyErr_WriteUnraisable(obj); +nofail: + Py_DECREF(obj); + view->obj = NULL; +} +#endif /* PY_MAJOR_VERSION < 3 */ + + + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_import = 0; + py_import = __Pyx_GetAttrString(__pyx_b, "__import__"); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if 
(!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + #if PY_VERSION_HEX >= 0x02050000 + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(1); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + #endif + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; /* try absolute import on failure */ + } + #endif + if (!module) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } + #else + if (level>0) { + PyErr_SetString(PyExc_RuntimeError, "Relative import is not supported for Python <=2.4."); + goto bad; + } + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, NULL); + #endif +bad: + #if PY_VERSION_HEX < 0x03030000 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return ::std::complex< float >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return x + y*(__pyx_t_float_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_float_complex 
__pyx_t_float_complex_from_parts(float x, float y) { + __pyx_t_float_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +#if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eqf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sumf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_difff(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prodf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quotf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float denom = b.real * b.real + b.imag * b.imag; + z.real = (a.real * b.real + a.imag * b.imag) / denom; + z.imag = (a.imag * b.real - a.real * b.imag) / denom; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_negf(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zerof(__pyx_t_float_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conjf(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE float __Pyx_c_absf(__pyx_t_float_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrtf(z.real*z.real + z.imag*z.imag); + #else + return hypotf(z.real, z.imag); + #endif + } + static CYTHON_INLINE 
__pyx_t_float_complex __Pyx_c_powf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + float denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(a, a); + case 3: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(z, a); + case 4: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } + r = a.real; + theta = 0; + } else { + r = __Pyx_c_absf(a); + theta = atan2f(a.imag, a.real); + } + lnr = logf(r); + z_r = expf(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cosf(z_theta); + z.imag = z_r * sinf(z_theta); + return z; + } + #endif +#endif + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return ::std::complex< double >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return x + y*(__pyx_t_double_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + __pyx_t_double_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +#if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq(__pyx_t_double_complex a, __pyx_t_double_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE 
__pyx_t_double_complex __Pyx_c_diff(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double denom = b.real * b.real + b.imag * b.imag; + z.real = (a.real * b.real + a.imag * b.imag) / denom; + z.imag = (a.imag * b.real - a.real * b.imag) / denom; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero(__pyx_t_double_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE double __Pyx_c_abs(__pyx_t_double_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrt(z.real*z.real + z.imag*z.imag); + #else + return hypot(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + double denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod(a, a); + return __Pyx_c_prod(a, a); + case 3: + z = 
__Pyx_c_prod(a, a); + return __Pyx_c_prod(z, a); + case 4: + z = __Pyx_c_prod(a, a); + return __Pyx_c_prod(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } + r = a.real; + theta = 0; + } else { + r = __Pyx_c_abs(a); + theta = atan2(a.imag, a.real); + } + lnr = log(r); + z_r = exp(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cos(z_theta); + z.imag = z_r * sin(z_theta); + return z; + } + #endif +#endif + +static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject* x) { + const unsigned char neg_one = (unsigned char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to unsigned char" : + "value too large to convert to unsigned char"); + } + return (unsigned char)-1; + } + return (unsigned char)val; + } + return (unsigned char)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject* x) { + const unsigned short neg_one = (unsigned short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to unsigned short" : + "value too large to convert to unsigned short"); + } + return (unsigned short)-1; + } + return (unsigned short)val; + } + return (unsigned short)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject* x) { + const unsigned int neg_one = (unsigned int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to unsigned int" : + "value too large to convert to unsigned int"); + } + return (unsigned int)-1; + } + return (unsigned int)val; + } + return (unsigned int)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject* x) { + const char neg_one = (char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to char" : + "value too large to convert to char"); + } + return (char)-1; + } + return (char)val; + } + return (char)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject* x) { + const short neg_one = (short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to short" : + "value too large to convert to short"); + } + return (short)-1; + } + return (short)val; + } + return (short)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject* x) { + const int neg_one = (int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to int" : + "value too large to convert to int"); + } + return (int)-1; + } + return (int)val; + } + return (int)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject* x) { + const signed char neg_one = (signed char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to signed char" : + "value too large to convert to signed char"); + } + return (signed char)-1; + } + return (signed char)val; + } + return (signed char)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject* x) { + const signed short neg_one = (signed short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to signed short" : + "value too large to convert to signed short"); + } + return (signed short)-1; + } + return (signed short)val; + } + return (signed short)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject* x) { + const signed int neg_one = (signed int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to signed int" : + "value too large to convert to signed int"); + } + return (signed int)-1; + } + return (signed int)val; + } + return (signed int)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject* x) { + const int neg_one = (int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to int" : + "value too large to convert to int"); + } + return (int)-1; + } + return (int)val; + } + return (int)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject* x) { + const unsigned long neg_one = (unsigned long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned long"); + return (unsigned long)-1; + } + return (unsigned long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned long"); + return (unsigned long)-1; + } + return (unsigned long)PyLong_AsUnsignedLong(x); + } else { + return (unsigned long)PyLong_AsLong(x); + } + } else { + unsigned long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (unsigned long)-1; + val = __Pyx_PyInt_AsUnsignedLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject* x) { + const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned PY_LONG_LONG"); + return (unsigned PY_LONG_LONG)-1; + } + return (unsigned PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned PY_LONG_LONG"); + return (unsigned PY_LONG_LONG)-1; + } + return (unsigned 
PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return (unsigned PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + unsigned PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (unsigned PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsUnsignedLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject* x) { + const long neg_one = (long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long)-1; + } + return (long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long)-1; + } + return (long)PyLong_AsUnsignedLong(x); + } else { + return (long)PyLong_AsLong(x); + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (long)-1; + val = __Pyx_PyInt_AsLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject* x) { + const PY_LONG_LONG neg_one = (PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to PY_LONG_LONG"); + return (PY_LONG_LONG)-1; + } + return (PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to PY_LONG_LONG"); + return (PY_LONG_LONG)-1; + } + return (PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return 
(PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject* x) { + const signed long neg_one = (signed long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed long"); + return (signed long)-1; + } + return (signed long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed long"); + return (signed long)-1; + } + return (signed long)PyLong_AsUnsignedLong(x); + } else { + return (signed long)PyLong_AsLong(x); + } + } else { + signed long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (signed long)-1; + val = __Pyx_PyInt_AsSignedLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject* x) { + const signed PY_LONG_LONG neg_one = (signed PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed PY_LONG_LONG"); + return (signed PY_LONG_LONG)-1; + } + return (signed PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed PY_LONG_LONG"); + return (signed PY_LONG_LONG)-1; + } + return (signed 
PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return (signed PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + signed PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (signed PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsSignedLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + #if PY_VERSION_HEX < 0x02050000 + return PyErr_Warn(NULL, message); + #else + return PyErr_WarnEx(NULL, message, 1); + #endif + } + return 0; +} + +#ifndef __PYX_HAVE_RT_ImportModule +#define __PYX_HAVE_RT_ImportModule +static PyObject *__Pyx_ImportModule(const char *name) { + PyObject *py_name = 0; + PyObject *py_module = 0; + py_name = __Pyx_PyIdentifier_FromString(name); + if (!py_name) + goto bad; + py_module = PyImport_Import(py_name); + Py_DECREF(py_name); + return py_module; +bad: + Py_XDECREF(py_name); + return 0; +} +#endif + +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, + size_t size, int strict) +{ + PyObject *py_module = 0; + PyObject *result = 0; + PyObject *py_name = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + py_module = __Pyx_ImportModule(module_name); + if (!py_module) + goto bad; + py_name = __Pyx_PyIdentifier_FromString(class_name); + if (!py_name) + goto bad; + result = PyObject_GetAttr(py_module, py_name); + Py_DECREF(py_name); + py_name = 0; + Py_DECREF(py_module); + py_module = 0; + if 
(!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%s.%s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (!strict && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility", + module_name, class_name); + #if PY_VERSION_HEX < 0x02050000 + if (PyErr_Warn(NULL, warning) < 0) goto bad; + #else + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + #endif + } + else if ((size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%s.%s has the wrong size, try recompiling", + module_name, class_name); + goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(py_module); + Py_XDECREF(result); + return NULL; +} +#endif + +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = (start + end) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || 
unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + 
py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, /*int argcount,*/ + 0, /*int kwonlyargcount,*/ + 0, /*int nlocals,*/ + 0, /*int stacksize,*/ + 0, /*int flags,*/ + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, /*int firstlineno,*/ + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_globals = 0; + PyFrameObject *py_frame = 0; + py_code = __pyx_find_code_object(c_line ? c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
c_line : py_line, py_code); + } + py_globals = PyModule_GetDict(__pyx_m); + if (!py_globals) goto bad; + py_frame = PyFrame_New( + PyThreadState_GET(), /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + py_globals, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + py_frame->f_lineno = py_line; + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else /* Python 3+ has unicode identifiers */ + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) { + PyNumberMethods *m; + const char *name = NULL; + PyObject *res = NULL; +#if PY_VERSION_HEX < 0x03000000 + if (PyInt_Check(x) || PyLong_Check(x)) +#else + if (PyLong_Check(x)) +#endif + return Py_INCREF(x), x; + m = Py_TYPE(x)->tp_as_number; +#if PY_VERSION_HEX < 0x03000000 + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = PyNumber_Long(x); + } +#else + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Long(x); + } +#endif + if (res) { +#if PY_VERSION_HEX < 0x03000000 + 
if (!PyInt_Check(res) && !PyLong_Check(res)) { +#else + if (!PyLong_Check(res)) { +#endif + PyErr_Format(PyExc_TypeError, + "__%s__ returned non-%s (type %.200s)", + name, name, Py_TYPE(res)->tp_name); + Py_DECREF(res); + return NULL; + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject* x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { +#if PY_VERSION_HEX < 0x02050000 + if (ival <= LONG_MAX) + return PyInt_FromLong((long)ival); + else { + unsigned char *bytes = (unsigned char *) &ival; + int one = 1; int little = (int)*(unsigned char*)&one; + return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0); + } +#else + return PyInt_FromSize_t(ival); +#endif +} +static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject* x) { + unsigned PY_LONG_LONG val = __Pyx_PyInt_AsUnsignedLongLong(x); + if (unlikely(val == (unsigned PY_LONG_LONG)-1 && PyErr_Occurred())) { + return (size_t)-1; + } else if (unlikely(val != (unsigned PY_LONG_LONG)(size_t)val)) { + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to size_t"); + return (size_t)-1; + } + return (size_t)val; +} + + +#endif /* Py_PYTHON_H */ diff --git a/astropy/convolution/boundary_fill.pyx b/astropy/convolution/boundary_fill.pyx new file mode 100644 index 0000000..a27d8ae --- /dev/null +++ b/astropy/convolution/boundary_fill.pyx @@ -0,0 +1,267 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import division +import numpy as np +cimport numpy as np + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + +cdef extern from "numpy/npy_math.h": + bint npy_isnan(double x) + +cimport cython + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def 
convolve1d_boundary_fill(np.ndarray[DTYPE_t, ndim=1] f, + np.ndarray[DTYPE_t, ndim=1] g, + float fill_value): + + if g.shape[0] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int nkx = g.shape[0] + cdef int wkx = nkx // 2 + cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) + cdef unsigned int i, iii + cdef int ii + + cdef int iimin, iimax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + if npy_isnan(f[i]): + top = 0. + bot = 0. + iimin = i - wkx + iimax = i + wkx + 1 + for ii in range(iimin, iimax): + if ii < 0 or ii > nx - 1: + val = fill_value + else: + val = f[ii] + if not npy_isnan(val): + ker = g[(wkx + ii - i)] + top += val * ker + bot += ker + if bot != 0.: + fixed[i] = top / bot + else: + fixed[i] = f[i] + else: + fixed[i] = f[i] + + # Now run the proper convolution + for i in range(nx): + if not npy_isnan(fixed[i]): + top = 0. + bot = 0. 
+ iimin = i - wkx + iimax = i + wkx + 1 + for ii in range(iimin, iimax): + if ii < 0 or ii > nx - 1: + val = fill_value + else: + val = fixed[ii] + ker = g[(wkx + ii - i)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i] = top / bot + else: + conv[i] = fixed[i] + else: + conv[i] = fixed[i] + + return conv + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def convolve2d_boundary_fill(np.ndarray[DTYPE_t, ndim=2] f, + np.ndarray[DTYPE_t, ndim=2] g, + float fill_value): + + if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int ny = f.shape[1] + cdef int nkx = g.shape[0] + cdef int nky = g.shape[1] + cdef int wkx = nkx // 2 + cdef int wky = nky // 2 + cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) + cdef unsigned int i, j, iii, jjj + cdef int ii, jj + + cdef int iimin, iimax, jjmin, jjmax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + for j in range(ny): + if npy_isnan(f[i, j]): + top = 0. + bot = 0. + iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1: + val = fill_value + else: + val = f[ii, jj] + if not npy_isnan(val): + ker = g[(wkx + ii - i), + (wky + jj - j)] + top += val * ker + bot += ker + if bot != 0.: + fixed[i, j] = top / bot + else: + fixed[i, j] = f[i, j] + else: + fixed[i, j] = f[i, j] + + # Now run the proper convolution + for i in range(nx): + for j in range(ny): + if not npy_isnan(fixed[i, j]): + top = 0. + bot = 0. 
+ iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1: + val = fill_value + else: + val = fixed[ii, jj] + ker = g[(wkx + ii - i), + (wky + jj - j)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i, j] = top / bot + else: + conv[i, j] = fixed[i, j] + else: + conv[i, j] = fixed[i, j] + + return conv + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def convolve3d_boundary_fill(np.ndarray[DTYPE_t, ndim=3] f, + np.ndarray[DTYPE_t, ndim=3] g, + float fill_value): + + if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int ny = f.shape[1] + cdef int nz = f.shape[2] + cdef int nkx = g.shape[0] + cdef int nky = g.shape[1] + cdef int nkz = g.shape[2] + cdef int wkx = nkx // 2 + cdef int wky = nky // 2 + cdef int wkz = nkz // 2 + cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) + cdef unsigned int i, j, k, iii, jjj, kkk + cdef int ii, jj, kk + + cdef int iimin, iimax, jjmin, jjmax, kkmin, kkmax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + for j in range(ny): + for k in range(nz): + if npy_isnan(f[i, j, k]): + top = 0. + bot = 0. 
+ iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + kkmin = k - wkz + kkmax = k + wkz + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + for kk in range(kkmin, kkmax): + if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1 or kk < 0 or kk > nz - 1: + val = fill_value + else: + val = f[ii, jj, kk] + if not npy_isnan(val): + ker = g[(wkx + ii - i), + (wky + jj - j), + (wkz + kk - k)] + top += val * ker + bot += ker + if bot != 0.: + fixed[i, j, k] = top / bot + else: + fixed[i, j, k] = f[i, j, k] + else: + fixed[i, j, k] = f[i, j, k] + + # Now run the proper convolution + for i in range(nx): + for j in range(ny): + for k in range(nz): + if not npy_isnan(fixed[i, j, k]): + top = 0. + bot = 0. + iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + kkmin = k - wkz + kkmax = k + wkz + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + for kk in range(kkmin, kkmax): + if ii < 0 or ii > nx - 1 or jj < 0 or jj > ny - 1 or kk < 0 or kk > nz - 1: + val = fill_value + else: + val = fixed[ii, jj, kk] + ker = g[(wkx + ii - i), + (wky + jj - j), + (wkz + kk - k)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i, j, k] = top / bot + else: + conv[i, j, k] = fixed[i, j, k] + else: + conv[i, j, k] = fixed[i, j, k] + + return conv diff --git a/astropy/convolution/boundary_none.c b/astropy/convolution/boundary_none.c new file mode 100644 index 0000000..c0a01b5 --- /dev/null +++ b/astropy/convolution/boundary_none.c @@ -0,0 +1,8291 @@ +/* Generated by Cython 0.18 on Tue Sep 23 16:50:23 2014 */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02040000 + #error Cython requires Python 2.4+. 
+#else +#include /* For offsetof */ +#ifndef offsetof +#define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION +#define CYTHON_COMPILING_IN_PYPY 1 +#define CYTHON_COMPILING_IN_CPYTHON 0 +#else +#define CYTHON_COMPILING_IN_PYPY 0 +#define CYTHON_COMPILING_IN_CPYTHON 1 +#endif +#if PY_VERSION_HEX < 0x02050000 + typedef int Py_ssize_t; + #define PY_SSIZE_T_MAX INT_MAX + #define PY_SSIZE_T_MIN INT_MIN + #define PY_FORMAT_SIZE_T "" + #define CYTHON_FORMAT_SSIZE_T "" + #define PyInt_FromSsize_t(z) PyInt_FromLong(z) + #define PyInt_AsSsize_t(o) __Pyx_PyInt_AsInt(o) + #define PyNumber_Index(o) ((PyNumber_Check(o) && !PyFloat_Check(o)) ? 
PyNumber_Int(o) : \ + (PyErr_Format(PyExc_TypeError, \ + "expected index value, got %.200s", Py_TYPE(o)->tp_name), \ + (PyObject*)0)) + #define __Pyx_PyIndex_Check(o) (PyNumber_Check(o) && !PyFloat_Check(o) && \ + !PyComplex_Check(o)) + #define PyIndex_Check __Pyx_PyIndex_Check + #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message) + #define __PYX_BUILD_PY_SSIZE_T "i" +#else + #define __PYX_BUILD_PY_SSIZE_T "n" + #define CYTHON_FORMAT_SSIZE_T "z" + #define __Pyx_PyIndex_Check PyIndex_Check +#endif +#if PY_VERSION_HEX < 0x02060000 + #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt) + #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) + #define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size) + #define PyVarObject_HEAD_INIT(type, size) \ + PyObject_HEAD_INIT(type) size, + #define PyType_Modified(t) + typedef struct { + void *buf; + PyObject *obj; + Py_ssize_t len; + Py_ssize_t itemsize; + int readonly; + int ndim; + char *format; + Py_ssize_t *shape; + Py_ssize_t *strides; + Py_ssize_t *suboffsets; + void *internal; + } Py_buffer; + #define PyBUF_SIMPLE 0 + #define PyBUF_WRITABLE 0x0001 + #define PyBUF_FORMAT 0x0004 + #define PyBUF_ND 0x0008 + #define PyBUF_STRIDES (0x0010 | PyBUF_ND) + #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES) + #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES) + #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES) + #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES) + #define PyBUF_RECORDS (PyBUF_STRIDES | PyBUF_FORMAT | PyBUF_WRITABLE) + #define PyBUF_FULL (PyBUF_INDIRECT | PyBUF_FORMAT | PyBUF_WRITABLE) + typedef int (*getbufferproc)(PyObject *, Py_buffer *, int); + typedef void (*releasebufferproc)(PyObject *, Py_buffer *); +#endif +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ + PyCode_New(a, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_BUILTIN_MODULE_NAME 
"builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif +#if PY_MAJOR_VERSION < 3 && PY_MINOR_VERSION < 6 + #define PyUnicode_FromString(s) PyUnicode_Decode(s, strlen(s), "UTF-8", "strict") +#endif +#if PY_MAJOR_VERSION >= 3 + #define Py_TPFLAGS_CHECKTYPES 0 + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3) + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ? \ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) +#else + #define CYTHON_PEP393_ENABLED 0 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_READ(k, d, i) ((k=k), (Py_UCS4)(((Py_UNICODE*)d)[i])) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_VERSION_HEX < 0x02060000 + #define PyBytesObject PyStringObject + #define PyBytes_Type PyString_Type + #define PyBytes_Check PyString_Check + #define PyBytes_CheckExact PyString_CheckExact + #define PyBytes_FromString PyString_FromString + #define PyBytes_FromStringAndSize PyString_FromStringAndSize + #define PyBytes_FromFormat PyString_FromFormat + #define PyBytes_DecodeEscape PyString_DecodeEscape + #define PyBytes_AsString PyString_AsString + #define PyBytes_AsStringAndSize 
PyString_AsStringAndSize + #define PyBytes_Size PyString_Size + #define PyBytes_AS_STRING PyString_AS_STRING + #define PyBytes_GET_SIZE PyString_GET_SIZE + #define PyBytes_Repr PyString_Repr + #define PyBytes_Concat PyString_Concat + #define PyBytes_ConcatAndDel PyString_ConcatAndDel +#endif +#if PY_VERSION_HEX < 0x02060000 + #define PySet_Check(obj) PyObject_TypeCheck(obj, &PySet_Type) + #define PyFrozenSet_Check(obj) PyObject_TypeCheck(obj, &PyFrozenSet_Type) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_VERSION_HEX < 0x03020000 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300) + #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b) + #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value) + #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b) +#else + #define 
__Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0))) + #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1))) + #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1))) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) +#endif +#if PY_VERSION_HEX < 0x02050000 + #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),((char *)(n))) + #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a)) + #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),((char *)(n))) +#else + #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),(n)) + #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a)) + #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),(n)) +#endif +#if PY_VERSION_HEX < 0x02050000 + #define __Pyx_NAMESTR(n) ((char *)(n)) + #define __Pyx_DOCSTR(n) ((char *)(n)) +#else + #define __Pyx_NAMESTR(n) (n) + #define __Pyx_DOCSTR(n) (n) +#endif + + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#endif + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) +#define _USE_MATH_DEFINES +#endif +#include +#define __PYX_HAVE__astropy__convolution__boundary_none +#define __PYX_HAVE_API__astropy__convolution__boundary_none +#include "string.h" +#include "stdio.h" +#include "stdlib.h" +#include "numpy/arrayobject.h" +#include "numpy/ufuncobject.h" +#include "numpy/npy_math.h" +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#ifdef PYREX_WITHOUT_ASSERTIONS +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +#ifndef CYTHON_INLINE + #if defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif +#ifndef 
CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/ + +#define __Pyx_PyBytes_FromUString(s) PyBytes_FromString((char*)s) +#define __Pyx_PyBytes_AsUString(s) ((unsigned char*) PyBytes_AsString(s)) +#define __Pyx_Owned_Py_None(b) (Py_INCREF(Py_None), Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? (Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x); +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject*); +#if CYTHON_COMPILING_IN_CPYTHON +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) + + +#ifdef __GNUC__ + /* Test for GCC > 2.95 */ + #if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) + #else /* __GNUC__ > 2 ... */ + #define likely(x) (x) + #define unlikely(x) (x) + #endif /* __GNUC__ > 2 ... 
*/ +#else /* __GNUC__ */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ + +static PyObject *__pyx_m; +static PyObject *__pyx_b; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + +#if !defined(CYTHON_CCOMPLEX) + #if defined(__cplusplus) + #define CYTHON_CCOMPLEX 1 + #elif defined(_Complex_I) + #define CYTHON_CCOMPLEX 1 + #else + #define CYTHON_CCOMPLEX 0 + #endif +#endif +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #include + #else + #include + #endif +#endif +#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__) + #undef _Complex_I + #define _Complex_I 1.0fj +#endif + + +static const char *__pyx_f[] = { + "boundary_none.pyx", + "numpy.pxd", + "type.pxd", +}; +#define IS_UNSIGNED(type) (((type) -1) > 0) +struct __Pyx_StructField_; +#define __PYX_BUF_FLAGS_PACKED_STRUCT (1 << 0) +typedef struct { + const char* name; /* for error messages only */ + struct __Pyx_StructField_* fields; + size_t size; /* sizeof(type) */ + size_t arraysize[8]; /* length of array in each dimension */ + int ndim; + char typegroup; /* _R_eal, _C_omplex, Signed _I_nt, _U_nsigned int, _S_truct, _P_ointer, _O_bject, c_H_ar */ + char is_unsigned; + int flags; +} __Pyx_TypeInfo; +typedef struct __Pyx_StructField_ { + __Pyx_TypeInfo* type; + const char* name; + size_t offset; +} __Pyx_StructField; +typedef struct { + __Pyx_StructField* field; + size_t parent_offset; +} __Pyx_BufFmt_StackElem; +typedef struct { + __Pyx_StructField root; + __Pyx_BufFmt_StackElem* head; + size_t fmt_offset; + size_t new_count, enc_count; + size_t struct_alignment; + int is_complex; + char enc_type; + char new_packmode; + char enc_packmode; + char is_valid_array; +} __Pyx_BufFmt_Context; + + +/* "numpy.pxd":723 + * # in Cython to enable them only on the right systems. 
+ * + * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + */ +typedef npy_int8 __pyx_t_5numpy_int8_t; + +/* "numpy.pxd":724 + * + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t + */ +typedef npy_int16 __pyx_t_5numpy_int16_t; + +/* "numpy.pxd":725 + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< + * ctypedef npy_int64 int64_t + * #ctypedef npy_int96 int96_t + */ +typedef npy_int32 __pyx_t_5numpy_int32_t; + +/* "numpy.pxd":726 + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< + * #ctypedef npy_int96 int96_t + * #ctypedef npy_int128 int128_t + */ +typedef npy_int64 __pyx_t_5numpy_int64_t; + +/* "numpy.pxd":730 + * #ctypedef npy_int128 int128_t + * + * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + */ +typedef npy_uint8 __pyx_t_5numpy_uint8_t; + +/* "numpy.pxd":731 + * + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t + */ +typedef npy_uint16 __pyx_t_5numpy_uint16_t; + +/* "numpy.pxd":732 + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< + * ctypedef npy_uint64 uint64_t + * #ctypedef npy_uint96 uint96_t + */ +typedef npy_uint32 __pyx_t_5numpy_uint32_t; + +/* "numpy.pxd":733 + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< + * #ctypedef npy_uint96 uint96_t + * #ctypedef npy_uint128 uint128_t + */ +typedef npy_uint64 __pyx_t_5numpy_uint64_t; + +/* "numpy.pxd":737 + * #ctypedef npy_uint128 uint128_t + * + * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< + * ctypedef npy_float64 float64_t + * #ctypedef npy_float80 float80_t 
+ */ +typedef npy_float32 __pyx_t_5numpy_float32_t; + +/* "numpy.pxd":738 + * + * ctypedef npy_float32 float32_t + * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< + * #ctypedef npy_float80 float80_t + * #ctypedef npy_float128 float128_t + */ +typedef npy_float64 __pyx_t_5numpy_float64_t; + +/* "numpy.pxd":747 + * # The int types are mapped a bit surprising -- + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t + */ +typedef npy_long __pyx_t_5numpy_int_t; + +/* "numpy.pxd":748 + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong longlong_t + * + */ +typedef npy_longlong __pyx_t_5numpy_long_t; + +/* "numpy.pxd":749 + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_ulong uint_t + */ +typedef npy_longlong __pyx_t_5numpy_longlong_t; + +/* "numpy.pxd":751 + * ctypedef npy_longlong longlong_t + * + * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t + */ +typedef npy_ulong __pyx_t_5numpy_uint_t; + +/* "numpy.pxd":752 + * + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulonglong_t + * + */ +typedef npy_ulonglong __pyx_t_5numpy_ulong_t; + +/* "numpy.pxd":753 + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_intp intp_t + */ +typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; + +/* "numpy.pxd":755 + * ctypedef npy_ulonglong ulonglong_t + * + * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< + * ctypedef npy_uintp uintp_t + * + */ +typedef npy_intp __pyx_t_5numpy_intp_t; + +/* "numpy.pxd":756 + * + * ctypedef npy_intp intp_t + * ctypedef npy_uintp uintp_t # 
<<<<<<<<<<<<<< + * + * ctypedef npy_double float_t + */ +typedef npy_uintp __pyx_t_5numpy_uintp_t; + +/* "numpy.pxd":758 + * ctypedef npy_uintp uintp_t + * + * ctypedef npy_double float_t # <<<<<<<<<<<<<< + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t + */ +typedef npy_double __pyx_t_5numpy_float_t; + +/* "numpy.pxd":759 + * + * ctypedef npy_double float_t + * ctypedef npy_double double_t # <<<<<<<<<<<<<< + * ctypedef npy_longdouble longdouble_t + * + */ +typedef npy_double __pyx_t_5numpy_double_t; + +/* "numpy.pxd":760 + * ctypedef npy_double float_t + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cfloat cfloat_t + */ +typedef npy_longdouble __pyx_t_5numpy_longdouble_t; + +/* "astropy/convolution/boundary_none.pyx":7 + * + * DTYPE = np.float + * ctypedef np.float_t DTYPE_t # <<<<<<<<<<<<<< + * + * cdef extern from "numpy/npy_math.h": + */ +typedef __pyx_t_5numpy_float_t __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t; +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< float > __pyx_t_float_complex; + #else + typedef float _Complex __pyx_t_float_complex; + #endif +#else + typedef struct { float real, imag; } __pyx_t_float_complex; +#endif + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< double > __pyx_t_double_complex; + #else + typedef double _Complex __pyx_t_double_complex; + #endif +#else + typedef struct { double real, imag; } __pyx_t_double_complex; +#endif + + +/*--- Type declarations ---*/ + +/* "numpy.pxd":762 + * ctypedef npy_longdouble longdouble_t + * + * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t + */ +typedef npy_cfloat __pyx_t_5numpy_cfloat_t; + +/* "numpy.pxd":763 + * + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< + * ctypedef npy_clongdouble clongdouble_t + * + */ +typedef npy_cdouble 
__pyx_t_5numpy_cdouble_t; + +/* "numpy.pxd":764 + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cdouble complex_t + */ +typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; + +/* "numpy.pxd":766 + * ctypedef npy_clongdouble clongdouble_t + * + * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew1(a): + */ +typedef npy_cdouble __pyx_t_5numpy_complex_t; +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/ + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil) \ + if (acquire_gil) { \ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure(); \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \ + PyGILState_Release(__pyx_gilstate_save); \ + } else { \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil) \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext() \ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) 
+ #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif /* CYTHON_REFNANNY */ +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/ + +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/ + +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /*proto*/ + +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[], \ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, \ + const char* function_name); /*proto*/ + +static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact); /*proto*/ + +static CYTHON_INLINE int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* obj, + __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack); +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info); + +static CYTHON_INLINE long 
__Pyx_mod_long(long, long); /* proto */ + +static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/ +static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ + +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /*proto*/ + +static CYTHON_INLINE long __Pyx_div_long(long, long); /* proto */ + +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/ + +#define __Pyx_BufPtrStrided1d(type, buf, i0, s0) (type)((char*)buf + i0 * s0) +#define __Pyx_BufPtrStrided2d(type, buf, i0, s0, i1, s1) (type)((char*)buf + i0 * s0 + i1 * s1) +#define __Pyx_BufPtrStrided3d(type, buf, i0, s0, i1, s1, i2, s2) (type)((char*)buf + i0 * s0 + i1 * s1 + i2 * s2) +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +static CYTHON_INLINE int __Pyx_IterFinish(void); /*proto*/ + +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /*proto*/ + +typedef struct { + Py_ssize_t shape, strides, suboffsets; +} __Pyx_Buf_DimInfo; +typedef struct { + size_t refcount; + Py_buffer pybuffer; +} __Pyx_Buffer; +typedef struct { + __Pyx_Buffer *rcbuffer; + char *data; + __Pyx_Buf_DimInfo diminfo[8]; +} __Pyx_LocalBuf_ND; + +#if PY_MAJOR_VERSION < 3 + static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags); + static void __Pyx_ReleaseBuffer(Py_buffer *view); +#else + #define __Pyx_GetBuffer PyObject_GetBuffer + #define __Pyx_ReleaseBuffer PyBuffer_Release +#endif + + +static Py_ssize_t __Pyx_zeros[] = {0, 0, 0, 0, 0, 0, 0, 0}; +static Py_ssize_t __Pyx_minusones[] = {-1, -1, -1, -1, -1, -1, -1, -1}; + +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); /*proto*/ + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #define 
__Pyx_CREAL(z) ((z).real()) + #define __Pyx_CIMAG(z) ((z).imag()) + #else + #define __Pyx_CREAL(z) (__real__(z)) + #define __Pyx_CIMAG(z) (__imag__(z)) + #endif +#else + #define __Pyx_CREAL(z) ((z).real) + #define __Pyx_CIMAG(z) ((z).imag) +#endif +#if defined(_WIN32) && defined(__cplusplus) && CYTHON_CCOMPLEX + #define __Pyx_SET_CREAL(z,x) ((z).real(x)) + #define __Pyx_SET_CIMAG(z,y) ((z).imag(y)) +#else + #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x) + #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y) +#endif + +static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float); + +#if CYTHON_CCOMPLEX + #define __Pyx_c_eqf(a, b) ((a)==(b)) + #define __Pyx_c_sumf(a, b) ((a)+(b)) + #define __Pyx_c_difff(a, b) ((a)-(b)) + #define __Pyx_c_prodf(a, b) ((a)*(b)) + #define __Pyx_c_quotf(a, b) ((a)/(b)) + #define __Pyx_c_negf(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zerof(z) ((z)==(float)0) + #define __Pyx_c_conjf(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_absf(z) (::std::abs(z)) + #define __Pyx_c_powf(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zerof(z) ((z)==0) + #define __Pyx_c_conjf(z) (conjf(z)) + #if 1 + #define __Pyx_c_absf(z) (cabsf(z)) + #define __Pyx_c_powf(a, b) (cpowf(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eqf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sumf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_difff(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prodf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quotf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_negf(__pyx_t_float_complex); + static CYTHON_INLINE int __Pyx_c_is_zerof(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex 
__Pyx_c_conjf(__pyx_t_float_complex); + #if 1 + static CYTHON_INLINE float __Pyx_c_absf(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_powf(__pyx_t_float_complex, __pyx_t_float_complex); + #endif +#endif + +static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double); + +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq(a, b) ((a)==(b)) + #define __Pyx_c_sum(a, b) ((a)+(b)) + #define __Pyx_c_diff(a, b) ((a)-(b)) + #define __Pyx_c_prod(a, b) ((a)*(b)) + #define __Pyx_c_quot(a, b) ((a)/(b)) + #define __Pyx_c_neg(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero(z) ((z)==(double)0) + #define __Pyx_c_conj(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs(z) (::std::abs(z)) + #define __Pyx_c_pow(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero(z) ((z)==0) + #define __Pyx_c_conj(z) (conj(z)) + #if 1 + #define __Pyx_c_abs(z) (cabs(z)) + #define __Pyx_c_pow(a, b) (cpow(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg(__pyx_t_double_complex); + static CYTHON_INLINE int __Pyx_c_is_zero(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj(__pyx_t_double_complex); + #if 1 + static CYTHON_INLINE double __Pyx_c_abs(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow(__pyx_t_double_complex, __pyx_t_double_complex); + #endif +#endif + +static CYTHON_INLINE unsigned char 
__Pyx_PyInt_AsUnsignedChar(PyObject *); + +static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject *); + +static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject *); + +static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject *); + +static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject *); + +static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject *); + +static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject *); + +static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject *); + +static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject *); + +static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject *); + +static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject *); + +static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject *); + +static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject *); + +static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject *); + +static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject *); + +static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject *); + +static int __Pyx_check_binary_version(void); + +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif +#endif + +static PyObject *__Pyx_ImportModule(const char *name); /*proto*/ + +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); /*proto*/ + +typedef struct { + int code_line; + PyCodeObject* code_object; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject 
*__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); /*proto*/ + +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/ + + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'libc.stdlib' */ + +/* Module declarations from 'numpy' */ + +/* Module declarations from 'numpy' */ +static PyTypeObject *__pyx_ptype_5numpy_dtype = 0; +static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0; +static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0; +static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0; +static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0; +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/ + +/* Module declarations from 'cython' */ + +/* Module declarations from 'astropy.convolution.boundary_none' */ +static __Pyx_TypeInfo __Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t = { "DTYPE_t", NULL, sizeof(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t), { 0 }, 0, 'R', 0, 0 }; +#define __Pyx_MODULE_NAME "astropy.convolution.boundary_none" +int __pyx_module_is_main_astropy__convolution__boundary_none = 0; + +/* Implementation of 'astropy.convolution.boundary_none' */ +static PyObject *__pyx_builtin_ValueError; +static PyObject *__pyx_builtin_range; +static PyObject *__pyx_builtin_RuntimeError; +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_none_convolve1d_boundary_none(CYTHON_UNUSED PyObject *__pyx_self, 
PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g); /* proto */ +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_none_2convolve2d_boundary_none(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g); /* proto */ +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_none_4convolve3d_boundary_none(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g); /* proto */ +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */ +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */ +static char __pyx_k_1[] = "Convolution kernel must have odd dimensions"; +static char __pyx_k_5[] = "ndarray is not C contiguous"; +static char __pyx_k_7[] = "ndarray is not Fortran contiguous"; +static char __pyx_k_9[] = "Non-native byte order not supported"; +static char __pyx_k_11[] = "unknown dtype code in numpy.pxd (%d)"; +static char __pyx_k_12[] = "Format string allocated too short, see comment in numpy.pxd"; +static char __pyx_k_15[] = "Format string allocated too short."; +static char __pyx_k_19[] = "convolve1d_boundary_none"; +static char __pyx_k_20[] = "/internal/1/root/src/astropy/astropy/astropy/convolution/boundary_none.pyx"; +static char __pyx_k_21[] = "astropy.convolution.boundary_none"; +static char __pyx_k_24[] = "convolve2d_boundary_none"; +static char __pyx_k_27[] = "convolve3d_boundary_none"; +static char __pyx_k__B[] = "B"; +static char __pyx_k__H[] = "H"; +static char __pyx_k__I[] = "I"; +static char __pyx_k__L[] = "L"; +static char __pyx_k__O[] = "O"; +static char __pyx_k__Q[] = "Q"; +static char __pyx_k__b[] = "b"; +static char __pyx_k__d[] = "d"; +static char __pyx_k__f[] = "f"; +static char __pyx_k__g[] = "g"; +static char __pyx_k__h[] = "h"; +static char __pyx_k__i[] = "i"; +static char __pyx_k__j[] = "j"; +static char __pyx_k__k[] = "k"; 
+static char __pyx_k__l[] = "l"; +static char __pyx_k__q[] = "q"; +static char __pyx_k__Zd[] = "Zd"; +static char __pyx_k__Zf[] = "Zf"; +static char __pyx_k__Zg[] = "Zg"; +static char __pyx_k__ii[] = "ii"; +static char __pyx_k__jj[] = "jj"; +static char __pyx_k__kk[] = "kk"; +static char __pyx_k__np[] = "np"; +static char __pyx_k__nx[] = "nx"; +static char __pyx_k__ny[] = "ny"; +static char __pyx_k__nz[] = "nz"; +static char __pyx_k__bot[] = "bot"; +static char __pyx_k__ker[] = "ker"; +static char __pyx_k__nkx[] = "nkx"; +static char __pyx_k__nky[] = "nky"; +static char __pyx_k__nkz[] = "nkz"; +static char __pyx_k__top[] = "top"; +static char __pyx_k__val[] = "val"; +static char __pyx_k__wkx[] = "wkx"; +static char __pyx_k__wky[] = "wky"; +static char __pyx_k__wkz[] = "wkz"; +static char __pyx_k__conv[] = "conv"; +static char __pyx_k__DTYPE[] = "DTYPE"; +static char __pyx_k__dtype[] = "dtype"; +static char __pyx_k__fixed[] = "fixed"; +static char __pyx_k__float[] = "float"; +static char __pyx_k__iimax[] = "iimax"; +static char __pyx_k__iimin[] = "iimin"; +static char __pyx_k__jjmax[] = "jjmax"; +static char __pyx_k__jjmin[] = "jjmin"; +static char __pyx_k__kkmax[] = "kkmax"; +static char __pyx_k__kkmin[] = "kkmin"; +static char __pyx_k__numpy[] = "numpy"; +static char __pyx_k__range[] = "range"; +static char __pyx_k__zeros[] = "zeros"; +static char __pyx_k____main__[] = "__main__"; +static char __pyx_k____test__[] = "__test__"; +static char __pyx_k__ValueError[] = "ValueError"; +static char __pyx_k__RuntimeError[] = "RuntimeError"; +static PyObject *__pyx_kp_s_1; +static PyObject *__pyx_kp_u_11; +static PyObject *__pyx_kp_u_12; +static PyObject *__pyx_kp_u_15; +static PyObject *__pyx_n_s_19; +static PyObject *__pyx_kp_s_20; +static PyObject *__pyx_n_s_21; +static PyObject *__pyx_n_s_24; +static PyObject *__pyx_n_s_27; +static PyObject *__pyx_kp_u_5; +static PyObject *__pyx_kp_u_7; +static PyObject *__pyx_kp_u_9; +static PyObject *__pyx_n_s__DTYPE; +static PyObject 
*__pyx_n_s__RuntimeError; +static PyObject *__pyx_n_s__ValueError; +static PyObject *__pyx_n_s____main__; +static PyObject *__pyx_n_s____test__; +static PyObject *__pyx_n_s__bot; +static PyObject *__pyx_n_s__conv; +static PyObject *__pyx_n_s__dtype; +static PyObject *__pyx_n_s__f; +static PyObject *__pyx_n_s__fixed; +static PyObject *__pyx_n_s__float; +static PyObject *__pyx_n_s__g; +static PyObject *__pyx_n_s__i; +static PyObject *__pyx_n_s__ii; +static PyObject *__pyx_n_s__iimax; +static PyObject *__pyx_n_s__iimin; +static PyObject *__pyx_n_s__j; +static PyObject *__pyx_n_s__jj; +static PyObject *__pyx_n_s__jjmax; +static PyObject *__pyx_n_s__jjmin; +static PyObject *__pyx_n_s__k; +static PyObject *__pyx_n_s__ker; +static PyObject *__pyx_n_s__kk; +static PyObject *__pyx_n_s__kkmax; +static PyObject *__pyx_n_s__kkmin; +static PyObject *__pyx_n_s__nkx; +static PyObject *__pyx_n_s__nky; +static PyObject *__pyx_n_s__nkz; +static PyObject *__pyx_n_s__np; +static PyObject *__pyx_n_s__numpy; +static PyObject *__pyx_n_s__nx; +static PyObject *__pyx_n_s__ny; +static PyObject *__pyx_n_s__nz; +static PyObject *__pyx_n_s__range; +static PyObject *__pyx_n_s__top; +static PyObject *__pyx_n_s__val; +static PyObject *__pyx_n_s__wkx; +static PyObject *__pyx_n_s__wky; +static PyObject *__pyx_n_s__wkz; +static PyObject *__pyx_n_s__zeros; +static PyObject *__pyx_int_15; +static PyObject *__pyx_k_tuple_2; +static PyObject *__pyx_k_tuple_3; +static PyObject *__pyx_k_tuple_4; +static PyObject *__pyx_k_tuple_6; +static PyObject *__pyx_k_tuple_8; +static PyObject *__pyx_k_tuple_10; +static PyObject *__pyx_k_tuple_13; +static PyObject *__pyx_k_tuple_14; +static PyObject *__pyx_k_tuple_16; +static PyObject *__pyx_k_tuple_17; +static PyObject *__pyx_k_tuple_22; +static PyObject *__pyx_k_tuple_25; +static PyObject *__pyx_k_codeobj_18; +static PyObject *__pyx_k_codeobj_23; +static PyObject *__pyx_k_codeobj_26; + +/* Python wrapper */ +static PyObject 
*__pyx_pw_7astropy_11convolution_13boundary_none_1convolve1d_boundary_none(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_13boundary_none_1convolve1d_boundary_none = {__Pyx_NAMESTR("convolve1d_boundary_none"), (PyCFunction)__pyx_pw_7astropy_11convolution_13boundary_none_1convolve1d_boundary_none, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_none_1convolve1d_boundary_none(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve1d_boundary_none (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve1d_boundary_none", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve1d_boundary_none") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto 
__pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve1d_boundary_none", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_none.convolve1d_boundary_none", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 17; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_13boundary_none_convolve1d_boundary_none(__pyx_self, __pyx_v_f, __pyx_v_g); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_none.pyx":16 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_none(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=1] g): + * + */ + +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_none_convolve1d_boundary_none(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g) { + int __pyx_v_nx; + int __pyx_v_nkx; + int __pyx_v_wkx; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_ii; + 
__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyArrayObject *__pyx_t_8 = NULL; + PyArrayObject *__pyx_t_9 = NULL; + int __pyx_t_10; + unsigned int __pyx_t_11; + unsigned int __pyx_t_12; + int __pyx_t_13; + long __pyx_t_14; + unsigned int __pyx_t_15; + unsigned int __pyx_t_16; + unsigned int __pyx_t_17; + unsigned int __pyx_t_18; + unsigned int __pyx_t_19; + unsigned int __pyx_t_20; + unsigned int __pyx_t_21; + unsigned int __pyx_t_22; + unsigned int __pyx_t_23; + unsigned int __pyx_t_24; + unsigned int __pyx_t_25; + unsigned int __pyx_t_26; + unsigned int __pyx_t_27; + unsigned int __pyx_t_28; + unsigned int __pyx_t_29; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve1d_boundary_none", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = 
NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + __pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; + + /* "astropy/convolution/boundary_none.pyx":19 + * np.ndarray[DTYPE_t, ndim=1] g): + * + * if g.shape[0] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_none.pyx":20 + * + * if g.shape[0] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_2 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_2), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = 
__pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __Pyx_Raise(__pyx_t_2, 0, 0, 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_none.pyx":22 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_2 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_2, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_1) { + __pyx_t_4 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = 
PyObject_RichCompare(__pyx_t_4, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_5 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_6 = __pyx_t_5; + } else { + __pyx_t_6 = __pyx_t_1; + } + if (unlikely(!__pyx_t_6)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_none.pyx":24 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_none.pyx":25 + * + * cdef int nx = f.shape[0] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_none.pyx":26 + * cdef int nx = f.shape[0] + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * + * # The following need to be set to zeros rather than empty because the + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_none.pyx":30 + * # The following need to be set to zeros rather than empty because the + * # boundary does not get reset. 
+ * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.zeros([nx], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.zeros([nx], dtype=DTYPE) + * + */ + __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__zeros); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = PyList_New(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_4, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __pyx_t_2 = 0; + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_t_4)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_4)); + __pyx_t_4 = 0; + __pyx_t_4 = PyDict_New(); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_4)); + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_t_4, ((PyObject *)__pyx_n_s__dtype), __pyx_t_7) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 
0; + __pyx_t_7 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), ((PyObject *)__pyx_t_4)); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; + if (!(likely(((__pyx_t_7) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_7, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_8 = ((PyArrayObject *)__pyx_t_7); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_8, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 1, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; + } + } + __pyx_t_8 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "astropy/convolution/boundary_none.pyx":31 + * # boundary does not get reset. 
+ * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.zeros([nx], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.zeros([nx], dtype=DTYPE) # <<<<<<<<<<<<<< + * + * cdef unsigned int i, ii + */ + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = PyObject_GetAttr(__pyx_t_7, __pyx_n_s__zeros); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + __pyx_t_7 = 0; + __pyx_t_7 = PyTuple_New(1); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_7, 0, ((PyObject *)__pyx_t_2)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_2)); + __pyx_t_2 = 0; + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_2)); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_2, ((PyObject *)__pyx_n_s__dtype), __pyx_t_3) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_7), ((PyObject *)__pyx_t_2)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_7)); __pyx_t_7 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_9 = ((PyArrayObject *)__pyx_t_3); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_9, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 1, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; + } + } + __pyx_t_9 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "astropy/convolution/boundary_none.pyx":41 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * if npy_isnan(f[i]) and i >= wkx and i < nx - wkx: + * top = 0. 
+ */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_none.pyx":42 + * # neighboring values + * for i in range(nx): + * if npy_isnan(f[i]) and i >= wkx and i < nx - wkx: # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_12 = __pyx_v_i; + __pyx_t_6 = npy_isnan((*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_12, __pyx_pybuffernd_f.diminfo[0].strides))); + if (__pyx_t_6) { + __pyx_t_1 = (__pyx_v_i >= __pyx_v_wkx); + if (__pyx_t_1) { + __pyx_t_5 = (__pyx_v_i < (__pyx_v_nx - __pyx_v_wkx)); + __pyx_t_13 = __pyx_t_5; + } else { + __pyx_t_13 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_13; + } else { + __pyx_t_1 = __pyx_t_6; + } + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_none.pyx":43 + * for i in range(nx): + * if npy_isnan(f[i]) and i >= wkx and i < nx - wkx: + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_none.pyx":44 + * if npy_isnan(f[i]) and i >= wkx and i < nx - wkx: + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * for ii in range(i - wkx, i + wkx + 1): + * val = f[ii] + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_none.pyx":45 + * top = 0. + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): # <<<<<<<<<<<<<< + * val = f[ii] + * if not npy_isnan(val): + */ + __pyx_t_14 = ((__pyx_v_i + __pyx_v_wkx) + 1); + for (__pyx_t_15 = (__pyx_v_i - __pyx_v_wkx); __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_ii = __pyx_t_15; + + /* "astropy/convolution/boundary_none.pyx":46 + * bot = 0. 
+ * for ii in range(i - wkx, i + wkx + 1): + * val = f[ii] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] + */ + __pyx_t_16 = __pyx_v_ii; + __pyx_v_val = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_16, __pyx_pybuffernd_f.diminfo[0].strides)); + + /* "astropy/convolution/boundary_none.pyx":47 + * for ii in range(i - wkx, i + wkx + 1): + * val = f[ii] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i)] + * top += val * ker + */ + __pyx_t_1 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_none.pyx":48 + * val = f[ii] + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_17 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_v_ker = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_17, __pyx_pybuffernd_g.diminfo[0].strides)); + + /* "astropy/convolution/boundary_none.pyx":49 + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0.: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_none.pyx":50 + * ker = g[(wkx + ii - i)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0.: + * fixed[i] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L9; + } + __pyx_L9:; + } + + /* "astropy/convolution/boundary_none.pyx":51 + * top += val * ker + * bot += ker + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i] = top / bot + * else: + */ + __pyx_t_1 = (__pyx_v_bot != 0.); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_none.pyx":52 + * bot += ker + * if bot != 0.: + * fixed[i] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i] = f[i] + */ + if (unlikely(__pyx_v_bot == 0)) { + 
PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 52; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_15 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_15, __pyx_pybuffernd_fixed.diminfo[0].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L10; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":54 + * fixed[i] = top / bot + * else: + * fixed[i] = f[i] # <<<<<<<<<<<<<< + * else: + * fixed[i] = f[i] + */ + __pyx_t_18 = __pyx_v_i; + __pyx_t_19 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_19, __pyx_pybuffernd_fixed.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_18, __pyx_pybuffernd_f.diminfo[0].strides)); + } + __pyx_L10:; + goto __pyx_L6; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":56 + * fixed[i] = f[i] + * else: + * fixed[i] = f[i] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_20 = __pyx_v_i; + __pyx_t_21 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_21, __pyx_pybuffernd_fixed.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_20, __pyx_pybuffernd_f.diminfo[0].strides)); + } + __pyx_L6:; + } + + /* "astropy/convolution/boundary_none.pyx":59 + * + * # Now run the proper convolution + * for i in range(wkx, nx - wkx): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i]): + * top = 0. 
+ */ + __pyx_t_10 = (__pyx_v_nx - __pyx_v_wkx); + for (__pyx_t_11 = __pyx_v_wkx; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_none.pyx":60 + * # Now run the proper convolution + * for i in range(wkx, nx - wkx): + * if not npy_isnan(fixed[i]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_22 = __pyx_v_i; + __pyx_t_1 = (!npy_isnan((*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_22, __pyx_pybuffernd_fixed.diminfo[0].strides)))); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_none.pyx":61 + * for i in range(wkx, nx - wkx): + * if not npy_isnan(fixed[i]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_none.pyx":62 + * if not npy_isnan(fixed[i]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * for ii in range(i - wkx, i + wkx + 1): + * val = fixed[ii] + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_none.pyx":63 + * top = 0. + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): # <<<<<<<<<<<<<< + * val = fixed[ii] + * ker = g[(wkx + ii - i)] + */ + __pyx_t_14 = ((__pyx_v_i + __pyx_v_wkx) + 1); + for (__pyx_t_23 = (__pyx_v_i - __pyx_v_wkx); __pyx_t_23 < __pyx_t_14; __pyx_t_23+=1) { + __pyx_v_ii = __pyx_t_23; + + /* "astropy/convolution/boundary_none.pyx":64 + * bot = 0. 
+ * for ii in range(i - wkx, i + wkx + 1): + * val = fixed[ii] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): + */ + __pyx_t_24 = __pyx_v_ii; + __pyx_v_val = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_24, __pyx_pybuffernd_fixed.diminfo[0].strides)); + + /* "astropy/convolution/boundary_none.pyx":65 + * for ii in range(i - wkx, i + wkx + 1): + * val = fixed[ii] + * ker = g[(wkx + ii - i)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_25 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_v_ker = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_25, __pyx_pybuffernd_g.diminfo[0].strides)); + + /* "astropy/convolution/boundary_none.pyx":66 + * val = fixed[ii] + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_1 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_none.pyx":67 + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_none.pyx":68 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + * conv[i] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L16; + } + __pyx_L16:; + } + + /* "astropy/convolution/boundary_none.pyx":69 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i] = top / bot + * else: + */ + __pyx_t_1 = (__pyx_v_bot != 0.0); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_none.pyx":70 + * bot += ker + * if bot != 0: + * conv[i] = top / bot # <<<<<<<<<<<<<< + * else: + * conv[i] = fixed[i] + */ + if (unlikely(__pyx_v_bot == 
0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 70; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_23 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_23, __pyx_pybuffernd_conv.diminfo[0].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L17; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":72 + * conv[i] = top / bot + * else: + * conv[i] = fixed[i] # <<<<<<<<<<<<<< + * else: + * conv[i] = fixed[i] + */ + __pyx_t_26 = __pyx_v_i; + __pyx_t_27 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_27, __pyx_pybuffernd_conv.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_26, __pyx_pybuffernd_fixed.diminfo[0].strides)); + } + __pyx_L17:; + goto __pyx_L13; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":74 + * conv[i] = fixed[i] + * else: + * conv[i] = fixed[i] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_28 = __pyx_v_i; + __pyx_t_29 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_29, __pyx_pybuffernd_conv.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_28, __pyx_pybuffernd_fixed.diminfo[0].strides)); + } + __pyx_L13:; + } + + /* "astropy/convolution/boundary_none.pyx":76 + * conv[i] = fixed[i] + * + * return conv # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + 
__pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_none.convolve1d_boundary_none", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_none_3convolve2d_boundary_none(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_13boundary_none_3convolve2d_boundary_none = {__Pyx_NAMESTR("convolve2d_boundary_none"), (PyCFunction)__pyx_pw_7astropy_11convolution_13boundary_none_3convolve2d_boundary_none, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_none_3convolve2d_boundary_none(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("convolve2d_boundary_none (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve2d_boundary_none", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 80; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve2d_boundary_none") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 80; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve2d_boundary_none", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 80; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_none.convolve2d_boundary_none", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if 
(unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 80; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 81; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_13boundary_none_2convolve2d_boundary_none(__pyx_self, __pyx_v_f, __pyx_v_g); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_none.pyx":80 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_none(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g): + * + */ + +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_none_2convolve2d_boundary_none(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g) { + int __pyx_v_nx; + int __pyx_v_ny; + int __pyx_v_nkx; + int __pyx_v_nky; + int __pyx_v_wkx; + int __pyx_v_wky; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_j; + unsigned int __pyx_v_ii; + unsigned int __pyx_v_jj; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + 
__Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyArrayObject *__pyx_t_8 = NULL; + PyArrayObject *__pyx_t_9 = NULL; + int __pyx_t_10; + unsigned int __pyx_t_11; + int __pyx_t_12; + unsigned int __pyx_t_13; + unsigned int __pyx_t_14; + unsigned int __pyx_t_15; + int __pyx_t_16; + int __pyx_t_17; + int __pyx_t_18; + long __pyx_t_19; + unsigned int __pyx_t_20; + long __pyx_t_21; + unsigned int __pyx_t_22; + unsigned int __pyx_t_23; + unsigned int __pyx_t_24; + unsigned int __pyx_t_25; + unsigned int __pyx_t_26; + unsigned int __pyx_t_27; + unsigned int __pyx_t_28; + unsigned int __pyx_t_29; + unsigned int __pyx_t_30; + unsigned int __pyx_t_31; + unsigned int __pyx_t_32; + unsigned int __pyx_t_33; + unsigned int __pyx_t_34; + unsigned int __pyx_t_35; + unsigned int __pyx_t_36; + unsigned int __pyx_t_37; + unsigned int __pyx_t_38; + unsigned int __pyx_t_39; + unsigned int __pyx_t_40; + unsigned int __pyx_t_41; + unsigned int __pyx_t_42; + unsigned int __pyx_t_43; + unsigned int __pyx_t_44; + unsigned int __pyx_t_45; + unsigned int __pyx_t_46; + unsigned int __pyx_t_47; + unsigned int __pyx_t_48; + unsigned int __pyx_t_49; + unsigned int __pyx_t_50; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve2d_boundary_none", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + 
__pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 80; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_f.diminfo[1].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_f.diminfo[1].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 80; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_g.diminfo[1].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_g.diminfo[1].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[1]; + + /* "astropy/convolution/boundary_none.pyx":83 + * np.ndarray[DTYPE_t, ndim=2] g): + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (!__pyx_t_1) { + __pyx_t_2 = 
(__Pyx_mod_long((__pyx_v_g->dimensions[1]), 2) != 1); + __pyx_t_3 = __pyx_t_2; + } else { + __pyx_t_3 = __pyx_t_1; + } + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_none.pyx":84 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_3), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_none.pyx":86 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_4 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyObject_RichCompare(__pyx_t_4, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_3) { + __pyx_t_6 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_6, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_3; + } + if (unlikely(!__pyx_t_2)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_none.pyx":88 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_none.pyx":89 + * + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + */ + __pyx_v_ny = (__pyx_v_f->dimensions[1]); + + /* "astropy/convolution/boundary_none.pyx":90 + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nkx = 
(__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_none.pyx":91 + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + */ + __pyx_v_nky = (__pyx_v_g->dimensions[1]); + + /* "astropy/convolution/boundary_none.pyx":92 + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef int wky = nky // 2 + * + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_none.pyx":93 + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 # <<<<<<<<<<<<<< + * + * # The following need to be set to zeros rather than empty because the + */ + __pyx_v_wky = __Pyx_div_long(__pyx_v_nky, 2); + + /* "astropy/convolution/boundary_none.pyx":97 + * # The following need to be set to zeros rather than empty because the + * # boundary does not get reset. + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.zeros([nx, ny], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.zeros([nx, ny], dtype=DTYPE) + * + */ + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = PyObject_GetAttr(__pyx_t_4, __pyx_n_s__zeros); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + 
__pyx_t_7 = PyList_New(2); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_7, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_7, 1, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_4 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_7)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_7)); + __pyx_t_7 = 0; + __pyx_t_7 = PyDict_New(); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_7)); + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + if (PyDict_SetItem(__pyx_t_7, ((PyObject *)__pyx_n_s__dtype), __pyx_t_4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyObject_Call(__pyx_t_5, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_7)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_7)); __pyx_t_7 = 0; + if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_8 = ((PyArrayObject *)__pyx_t_4); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if 
(unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_8, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 2, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 97; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_fixed.diminfo[1].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_fixed.diminfo[1].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[1]; + } + } + __pyx_t_8 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "astropy/convolution/boundary_none.pyx":98 + * # boundary does not get reset. 
+ * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.zeros([nx, ny], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.zeros([nx, ny], dtype=DTYPE) # <<<<<<<<<<<<<< + * + * cdef unsigned int i, j, ii, jj + */ + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = PyObject_GetAttr(__pyx_t_4, __pyx_n_s__zeros); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyList_New(2); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_5, 1, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_4 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_4)) {__pyx_filename 
= __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + if (PyDict_SetItem(__pyx_t_5, ((PyObject *)__pyx_n_s__dtype), __pyx_t_4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyObject_Call(__pyx_t_7, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_5)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0; + if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_9 = ((PyArrayObject *)__pyx_t_4); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_9, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 2, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_conv.diminfo[1].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_conv.diminfo[1].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[1]; + } + } + __pyx_t_9 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_4); + __pyx_t_4 = 0; + + /* 
"astropy/convolution/boundary_none.pyx":108 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * if npy_isnan(f[i, j]) and i >= wkx and i < nx - wkx \ + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_none.pyx":109 + * # neighboring values + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * if npy_isnan(f[i, j]) and i >= wkx and i < nx - wkx \ + * and j >= wky and j < ny - wky: + */ + __pyx_t_12 = __pyx_v_ny; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_j = __pyx_t_13; + + /* "astropy/convolution/boundary_none.pyx":110 + * for i in range(nx): + * for j in range(ny): + * if npy_isnan(f[i, j]) and i >= wkx and i < nx - wkx \ # <<<<<<<<<<<<<< + * and j >= wky and j < ny - wky: + * top = 0. + */ + __pyx_t_14 = __pyx_v_i; + __pyx_t_15 = __pyx_v_j; + __pyx_t_2 = npy_isnan((*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_14, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_15, __pyx_pybuffernd_f.diminfo[1].strides))); + if (__pyx_t_2) { + __pyx_t_3 = (__pyx_v_i >= __pyx_v_wkx); + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_none.pyx":111 + * for j in range(ny): + * if npy_isnan(f[i, j]) and i >= wkx and i < nx - wkx \ + * and j >= wky and j < ny - wky: # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. 
+ */ + __pyx_t_1 = (__pyx_v_i < (__pyx_v_nx - __pyx_v_wkx)); + if (__pyx_t_1) { + __pyx_t_16 = (__pyx_v_j >= __pyx_v_wky); + if (__pyx_t_16) { + __pyx_t_17 = (__pyx_v_j < (__pyx_v_ny - __pyx_v_wky)); + __pyx_t_18 = __pyx_t_17; + } else { + __pyx_t_18 = __pyx_t_16; + } + __pyx_t_16 = __pyx_t_18; + } else { + __pyx_t_16 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_16; + } else { + __pyx_t_1 = __pyx_t_3; + } + __pyx_t_3 = __pyx_t_1; + } else { + __pyx_t_3 = __pyx_t_2; + } + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_none.pyx":112 + * if npy_isnan(f[i, j]) and i >= wkx and i < nx - wkx \ + * and j >= wky and j < ny - wky: + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_none.pyx":113 + * and j >= wky and j < ny - wky: + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_none.pyx":114 + * top = 0. + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): # <<<<<<<<<<<<<< + * for jj in range(j - wky, j + wky + 1): + * val = f[ii, jj] + */ + __pyx_t_19 = ((__pyx_v_i + __pyx_v_wkx) + 1); + for (__pyx_t_20 = (__pyx_v_i - __pyx_v_wkx); __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { + __pyx_v_ii = __pyx_t_20; + + /* "astropy/convolution/boundary_none.pyx":115 + * bot = 0. 
+ * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): # <<<<<<<<<<<<<< + * val = f[ii, jj] + * if not npy_isnan(val): + */ + __pyx_t_21 = ((__pyx_v_j + __pyx_v_wky) + 1); + for (__pyx_t_22 = (__pyx_v_j - __pyx_v_wky); __pyx_t_22 < __pyx_t_21; __pyx_t_22+=1) { + __pyx_v_jj = __pyx_t_22; + + /* "astropy/convolution/boundary_none.pyx":116 + * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): + * val = f[ii, jj] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + */ + __pyx_t_23 = __pyx_v_ii; + __pyx_t_24 = __pyx_v_jj; + __pyx_v_val = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_23, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_24, __pyx_pybuffernd_f.diminfo[1].strides)); + + /* "astropy/convolution/boundary_none.pyx":117 + * for jj in range(j - wky, j + wky + 1): + * val = f[ii, jj] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + */ + __pyx_t_3 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_none.pyx":119 + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + * (wky + jj - j)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_25 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_26 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_v_ker = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_25, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_26, __pyx_pybuffernd_g.diminfo[1].strides)); + + /* "astropy/convolution/boundary_none.pyx":120 + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0.: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_none.pyx":121 + * (wky + jj 
- j)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0.: + * fixed[i, j] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L13; + } + __pyx_L13:; + } + } + + /* "astropy/convolution/boundary_none.pyx":122 + * top += val * ker + * bot += ker + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i, j] = top / bot + * else: + */ + __pyx_t_3 = (__pyx_v_bot != 0.); + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_none.pyx":123 + * bot += ker + * if bot != 0.: + * fixed[i, j] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i, j] = f[i, j] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 123; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_20 = __pyx_v_i; + __pyx_t_22 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_20, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_22, __pyx_pybuffernd_fixed.diminfo[1].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L14; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":125 + * fixed[i, j] = top / bot + * else: + * fixed[i, j] = f[i, j] # <<<<<<<<<<<<<< + * else: + * fixed[i, j] = f[i, j] + */ + __pyx_t_27 = __pyx_v_i; + __pyx_t_28 = __pyx_v_j; + __pyx_t_29 = __pyx_v_i; + __pyx_t_30 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_29, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_30, __pyx_pybuffernd_fixed.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_27, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_28, __pyx_pybuffernd_f.diminfo[1].strides)); + } + __pyx_L14:; + goto __pyx_L8; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":127 + * 
fixed[i, j] = f[i, j] + * else: + * fixed[i, j] = f[i, j] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_31 = __pyx_v_i; + __pyx_t_32 = __pyx_v_j; + __pyx_t_33 = __pyx_v_i; + __pyx_t_34 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_33, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_34, __pyx_pybuffernd_fixed.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_31, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_32, __pyx_pybuffernd_f.diminfo[1].strides)); + } + __pyx_L8:; + } + } + + /* "astropy/convolution/boundary_none.pyx":130 + * + * # Now run the proper convolution + * for i in range(wkx, nx - wkx): # <<<<<<<<<<<<<< + * for j in range(wky, ny - wky): + * if not npy_isnan(fixed[i, j]): + */ + __pyx_t_10 = (__pyx_v_nx - __pyx_v_wkx); + for (__pyx_t_11 = __pyx_v_wkx; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_none.pyx":131 + * # Now run the proper convolution + * for i in range(wkx, nx - wkx): + * for j in range(wky, ny - wky): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i, j]): + * top = 0. + */ + __pyx_t_12 = (__pyx_v_ny - __pyx_v_wky); + for (__pyx_t_13 = __pyx_v_wky; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_j = __pyx_t_13; + + /* "astropy/convolution/boundary_none.pyx":132 + * for i in range(wkx, nx - wkx): + * for j in range(wky, ny - wky): + * if not npy_isnan(fixed[i, j]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. 
+ */ + __pyx_t_35 = __pyx_v_i; + __pyx_t_36 = __pyx_v_j; + __pyx_t_3 = (!npy_isnan((*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_35, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_36, __pyx_pybuffernd_fixed.diminfo[1].strides)))); + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_none.pyx":133 + * for j in range(wky, ny - wky): + * if not npy_isnan(fixed[i, j]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_none.pyx":134 + * if not npy_isnan(fixed[i, j]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_none.pyx":135 + * top = 0. + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): # <<<<<<<<<<<<<< + * for jj in range(j - wky, j + wky + 1): + * val = fixed[ii, jj] + */ + __pyx_t_19 = ((__pyx_v_i + __pyx_v_wkx) + 1); + for (__pyx_t_37 = (__pyx_v_i - __pyx_v_wkx); __pyx_t_37 < __pyx_t_19; __pyx_t_37+=1) { + __pyx_v_ii = __pyx_t_37; + + /* "astropy/convolution/boundary_none.pyx":136 + * bot = 0. 
+ * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): # <<<<<<<<<<<<<< + * val = fixed[ii, jj] + * ker = g[(wkx + ii - i), + */ + __pyx_t_21 = ((__pyx_v_j + __pyx_v_wky) + 1); + for (__pyx_t_38 = (__pyx_v_j - __pyx_v_wky); __pyx_t_38 < __pyx_t_21; __pyx_t_38+=1) { + __pyx_v_jj = __pyx_t_38; + + /* "astropy/convolution/boundary_none.pyx":137 + * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): + * val = fixed[ii, jj] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + */ + __pyx_t_39 = __pyx_v_ii; + __pyx_t_40 = __pyx_v_jj; + __pyx_v_val = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_39, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_40, __pyx_pybuffernd_fixed.diminfo[1].strides)); + + /* "astropy/convolution/boundary_none.pyx":139 + * val = fixed[ii, jj] + * ker = g[(wkx + ii - i), + * (wky + jj - j)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_41 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_42 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_v_ker = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_41, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_42, __pyx_pybuffernd_g.diminfo[1].strides)); + + /* "astropy/convolution/boundary_none.pyx":140 + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_3 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_none.pyx":141 + * (wky + jj - j)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_none.pyx":142 + * if not 
npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + * conv[i, j] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L24; + } + __pyx_L24:; + } + } + + /* "astropy/convolution/boundary_none.pyx":143 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i, j] = top / bot + * else: + */ + __pyx_t_3 = (__pyx_v_bot != 0.0); + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_none.pyx":144 + * bot += ker + * if bot != 0: + * conv[i, j] = top / bot # <<<<<<<<<<<<<< + * else: + * conv[i, j] = fixed[i, j] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 144; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_37 = __pyx_v_i; + __pyx_t_38 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_37, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_38, __pyx_pybuffernd_conv.diminfo[1].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L25; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":146 + * conv[i, j] = top / bot + * else: + * conv[i, j] = fixed[i, j] # <<<<<<<<<<<<<< + * else: + * conv[i, j] = fixed[i, j] + */ + __pyx_t_43 = __pyx_v_i; + __pyx_t_44 = __pyx_v_j; + __pyx_t_45 = __pyx_v_i; + __pyx_t_46 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_45, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_46, __pyx_pybuffernd_conv.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_43, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_44, __pyx_pybuffernd_fixed.diminfo[1].strides)); + } + __pyx_L25:; + goto __pyx_L19; + } + /*else*/ { + + /* 
"astropy/convolution/boundary_none.pyx":148 + * conv[i, j] = fixed[i, j] + * else: + * conv[i, j] = fixed[i, j] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_47 = __pyx_v_i; + __pyx_t_48 = __pyx_v_j; + __pyx_t_49 = __pyx_v_i; + __pyx_t_50 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_49, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_50, __pyx_pybuffernd_conv.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_47, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_48, __pyx_pybuffernd_fixed.diminfo[1].strides)); + } + __pyx_L19:; + } + } + + /* "astropy/convolution/boundary_none.pyx":150 + * conv[i, j] = fixed[i, j] + * + * return conv # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_none.convolve2d_boundary_none", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + 
__Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_none_5convolve3d_boundary_none(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_13boundary_none_5convolve3d_boundary_none = {__Pyx_NAMESTR("convolve3d_boundary_none"), (PyCFunction)__pyx_pw_7astropy_11convolution_13boundary_none_5convolve3d_boundary_none, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_none_5convolve3d_boundary_none(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve3d_boundary_none (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve3d_boundary_none", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto 
__pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve3d_boundary_none") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve3d_boundary_none", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_none.convolve3d_boundary_none", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 155; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_13boundary_none_4convolve3d_boundary_none(__pyx_self, __pyx_v_f, __pyx_v_g); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_none.pyx":154 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_none(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g): + * + */ + +static PyObject 
*__pyx_pf_7astropy_11convolution_13boundary_none_4convolve3d_boundary_none(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g) { + int __pyx_v_nx; + int __pyx_v_ny; + int __pyx_v_nz; + int __pyx_v_nkx; + int __pyx_v_nky; + int __pyx_v_nkz; + int __pyx_v_wkx; + int __pyx_v_wky; + int __pyx_v_wkz; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_j; + unsigned int __pyx_v_k; + unsigned int __pyx_v_ii; + unsigned int __pyx_v_jj; + unsigned int __pyx_v_kk; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyArrayObject *__pyx_t_10 = NULL; + PyArrayObject *__pyx_t_11 = NULL; + int __pyx_t_12; + unsigned int __pyx_t_13; + int __pyx_t_14; + unsigned int __pyx_t_15; + int __pyx_t_16; + unsigned int __pyx_t_17; + unsigned int __pyx_t_18; + unsigned int __pyx_t_19; + unsigned int __pyx_t_20; + int __pyx_t_21; + int __pyx_t_22; + int __pyx_t_23; + int __pyx_t_24; + long __pyx_t_25; + unsigned int __pyx_t_26; + long __pyx_t_27; + unsigned int __pyx_t_28; + long __pyx_t_29; + unsigned int __pyx_t_30; + unsigned int __pyx_t_31; + unsigned int __pyx_t_32; + unsigned int 
__pyx_t_33; + unsigned int __pyx_t_34; + unsigned int __pyx_t_35; + unsigned int __pyx_t_36; + unsigned int __pyx_t_37; + unsigned int __pyx_t_38; + unsigned int __pyx_t_39; + unsigned int __pyx_t_40; + unsigned int __pyx_t_41; + unsigned int __pyx_t_42; + unsigned int __pyx_t_43; + unsigned int __pyx_t_44; + unsigned int __pyx_t_45; + unsigned int __pyx_t_46; + unsigned int __pyx_t_47; + unsigned int __pyx_t_48; + unsigned int __pyx_t_49; + unsigned int __pyx_t_50; + unsigned int __pyx_t_51; + unsigned int __pyx_t_52; + unsigned int __pyx_t_53; + unsigned int __pyx_t_54; + unsigned int __pyx_t_55; + unsigned int __pyx_t_56; + unsigned int __pyx_t_57; + unsigned int __pyx_t_58; + unsigned int __pyx_t_59; + unsigned int __pyx_t_60; + unsigned int __pyx_t_61; + unsigned int __pyx_t_62; + unsigned int __pyx_t_63; + unsigned int __pyx_t_64; + unsigned int __pyx_t_65; + unsigned int __pyx_t_66; + unsigned int __pyx_t_67; + unsigned int __pyx_t_68; + unsigned int __pyx_t_69; + unsigned int __pyx_t_70; + unsigned int __pyx_t_71; + unsigned int __pyx_t_72; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve3d_boundary_none", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + __pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if 
(unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 3, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_f.diminfo[1].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_f.diminfo[1].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_f.diminfo[2].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_f.diminfo[2].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[2]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 3, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_g.diminfo[1].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_g.diminfo[1].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_g.diminfo[2].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_g.diminfo[2].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[2]; + + /* "astropy/convolution/boundary_none.pyx":157 + * np.ndarray[DTYPE_t, ndim=3] g): + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel 
must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (!__pyx_t_1) { + __pyx_t_2 = (__Pyx_mod_long((__pyx_v_g->dimensions[1]), 2) != 1); + if (!__pyx_t_2) { + __pyx_t_3 = (__Pyx_mod_long((__pyx_v_g->dimensions[2]), 2) != 1); + __pyx_t_4 = __pyx_t_3; + } else { + __pyx_t_4 = __pyx_t_2; + } + __pyx_t_2 = __pyx_t_4; + } else { + __pyx_t_2 = __pyx_t_1; + } + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_none.pyx":158 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_4), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 158; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 158; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_none.pyx":160 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_5 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PyObject_RichCompare(__pyx_t_5, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; 
__pyx_lineno = 160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_2) { + __pyx_t_7 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyObject_RichCompare(__pyx_t_7, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_4 = __pyx_t_1; + } else { + __pyx_t_4 = __pyx_t_2; + } + if (unlikely(!__pyx_t_4)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_none.pyx":162 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_none.pyx":163 + * + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] # <<<<<<<<<<<<<< + * cdef int nz = f.shape[2] 
+ * cdef int nkx = g.shape[0] + */ + __pyx_v_ny = (__pyx_v_f->dimensions[1]); + + /* "astropy/convolution/boundary_none.pyx":164 + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + */ + __pyx_v_nz = (__pyx_v_f->dimensions[2]); + + /* "astropy/convolution/boundary_none.pyx":165 + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_none.pyx":166 + * cdef int nz = f.shape[2] + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] # <<<<<<<<<<<<<< + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nky = (__pyx_v_g->dimensions[1]); + + /* "astropy/convolution/boundary_none.pyx":167 + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + */ + __pyx_v_nkz = (__pyx_v_g->dimensions[2]); + + /* "astropy/convolution/boundary_none.pyx":168 + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef int wky = nky // 2 + * cdef int wkz = nkz // 2 + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_none.pyx":169 + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 # <<<<<<<<<<<<<< + * cdef int wkz = nkz // 2 + * + */ + __pyx_v_wky = __Pyx_div_long(__pyx_v_nky, 2); + + /* "astropy/convolution/boundary_none.pyx":170 + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + * cdef int wkz = nkz // 2 # <<<<<<<<<<<<<< + * + * # The following need to be set to zeros rather than empty because the + */ + __pyx_v_wkz = __Pyx_div_long(__pyx_v_nkz, 2); + + /* "astropy/convolution/boundary_none.pyx":174 + * # The following need to 
be set to zeros rather than empty because the + * # boundary does not get reset. + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.zeros([nx, ny, nz], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=3] conv = np.zeros([nx, ny, nz], dtype=DTYPE) + * + */ + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyObject_GetAttr(__pyx_t_5, __pyx_n_s__zeros); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyInt_FromLong(__pyx_v_nz); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = PyList_New(3); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_9); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_9, 1, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_9, 2, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_8); + __pyx_t_5 = 0; + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_8 = PyTuple_New(1); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_8, 0, ((PyObject *)__pyx_t_9)); + 
__Pyx_GIVEREF(((PyObject *)__pyx_t_9)); + __pyx_t_9 = 0; + __pyx_t_9 = PyDict_New(); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_9)); + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_t_9, ((PyObject *)__pyx_n_s__dtype), __pyx_t_7) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyObject_Call(__pyx_t_6, ((PyObject *)__pyx_t_8), ((PyObject *)__pyx_t_9)); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_8)); __pyx_t_8 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_9)); __pyx_t_9 = 0; + if (!(likely(((__pyx_t_7) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_7, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_10 = ((PyArrayObject *)__pyx_t_7); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_10, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 3, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 174; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = 
__pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_fixed.diminfo[1].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_fixed.diminfo[1].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_fixed.diminfo[2].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_fixed.diminfo[2].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[2]; + } + } + __pyx_t_10 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "astropy/convolution/boundary_none.pyx":175 + * # boundary does not get reset. + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.zeros([nx, ny, nz], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=3] conv = np.zeros([nx, ny, nz], dtype=DTYPE) # <<<<<<<<<<<<<< + * + * cdef unsigned int i, j, k, ii, jj, kk + */ + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = PyObject_GetAttr(__pyx_t_7, __pyx_n_s__zeros); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = PyInt_FromLong(__pyx_v_nz); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyList_New(3); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = 
__LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_5, 0, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_5, 1, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_8); + PyList_SET_ITEM(__pyx_t_5, 2, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_8 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + if (PyDict_SetItem(__pyx_t_5, ((PyObject *)__pyx_n_s__dtype), __pyx_t_8) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = PyObject_Call(__pyx_t_9, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_5)); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0; + if (!(likely(((__pyx_t_8) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_8, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_11 = ((PyArrayObject *)__pyx_t_8); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if 
(unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_11, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 3, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 175; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_conv.diminfo[1].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_conv.diminfo[1].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_conv.diminfo[2].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_conv.diminfo[2].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[2]; + } + } + __pyx_t_11 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_8); + __pyx_t_8 = 0; + + /* "astropy/convolution/boundary_none.pyx":185 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * for k in range(nz): + */ + __pyx_t_12 = __pyx_v_nx; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_i = __pyx_t_13; + + /* "astropy/convolution/boundary_none.pyx":186 + * # neighboring values + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * for k in range(nz): + * if npy_isnan(f[i, j, k]) and i >= wkx and i < nx - wkx \ + */ + __pyx_t_14 = __pyx_v_ny; + for (__pyx_t_15 = 0; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_j = __pyx_t_15; + + /* "astropy/convolution/boundary_none.pyx":187 + * for i in range(nx): + * for j in range(ny): + * for k in range(nz): # <<<<<<<<<<<<<< + * if 
npy_isnan(f[i, j, k]) and i >= wkx and i < nx - wkx \ + * and j >= wky and j < ny - wky and k >= wkz and k <= nz - wkz: + */ + __pyx_t_16 = __pyx_v_nz; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_k = __pyx_t_17; + + /* "astropy/convolution/boundary_none.pyx":188 + * for j in range(ny): + * for k in range(nz): + * if npy_isnan(f[i, j, k]) and i >= wkx and i < nx - wkx \ # <<<<<<<<<<<<<< + * and j >= wky and j < ny - wky and k >= wkz and k <= nz - wkz: + * top = 0. + */ + __pyx_t_18 = __pyx_v_i; + __pyx_t_19 = __pyx_v_j; + __pyx_t_20 = __pyx_v_k; + __pyx_t_4 = npy_isnan((*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_18, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_19, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_20, __pyx_pybuffernd_f.diminfo[2].strides))); + if (__pyx_t_4) { + __pyx_t_2 = (__pyx_v_i >= __pyx_v_wkx); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_none.pyx":189 + * for k in range(nz): + * if npy_isnan(f[i, j, k]) and i >= wkx and i < nx - wkx \ + * and j >= wky and j < ny - wky and k >= wkz and k <= nz - wkz: # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. 
+ */ + __pyx_t_1 = (__pyx_v_i < (__pyx_v_nx - __pyx_v_wkx)); + if (__pyx_t_1) { + __pyx_t_3 = (__pyx_v_j >= __pyx_v_wky); + if (__pyx_t_3) { + __pyx_t_21 = (__pyx_v_j < (__pyx_v_ny - __pyx_v_wky)); + if (__pyx_t_21) { + __pyx_t_22 = (__pyx_v_k >= __pyx_v_wkz); + if (__pyx_t_22) { + __pyx_t_23 = (__pyx_v_k <= (__pyx_v_nz - __pyx_v_wkz)); + __pyx_t_24 = __pyx_t_23; + } else { + __pyx_t_24 = __pyx_t_22; + } + __pyx_t_22 = __pyx_t_24; + } else { + __pyx_t_22 = __pyx_t_21; + } + __pyx_t_21 = __pyx_t_22; + } else { + __pyx_t_21 = __pyx_t_3; + } + __pyx_t_3 = __pyx_t_21; + } else { + __pyx_t_3 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_3; + } else { + __pyx_t_1 = __pyx_t_2; + } + __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_4; + } + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_none.pyx":190 + * if npy_isnan(f[i, j, k]) and i >= wkx and i < nx - wkx \ + * and j >= wky and j < ny - wky and k >= wkz and k <= nz - wkz: + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_none.pyx":191 + * and j >= wky and j < ny - wky and k >= wkz and k <= nz - wkz: + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_none.pyx":192 + * top = 0. + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): # <<<<<<<<<<<<<< + * for jj in range(j - wky, j + wky + 1): + * for kk in range(k - wkz, k + wkz + 1): + */ + __pyx_t_25 = ((__pyx_v_i + __pyx_v_wkx) + 1); + for (__pyx_t_26 = (__pyx_v_i - __pyx_v_wkx); __pyx_t_26 < __pyx_t_25; __pyx_t_26+=1) { + __pyx_v_ii = __pyx_t_26; + + /* "astropy/convolution/boundary_none.pyx":193 + * bot = 0. 
+ * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): # <<<<<<<<<<<<<< + * for kk in range(k - wkz, k + wkz + 1): + * val = f[ii, jj, kk] + */ + __pyx_t_27 = ((__pyx_v_j + __pyx_v_wky) + 1); + for (__pyx_t_28 = (__pyx_v_j - __pyx_v_wky); __pyx_t_28 < __pyx_t_27; __pyx_t_28+=1) { + __pyx_v_jj = __pyx_t_28; + + /* "astropy/convolution/boundary_none.pyx":194 + * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): + * for kk in range(k - wkz, k + wkz + 1): # <<<<<<<<<<<<<< + * val = f[ii, jj, kk] + * if not npy_isnan(val): + */ + __pyx_t_29 = ((__pyx_v_k + __pyx_v_wkz) + 1); + for (__pyx_t_30 = (__pyx_v_k - __pyx_v_wkz); __pyx_t_30 < __pyx_t_29; __pyx_t_30+=1) { + __pyx_v_kk = __pyx_t_30; + + /* "astropy/convolution/boundary_none.pyx":195 + * for jj in range(j - wky, j + wky + 1): + * for kk in range(k - wkz, k + wkz + 1): + * val = f[ii, jj, kk] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + */ + __pyx_t_31 = __pyx_v_ii; + __pyx_t_32 = __pyx_v_jj; + __pyx_t_33 = __pyx_v_kk; + __pyx_v_val = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_31, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_32, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_33, __pyx_pybuffernd_f.diminfo[2].strides)); + + /* "astropy/convolution/boundary_none.pyx":196 + * for kk in range(k - wkz, k + wkz + 1): + * val = f[ii, jj, kk] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j), + */ + __pyx_t_2 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_none.pyx":199 + * ker = g[(wkx + ii - i), + * (wky + jj - j), + * (wkz + kk - k)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_34 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_35 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_t_36 = ((unsigned 
int)((__pyx_v_wkz + __pyx_v_kk) - __pyx_v_k)); + __pyx_v_ker = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_34, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_35, __pyx_pybuffernd_g.diminfo[1].strides, __pyx_t_36, __pyx_pybuffernd_g.diminfo[2].strides)); + + /* "astropy/convolution/boundary_none.pyx":200 + * (wky + jj - j), + * (wkz + kk - k)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0.: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_none.pyx":201 + * (wkz + kk - k)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0.: + * fixed[i, j, k] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L17; + } + __pyx_L17:; + } + } + } + + /* "astropy/convolution/boundary_none.pyx":202 + * top += val * ker + * bot += ker + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i, j, k] = top / bot + * else: + */ + __pyx_t_2 = (__pyx_v_bot != 0.); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_none.pyx":203 + * bot += ker + * if bot != 0.: + * fixed[i, j, k] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i, j, k] = f[i, j, k] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 203; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_26 = __pyx_v_i; + __pyx_t_28 = __pyx_v_j; + __pyx_t_30 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_26, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_28, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_30, __pyx_pybuffernd_fixed.diminfo[2].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L18; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":205 + * fixed[i, j, k] = top / bot + * else: + * fixed[i, j, k] = f[i, 
j, k] # <<<<<<<<<<<<<< + * else: + * fixed[i, j, k] = f[i, j, k] + */ + __pyx_t_37 = __pyx_v_i; + __pyx_t_38 = __pyx_v_j; + __pyx_t_39 = __pyx_v_k; + __pyx_t_40 = __pyx_v_i; + __pyx_t_41 = __pyx_v_j; + __pyx_t_42 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_40, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_41, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_42, __pyx_pybuffernd_fixed.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_37, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_38, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_39, __pyx_pybuffernd_f.diminfo[2].strides)); + } + __pyx_L18:; + goto __pyx_L10; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":207 + * fixed[i, j, k] = f[i, j, k] + * else: + * fixed[i, j, k] = f[i, j, k] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_43 = __pyx_v_i; + __pyx_t_44 = __pyx_v_j; + __pyx_t_45 = __pyx_v_k; + __pyx_t_46 = __pyx_v_i; + __pyx_t_47 = __pyx_v_j; + __pyx_t_48 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_46, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_47, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_48, __pyx_pybuffernd_fixed.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_43, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_44, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_45, __pyx_pybuffernd_f.diminfo[2].strides)); + } + __pyx_L10:; + } + } + } + + /* "astropy/convolution/boundary_none.pyx":210 + * + * # Now run the proper convolution + * for i in range(wkx, nx - wkx): # <<<<<<<<<<<<<< + * for j in range(wky, ny - wky): + * for k in 
range(wkz, nz - wkz): + */ + __pyx_t_12 = (__pyx_v_nx - __pyx_v_wkx); + for (__pyx_t_13 = __pyx_v_wkx; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_i = __pyx_t_13; + + /* "astropy/convolution/boundary_none.pyx":211 + * # Now run the proper convolution + * for i in range(wkx, nx - wkx): + * for j in range(wky, ny - wky): # <<<<<<<<<<<<<< + * for k in range(wkz, nz - wkz): + * if not npy_isnan(fixed[i, j, k]): + */ + __pyx_t_14 = (__pyx_v_ny - __pyx_v_wky); + for (__pyx_t_15 = __pyx_v_wky; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_j = __pyx_t_15; + + /* "astropy/convolution/boundary_none.pyx":212 + * for i in range(wkx, nx - wkx): + * for j in range(wky, ny - wky): + * for k in range(wkz, nz - wkz): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i, j, k]): + * top = 0. + */ + __pyx_t_16 = (__pyx_v_nz - __pyx_v_wkz); + for (__pyx_t_17 = __pyx_v_wkz; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_k = __pyx_t_17; + + /* "astropy/convolution/boundary_none.pyx":213 + * for j in range(wky, ny - wky): + * for k in range(wkz, nz - wkz): + * if not npy_isnan(fixed[i, j, k]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_49 = __pyx_v_i; + __pyx_t_50 = __pyx_v_j; + __pyx_t_51 = __pyx_v_k; + __pyx_t_2 = (!npy_isnan((*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_49, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_50, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_51, __pyx_pybuffernd_fixed.diminfo[2].strides)))); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_none.pyx":214 + * for k in range(wkz, nz - wkz): + * if not npy_isnan(fixed[i, j, k]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_none.pyx":215 + * if not npy_isnan(fixed[i, j, k]): + * top = 0. + * bot = 0. 
# <<<<<<<<<<<<<< + * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_none.pyx":216 + * top = 0. + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): # <<<<<<<<<<<<<< + * for jj in range(j - wky, j + wky + 1): + * for kk in range(k - wkz, k + wkz + 1): + */ + __pyx_t_25 = ((__pyx_v_i + __pyx_v_wkx) + 1); + for (__pyx_t_52 = (__pyx_v_i - __pyx_v_wkx); __pyx_t_52 < __pyx_t_25; __pyx_t_52+=1) { + __pyx_v_ii = __pyx_t_52; + + /* "astropy/convolution/boundary_none.pyx":217 + * bot = 0. + * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): # <<<<<<<<<<<<<< + * for kk in range(k - wkz, k + wkz + 1): + * val = fixed[ii, jj, kk] + */ + __pyx_t_27 = ((__pyx_v_j + __pyx_v_wky) + 1); + for (__pyx_t_53 = (__pyx_v_j - __pyx_v_wky); __pyx_t_53 < __pyx_t_27; __pyx_t_53+=1) { + __pyx_v_jj = __pyx_t_53; + + /* "astropy/convolution/boundary_none.pyx":218 + * for ii in range(i - wkx, i + wkx + 1): + * for jj in range(j - wky, j + wky + 1): + * for kk in range(k - wkz, k + wkz + 1): # <<<<<<<<<<<<<< + * val = fixed[ii, jj, kk] + * ker = g[(wkx + ii - i), + */ + __pyx_t_29 = ((__pyx_v_k + __pyx_v_wkz) + 1); + for (__pyx_t_54 = (__pyx_v_k - __pyx_v_wkz); __pyx_t_54 < __pyx_t_29; __pyx_t_54+=1) { + __pyx_v_kk = __pyx_t_54; + + /* "astropy/convolution/boundary_none.pyx":219 + * for jj in range(j - wky, j + wky + 1): + * for kk in range(k - wkz, k + wkz + 1): + * val = fixed[ii, jj, kk] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j), + */ + __pyx_t_55 = __pyx_v_ii; + __pyx_t_56 = __pyx_v_jj; + __pyx_t_57 = __pyx_v_kk; + __pyx_v_val = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_55, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_56, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_57, __pyx_pybuffernd_fixed.diminfo[2].strides)); + + /* 
"astropy/convolution/boundary_none.pyx":222 + * ker = g[(wkx + ii - i), + * (wky + jj - j), + * (wkz + kk - k)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_58 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_59 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_t_60 = ((unsigned int)((__pyx_v_wkz + __pyx_v_kk) - __pyx_v_k)); + __pyx_v_ker = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_58, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_59, __pyx_pybuffernd_g.diminfo[1].strides, __pyx_t_60, __pyx_pybuffernd_g.diminfo[2].strides)); + + /* "astropy/convolution/boundary_none.pyx":223 + * (wky + jj - j), + * (wkz + kk - k)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_2 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_none.pyx":224 + * (wkz + kk - k)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_none.pyx":225 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + * conv[i, j, k] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L32; + } + __pyx_L32:; + } + } + } + + /* "astropy/convolution/boundary_none.pyx":226 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i, j, k] = top / bot + * else: + */ + __pyx_t_2 = (__pyx_v_bot != 0.0); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_none.pyx":227 + * bot += ker + * if bot != 0: + * conv[i, j, k] = top / bot # <<<<<<<<<<<<<< + * else: + * conv[i, j, k] = fixed[i, j, k] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 227; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_52 = __pyx_v_i; + __pyx_t_53 = __pyx_v_j; + __pyx_t_54 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_52, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_53, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_54, __pyx_pybuffernd_conv.diminfo[2].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L33; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":229 + * conv[i, j, k] = top / bot + * else: + * conv[i, j, k] = fixed[i, j, k] # <<<<<<<<<<<<<< + * else: + * conv[i, j, k] = fixed[i, j, k] + */ + __pyx_t_61 = __pyx_v_i; + __pyx_t_62 = __pyx_v_j; + __pyx_t_63 = __pyx_v_k; + __pyx_t_64 = __pyx_v_i; + __pyx_t_65 = __pyx_v_j; + __pyx_t_66 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_64, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_65, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_66, __pyx_pybuffernd_conv.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_61, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_62, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_63, __pyx_pybuffernd_fixed.diminfo[2].strides)); + } + __pyx_L33:; + goto __pyx_L25; + } + /*else*/ { + + /* "astropy/convolution/boundary_none.pyx":231 + * conv[i, j, k] = fixed[i, j, k] + * else: + * conv[i, j, k] = fixed[i, j, k] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_67 = __pyx_v_i; + __pyx_t_68 = __pyx_v_j; + __pyx_t_69 = __pyx_v_k; + __pyx_t_70 = __pyx_v_i; + __pyx_t_71 = __pyx_v_j; + __pyx_t_72 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_70, __pyx_pybuffernd_conv.diminfo[0].strides, 
__pyx_t_71, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_72, __pyx_pybuffernd_conv.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_none_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_67, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_68, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_69, __pyx_pybuffernd_fixed.diminfo[2].strides)); + } + __pyx_L25:; + } + } + } + + /* "astropy/convolution/boundary_none.pyx":233 + * conv[i, j, k] = fixed[i, j, k] + * + * return conv # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_none.convolve3d_boundary_none", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python 
wrapper */ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0); + __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags)); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":194 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fullfill the PEP. + */ + +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_v_copy_shape; + int __pyx_v_i; + int __pyx_v_ndim; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + int __pyx_v_t; + char *__pyx_v_f; + PyArray_Descr *__pyx_v_descr = 0; + int __pyx_v_offset; + int __pyx_v_hasfields; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + char *__pyx_t_9; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__getbuffer__", 0); + if (__pyx_v_info != NULL) { + __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(__pyx_v_info->obj); + } + + /* "numpy.pxd":200 + * # of flags + * + * if info == NULL: return # <<<<<<<<<<<<<< + * + * cdef int copy_shape, i, ndim + */ + __pyx_t_1 = (__pyx_v_info == NULL); + if (__pyx_t_1) { + __pyx_r = 0; + goto 
__pyx_L0; + goto __pyx_L3; + } + __pyx_L3:; + + /* "numpy.pxd":203 + * + * cdef int copy_shape, i, ndim + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + */ + __pyx_v_endian_detector = 1; + + /* "numpy.pxd":204 + * cdef int copy_shape, i, ndim + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * + * ndim = PyArray_NDIM(self) + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "numpy.pxd":206 + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); + + /* "numpy.pxd":208 + * ndim = PyArray_NDIM(self) + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * copy_shape = 1 + * else: + */ + __pyx_t_1 = ((sizeof(npy_intp)) != (sizeof(Py_ssize_t))); + if (__pyx_t_1) { + + /* "numpy.pxd":209 + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * copy_shape = 1 # <<<<<<<<<<<<<< + * else: + * copy_shape = 0 + */ + __pyx_v_copy_shape = 1; + goto __pyx_L4; + } + /*else*/ { + + /* "numpy.pxd":211 + * copy_shape = 1 + * else: + * copy_shape = 0 # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + */ + __pyx_v_copy_shape = 0; + } + __pyx_L4:; + + /* "numpy.pxd":213 + * copy_shape = 0 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + __pyx_t_1 = ((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS); + if (__pyx_t_1) { + + /* "numpy.pxd":214 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not C contiguous") + * + */ + 
__pyx_t_2 = (!PyArray_CHKFLAGS(__pyx_v_self, NPY_C_CONTIGUOUS)); + __pyx_t_3 = __pyx_t_2; + } else { + __pyx_t_3 = __pyx_t_1; + } + if (__pyx_t_3) { + + /* "numpy.pxd":215 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_6), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L5; + } + __pyx_L5:; + + /* "numpy.pxd":217 + * raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + __pyx_t_3 = ((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS); + if (__pyx_t_3) { + + /* "numpy.pxd":218 + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not Fortran contiguous") + * + */ + __pyx_t_1 = (!PyArray_CHKFLAGS(__pyx_v_self, NPY_F_CONTIGUOUS)); + __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_3; + } + if (__pyx_t_2) { + + /* "numpy.pxd":219 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, 
((PyObject *)__pyx_k_tuple_8), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L6; + } + __pyx_L6:; + + /* "numpy.pxd":221 + * raise ValueError(u"ndarray is not Fortran contiguous") + * + * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< + * info.ndim = ndim + * if copy_shape: + */ + __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); + + /* "numpy.pxd":222 + * + * info.buf = PyArray_DATA(self) + * info.ndim = ndim # <<<<<<<<<<<<<< + * if copy_shape: + * # Allocate new buffer for strides and shape info. + */ + __pyx_v_info->ndim = __pyx_v_ndim; + + /* "numpy.pxd":223 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if copy_shape: # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + */ + if (__pyx_v_copy_shape) { + + /* "numpy.pxd":226 + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) # <<<<<<<<<<<<<< + * info.shape = info.strides + ndim + * for i in range(ndim): + */ + __pyx_v_info->strides = ((Py_ssize_t *)malloc((((sizeof(Py_ssize_t)) * ((size_t)__pyx_v_ndim)) * 2))); + + /* "numpy.pxd":227 + * # This is allocated as one block, strides first. 
+ * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) + * info.shape = info.strides + ndim # <<<<<<<<<<<<<< + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + */ + __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); + + /* "numpy.pxd":228 + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) + * info.shape = info.strides + ndim + * for i in range(ndim): # <<<<<<<<<<<<<< + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] + */ + __pyx_t_5 = __pyx_v_ndim; + for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { + __pyx_v_i = __pyx_t_6; + + /* "numpy.pxd":229 + * info.shape = info.strides + ndim + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + */ + (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); + + /* "numpy.pxd":230 + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< + * else: + * info.strides = PyArray_STRIDES(self) + */ + (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); + } + goto __pyx_L7; + } + /*else*/ { + + /* "numpy.pxd":232 + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + */ + __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); + + /* "numpy.pxd":233 + * else: + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + */ + __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self)); + } + __pyx_L7:; + + /* "numpy.pxd":234 + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL # <<<<<<<<<<<<<< + * info.itemsize = PyArray_ITEMSIZE(self) 
+ * info.readonly = not PyArray_ISWRITEABLE(self) + */ + __pyx_v_info->suboffsets = NULL; + + /* "numpy.pxd":235 + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< + * info.readonly = not PyArray_ISWRITEABLE(self) + * + */ + __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); + + /* "numpy.pxd":236 + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< + * + * cdef int t + */ + __pyx_v_info->readonly = (!PyArray_ISWRITEABLE(__pyx_v_self)); + + /* "numpy.pxd":239 + * + * cdef int t + * cdef char* f = NULL # <<<<<<<<<<<<<< + * cdef dtype descr = self.descr + * cdef list stack + */ + __pyx_v_f = NULL; + + /* "numpy.pxd":240 + * cdef int t + * cdef char* f = NULL + * cdef dtype descr = self.descr # <<<<<<<<<<<<<< + * cdef list stack + * cdef int offset + */ + __pyx_t_4 = ((PyObject *)__pyx_v_self->descr); + __Pyx_INCREF(__pyx_t_4); + __pyx_v_descr = ((PyArray_Descr *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "numpy.pxd":244 + * cdef int offset + * + * cdef bint hasfields = PyDataType_HASFIELDS(descr) # <<<<<<<<<<<<<< + * + * if not hasfields and not copy_shape: + */ + __pyx_v_hasfields = PyDataType_HASFIELDS(__pyx_v_descr); + + /* "numpy.pxd":246 + * cdef bint hasfields = PyDataType_HASFIELDS(descr) + * + * if not hasfields and not copy_shape: # <<<<<<<<<<<<<< + * # do not call releasebuffer + * info.obj = None + */ + __pyx_t_2 = (!__pyx_v_hasfields); + if (__pyx_t_2) { + __pyx_t_3 = (!__pyx_v_copy_shape); + __pyx_t_1 = __pyx_t_3; + } else { + __pyx_t_1 = __pyx_t_2; + } + if (__pyx_t_1) { + + /* "numpy.pxd":248 + * if not hasfields and not copy_shape: + * # do not call releasebuffer + * info.obj = None # <<<<<<<<<<<<<< + * else: + * # need to call releasebuffer + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = Py_None; + 
goto __pyx_L10; + } + /*else*/ { + + /* "numpy.pxd":251 + * else: + * # need to call releasebuffer + * info.obj = self # <<<<<<<<<<<<<< + * + * if not hasfields: + */ + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = ((PyObject *)__pyx_v_self); + } + __pyx_L10:; + + /* "numpy.pxd":253 + * info.obj = self + * + * if not hasfields: # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + __pyx_t_1 = (!__pyx_v_hasfields); + if (__pyx_t_1) { + + /* "numpy.pxd":254 + * + * if not hasfields: + * t = descr.type_num # <<<<<<<<<<<<<< + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + */ + __pyx_t_5 = __pyx_v_descr->type_num; + __pyx_v_t = __pyx_t_5; + + /* "numpy.pxd":255 + * if not hasfields: + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_1 = (__pyx_v_descr->byteorder == '>'); + if (__pyx_t_1) { + __pyx_t_2 = __pyx_v_little_endian; + } else { + __pyx_t_2 = __pyx_t_1; + } + if (!__pyx_t_2) { + + /* "numpy.pxd":256 + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + */ + __pyx_t_1 = (__pyx_v_descr->byteorder == '<'); + if (__pyx_t_1) { + __pyx_t_3 = (!__pyx_v_little_endian); + __pyx_t_7 = __pyx_t_3; + } else { + __pyx_t_7 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_7; + } else { + __pyx_t_1 = __pyx_t_2; + } + if (__pyx_t_1) { + + /* "numpy.pxd":257 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise 
ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_10), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L12; + } + __pyx_L12:; + + /* "numpy.pxd":258 + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + */ + __pyx_t_1 = (__pyx_v_t == NPY_BYTE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__b; + goto __pyx_L13; + } + + /* "numpy.pxd":259 + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + */ + __pyx_t_1 = (__pyx_v_t == NPY_UBYTE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__B; + goto __pyx_L13; + } + + /* "numpy.pxd":260 + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + */ + __pyx_t_1 = (__pyx_v_t == NPY_SHORT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__h; + goto __pyx_L13; + } + + /* "numpy.pxd":261 + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + */ + __pyx_t_1 = (__pyx_v_t == NPY_USHORT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__H; + goto __pyx_L13; + } + + /* "numpy.pxd":262 + * elif t == NPY_SHORT: f = "h" + * elif t == 
NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + */ + __pyx_t_1 = (__pyx_v_t == NPY_INT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__i; + goto __pyx_L13; + } + + /* "numpy.pxd":263 + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + */ + __pyx_t_1 = (__pyx_v_t == NPY_UINT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__I; + goto __pyx_L13; + } + + /* "numpy.pxd":264 + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__l; + goto __pyx_L13; + } + + /* "numpy.pxd":265 + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + */ + __pyx_t_1 = (__pyx_v_t == NPY_ULONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__L; + goto __pyx_L13; + } + + /* "numpy.pxd":266 + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONGLONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__q; + goto __pyx_L13; + } + + /* "numpy.pxd":267 + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + */ + __pyx_t_1 = (__pyx_v_t == NPY_ULONGLONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Q; + goto __pyx_L13; + } + + /* "numpy.pxd":268 + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< + * 
elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + */ + __pyx_t_1 = (__pyx_v_t == NPY_FLOAT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__f; + goto __pyx_L13; + } + + /* "numpy.pxd":269 + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + */ + __pyx_t_1 = (__pyx_v_t == NPY_DOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__d; + goto __pyx_L13; + } + + /* "numpy.pxd":270 + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONGDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__g; + goto __pyx_L13; + } + + /* "numpy.pxd":271 + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + */ + __pyx_t_1 = (__pyx_v_t == NPY_CFLOAT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zf; + goto __pyx_L13; + } + + /* "numpy.pxd":272 + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" + */ + __pyx_t_1 = (__pyx_v_t == NPY_CDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zd; + goto __pyx_L13; + } + + /* "numpy.pxd":273 + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f = "O" + * else: + */ + __pyx_t_1 = (__pyx_v_t == NPY_CLONGDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zg; + goto __pyx_L13; + } + + /* "numpy.pxd":274 + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< + * else: + * 
raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_1 = (__pyx_v_t == NPY_OBJECT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__O; + goto __pyx_L13; + } + /*else*/ { + + /* "numpy.pxd":276 + * elif t == NPY_OBJECT: f = "O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * info.format = f + * return + */ + __pyx_t_4 = PyInt_FromLong(__pyx_v_t); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = PyNumber_Remainder(((PyObject *)__pyx_kp_u_11), __pyx_t_4); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_8)); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)__pyx_t_8)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_8)); + __pyx_t_8 = 0; + __pyx_t_8 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_L13:; + + /* "numpy.pxd":277 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f # <<<<<<<<<<<<<< + * return + * else: + */ + __pyx_v_info->format = __pyx_v_f; + + /* "numpy.pxd":278 + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f + * return # <<<<<<<<<<<<<< + * else: + * info.format = 
stdlib.malloc(_buffer_format_string_len) + */ + __pyx_r = 0; + goto __pyx_L0; + goto __pyx_L11; + } + /*else*/ { + + /* "numpy.pxd":280 + * return + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + */ + __pyx_v_info->format = ((char *)malloc(255)); + + /* "numpy.pxd":281 + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, + */ + (__pyx_v_info->format[0]) = '^'; + + /* "numpy.pxd":282 + * info.format = stdlib.malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 # <<<<<<<<<<<<<< + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + */ + __pyx_v_offset = 0; + + /* "numpy.pxd":285 + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + * &offset) # <<<<<<<<<<<<<< + * f[0] = c'\0' # Terminate format string + * + */ + __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 255), (&__pyx_v_offset)); if (unlikely(__pyx_t_9 == NULL)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 283; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_v_f = __pyx_t_9; + + /* "numpy.pxd":286 + * info.format + _buffer_format_string_len, + * &offset) + * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + */ + (__pyx_v_f[0]) = '\x00'; + } + __pyx_L11:; + + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + if (__pyx_v_info != NULL && __pyx_v_info->obj != NULL) { + 
__Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = NULL; + } + goto __pyx_L2; + __pyx_L0:; + if (__pyx_v_info != NULL && __pyx_v_info->obj == Py_None) { + __Pyx_GOTREF(Py_None); + __Pyx_DECREF(Py_None); __pyx_v_info->obj = NULL; + } + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_descr); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0); + __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info)); + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":288 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) + */ + +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__releasebuffer__", 0); + + /* "numpy.pxd":289 + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_t_1 = PyArray_HASFIELDS(__pyx_v_self); + if (__pyx_t_1) { + + /* "numpy.pxd":290 + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * stdlib.free(info.strides) + */ + free(__pyx_v_info->format); + goto __pyx_L3; + } + __pyx_L3:; + + /* "numpy.pxd":291 + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) + * if 
sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * stdlib.free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + __pyx_t_1 = ((sizeof(npy_intp)) != (sizeof(Py_ssize_t))); + if (__pyx_t_1) { + + /* "numpy.pxd":292 + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * stdlib.free(info.strides) # <<<<<<<<<<<<<< + * # info.shape was stored after info.strides in the same block + * + */ + free(__pyx_v_info->strides); + goto __pyx_L4; + } + __pyx_L4:; + + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":768 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); + + /* "numpy.pxd":769 + * + * cdef inline object PyArray_MultiIterNew1(a): + * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew2(a, b): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 769; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":771 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * 
return PyArray_MultiIterNew(2, a, b) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); + + /* "numpy.pxd":772 + * + * cdef inline object PyArray_MultiIterNew2(a, b): + * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 772; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":774 + * return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); + + /* "numpy.pxd":775 + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + */ + 
__Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 775; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":777 + * return PyArray_MultiIterNew(3, a, b, c) + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); + + /* "numpy.pxd":778 + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 778; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, 
__pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":780 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); + + /* "numpy.pxd":781 + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 781; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":783 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. 
The new location in the format string is returned. + */ + +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) { + PyArray_Descr *__pyx_v_child = 0; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + PyObject *__pyx_v_fields = 0; + PyObject *__pyx_v_childname = NULL; + PyObject *__pyx_v_new_offset = NULL; + PyObject *__pyx_v_t = NULL; + char *__pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *(*__pyx_t_6)(PyObject *); + int __pyx_t_7; + int __pyx_t_8; + int __pyx_t_9; + int __pyx_t_10; + long __pyx_t_11; + char *__pyx_t_12; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_util_dtypestring", 0); + + /* "numpy.pxd":790 + * cdef int delta_offset + * cdef tuple i + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * cdef tuple fields + */ + __pyx_v_endian_detector = 1; + + /* "numpy.pxd":791 + * cdef tuple i + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * cdef tuple fields + * + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "numpy.pxd":794 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + if (unlikely(((PyObject *)__pyx_v_descr->names) == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_1 = ((PyObject *)__pyx_v_descr->names); __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if 
CYTHON_COMPILING_IN_CPYTHON + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + __Pyx_XDECREF(__pyx_v_childname); + __pyx_v_childname = __pyx_t_3; + __pyx_t_3 = 0; + + /* "numpy.pxd":795 + * + * for childname in descr.names: + * fields = descr.fields[childname] # <<<<<<<<<<<<<< + * child, new_offset = fields + * + */ + __pyx_t_3 = PyObject_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (!__pyx_t_3) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 795; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected tuple, got %.200s", Py_TYPE(__pyx_t_3)->tp_name), 0))) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 795; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_XDECREF(((PyObject *)__pyx_v_fields)); + __pyx_v_fields = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "numpy.pxd":796 + * for childname in descr.names: + * fields = descr.fields[childname] + * child, new_offset = fields # <<<<<<<<<<<<<< + * + * if (end - f) - (new_offset - offset[0]) < 15: + */ + if (likely(PyTuple_CheckExact(((PyObject *)__pyx_v_fields)))) { + PyObject* sequence = ((PyObject *)__pyx_v_fields); + #if CYTHON_COMPILING_IN_CPYTHON + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #if 
CYTHON_COMPILING_IN_CPYTHON + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + #endif + } else if (1) { + __Pyx_RaiseNoneNotIterableError(); {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else + { + Py_ssize_t index = -1; + __pyx_t_5 = PyObject_GetIter(((PyObject *)__pyx_v_fields)); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = Py_TYPE(__pyx_t_5)->tp_iternext; + index = 0; __pyx_t_3 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_3)) goto __pyx_L5_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 1; __pyx_t_4 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_4)) goto __pyx_L5_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_5), 2) < 0) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_6 = NULL; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L6_unpacking_done; + __pyx_L5_unpacking_failed:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_6 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_L6_unpacking_done:; + } + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 
796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_XDECREF(((PyObject *)__pyx_v_child)); + __pyx_v_child = ((PyArray_Descr *)__pyx_t_3); + __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_v_new_offset); + __pyx_v_new_offset = __pyx_t_4; + __pyx_t_4 = 0; + + /* "numpy.pxd":798 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + __pyx_t_4 = PyInt_FromLong((__pyx_v_end - __pyx_v_f)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyInt_FromLong((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_3); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyNumber_Subtract(__pyx_t_4, __pyx_t_5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = PyObject_RichCompare(__pyx_t_3, __pyx_int_15, Py_LT); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + + /* "numpy.pxd":799 + * + * if (end - f) - (new_offset - offset[0]) < 
15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_RuntimeError, ((PyObject *)__pyx_k_tuple_13), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L7; + } + __pyx_L7:; + + /* "numpy.pxd":801 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_7 = (__pyx_v_child->byteorder == '>'); + if (__pyx_t_7) { + __pyx_t_8 = __pyx_v_little_endian; + } else { + __pyx_t_8 = __pyx_t_7; + } + if (!__pyx_t_8) { + + /* "numpy.pxd":802 + * + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * # One could encode it in the format string and have Cython + */ + __pyx_t_7 = (__pyx_v_child->byteorder == '<'); + if (__pyx_t_7) { + __pyx_t_9 = (!__pyx_v_little_endian); + __pyx_t_10 = __pyx_t_9; + } else { + __pyx_t_10 = __pyx_t_7; + } + __pyx_t_7 = __pyx_t_10; + } else { + __pyx_t_7 = __pyx_t_8; + } + if (__pyx_t_7) { + + /* "numpy.pxd":803 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_t_5 = 
PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_14), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L8; + } + __pyx_L8:; + + /* "numpy.pxd":813 + * + * # Output padding bytes + * while offset[0] < new_offset: # <<<<<<<<<<<<<< + * f[0] = 120 # "x"; pad byte + * f += 1 + */ + while (1) { + __pyx_t_5 = PyInt_FromLong((__pyx_v_offset[0])); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_5, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!__pyx_t_7) break; + + /* "numpy.pxd":814 + * # Output padding bytes + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< + * f += 1 + * offset[0] += 1 + */ + (__pyx_v_f[0]) = 120; + + /* "numpy.pxd":815 + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte + * f += 1 # <<<<<<<<<<<<<< + * offset[0] += 1 + * + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "numpy.pxd":816 + * f[0] = 120 # "x"; pad byte + * f += 1 + * offset[0] += 1 # <<<<<<<<<<<<<< + * + * offset[0] += child.itemsize + */ + __pyx_t_11 = 0; + (__pyx_v_offset[__pyx_t_11]) = ((__pyx_v_offset[__pyx_t_11]) + 1); + } + + /* "numpy.pxd":818 + * offset[0] += 1 + * + * offset[0] += child.itemsize # 
<<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(child): + */ + __pyx_t_11 = 0; + (__pyx_v_offset[__pyx_t_11]) = ((__pyx_v_offset[__pyx_t_11]) + __pyx_v_child->elsize); + + /* "numpy.pxd":820 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + __pyx_t_7 = (!PyDataType_HASFIELDS(__pyx_v_child)); + if (__pyx_t_7) { + + /* "numpy.pxd":821 + * + * if not PyDataType_HASFIELDS(child): + * t = child.type_num # <<<<<<<<<<<<<< + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") + */ + __pyx_t_3 = PyInt_FromLong(__pyx_v_child->type_num); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 821; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_v_t); + __pyx_v_t = __pyx_t_3; + __pyx_t_3 = 0; + + /* "numpy.pxd":822 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + __pyx_t_7 = ((__pyx_v_end - __pyx_v_f) < 5); + if (__pyx_t_7) { + + /* "numpy.pxd":823 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_t_3 = PyObject_Call(__pyx_builtin_RuntimeError, ((PyObject *)__pyx_k_tuple_16), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L12; + } + __pyx_L12:; + + /* "numpy.pxd":826 + * + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f[0] = 66 #"B" 
+ * elif t == NPY_SHORT: f[0] = 104 #"h" + */ + __pyx_t_3 = PyInt_FromLong(NPY_BYTE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 98; + goto __pyx_L13; + } + + /* "numpy.pxd":827 + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + */ + __pyx_t_5 = PyInt_FromLong(NPY_UBYTE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 66; + goto __pyx_L13; + } + + /* "numpy.pxd":828 + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] 
= 105 #"i" + */ + __pyx_t_3 = PyInt_FromLong(NPY_SHORT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 104; + goto __pyx_L13; + } + + /* "numpy.pxd":829 + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + */ + __pyx_t_5 = PyInt_FromLong(NPY_USHORT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 72; + goto __pyx_L13; + } + + /* "numpy.pxd":830 + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + */ + __pyx_t_3 = 
PyInt_FromLong(NPY_INT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 105; + goto __pyx_L13; + } + + /* "numpy.pxd":831 + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + */ + __pyx_t_5 = PyInt_FromLong(NPY_UINT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 73; + goto __pyx_L13; + } + + /* "numpy.pxd":832 + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONG); if 
(unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 108; + goto __pyx_L13; + } + + /* "numpy.pxd":833 + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + */ + __pyx_t_5 = PyInt_FromLong(NPY_ULONG); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 76; + goto __pyx_L13; + } + + /* "numpy.pxd":834 + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONGLONG); if (unlikely(!__pyx_t_3)) 
{__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 113; + goto __pyx_L13; + } + + /* "numpy.pxd":835 + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + */ + __pyx_t_5 = PyInt_FromLong(NPY_ULONGLONG); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 81; + goto __pyx_L13; + } + + /* "numpy.pxd":836 + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + */ + __pyx_t_3 = PyInt_FromLong(NPY_FLOAT); if (unlikely(!__pyx_t_3)) {__pyx_filename 
= __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 102; + goto __pyx_L13; + } + + /* "numpy.pxd":837 + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + */ + __pyx_t_5 = PyInt_FromLong(NPY_DOUBLE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 100; + goto __pyx_L13; + } + + /* "numpy.pxd":838 + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + */ + __pyx_t_3 = 
PyInt_FromLong(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 103; + goto __pyx_L13; + } + + /* "numpy.pxd":839 + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + */ + __pyx_t_5 = PyInt_FromLong(NPY_CFLOAT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 102; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":840 + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; 
f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" + */ + __pyx_t_3 = PyInt_FromLong(NPY_CDOUBLE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 100; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":841 + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + */ + __pyx_t_5 = PyInt_FromLong(NPY_CLONGDOUBLE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 103; + __pyx_v_f = (__pyx_v_f + 1); + 
goto __pyx_L13; + } + + /* "numpy.pxd":842 + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_3 = PyInt_FromLong(NPY_OBJECT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 79; + goto __pyx_L13; + } + /*else*/ { + + /* "numpy.pxd":844 + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * f += 1 + * else: + */ + __pyx_t_5 = PyNumber_Remainder(((PyObject *)__pyx_kp_u_11), __pyx_v_t); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + 
__Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_L13:; + + /* "numpy.pxd":845 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * f += 1 # <<<<<<<<<<<<<< + * else: + * # Cython ignores struct boundary information ("T{...}"), + */ + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L11; + } + /*else*/ { + + /* "numpy.pxd":849 + * # Cython ignores struct boundary information ("T{...}"), + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< + * return f + * + */ + __pyx_t_12 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_12 == NULL)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 849; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_v_f = __pyx_t_12; + } + __pyx_L11:; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "numpy.pxd":850 + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) + * return f # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_v_f; + goto __pyx_L0; + + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_child); + __Pyx_XDECREF(__pyx_v_fields); + __Pyx_XDECREF(__pyx_v_childname); + __Pyx_XDECREF(__pyx_v_new_offset); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":965 + * + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * cdef PyObject* baseptr + * if base is None: + */ + +static CYTHON_INLINE void 
__pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_arr, PyObject *__pyx_v_base) { + PyObject *__pyx_v_baseptr; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("set_array_base", 0); + + /* "numpy.pxd":967 + * cdef inline void set_array_base(ndarray arr, object base): + * cdef PyObject* baseptr + * if base is None: # <<<<<<<<<<<<<< + * baseptr = NULL + * else: + */ + __pyx_t_1 = (__pyx_v_base == Py_None); + if (__pyx_t_1) { + + /* "numpy.pxd":968 + * cdef PyObject* baseptr + * if base is None: + * baseptr = NULL # <<<<<<<<<<<<<< + * else: + * Py_INCREF(base) # important to do this before decref below! + */ + __pyx_v_baseptr = NULL; + goto __pyx_L3; + } + /*else*/ { + + /* "numpy.pxd":970 + * baseptr = NULL + * else: + * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< + * baseptr = base + * Py_XDECREF(arr.base) + */ + Py_INCREF(__pyx_v_base); + + /* "numpy.pxd":971 + * else: + * Py_INCREF(base) # important to do this before decref below! + * baseptr = base # <<<<<<<<<<<<<< + * Py_XDECREF(arr.base) + * arr.base = baseptr + */ + __pyx_v_baseptr = ((PyObject *)__pyx_v_base); + } + __pyx_L3:; + + /* "numpy.pxd":972 + * Py_INCREF(base) # important to do this before decref below! 
+ * baseptr = base + * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< + * arr.base = baseptr + * + */ + Py_XDECREF(__pyx_v_arr->base); + + /* "numpy.pxd":973 + * baseptr = base + * Py_XDECREF(arr.base) + * arr.base = baseptr # <<<<<<<<<<<<<< + * + * cdef inline object get_array_base(ndarray arr): + */ + __pyx_v_arr->base = __pyx_v_baseptr; + + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":975 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("get_array_base", 0); + + /* "numpy.pxd":976 + * + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: # <<<<<<<<<<<<<< + * return None + * else: + */ + __pyx_t_1 = (__pyx_v_arr->base == NULL); + if (__pyx_t_1) { + + /* "numpy.pxd":977 + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: + * return None # <<<<<<<<<<<<<< + * else: + * return arr.base + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L0; + goto __pyx_L3; + } + /*else*/ { + + /* "numpy.pxd":979 + * return None + * else: + * return arr.base # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_arr->base)); + __pyx_r = ((PyObject *)__pyx_v_arr->base); + goto __pyx_L0; + } + __pyx_L3:; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef __pyx_moduledef = { + #if PY_VERSION_HEX < 0x03020000 + { PyObject_HEAD_INIT(NULL) NULL, 0, NULL }, + #else + PyModuleDef_HEAD_INIT, + #endif + __Pyx_NAMESTR("boundary_none"), + 0, /* m_doc */ + -1, /* m_size */ + __pyx_methods /* 
m_methods */, + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_1, __pyx_k_1, sizeof(__pyx_k_1), 0, 0, 1, 0}, + {&__pyx_kp_u_11, __pyx_k_11, sizeof(__pyx_k_11), 0, 1, 0, 0}, + {&__pyx_kp_u_12, __pyx_k_12, sizeof(__pyx_k_12), 0, 1, 0, 0}, + {&__pyx_kp_u_15, __pyx_k_15, sizeof(__pyx_k_15), 0, 1, 0, 0}, + {&__pyx_n_s_19, __pyx_k_19, sizeof(__pyx_k_19), 0, 0, 1, 1}, + {&__pyx_kp_s_20, __pyx_k_20, sizeof(__pyx_k_20), 0, 0, 1, 0}, + {&__pyx_n_s_21, __pyx_k_21, sizeof(__pyx_k_21), 0, 0, 1, 1}, + {&__pyx_n_s_24, __pyx_k_24, sizeof(__pyx_k_24), 0, 0, 1, 1}, + {&__pyx_n_s_27, __pyx_k_27, sizeof(__pyx_k_27), 0, 0, 1, 1}, + {&__pyx_kp_u_5, __pyx_k_5, sizeof(__pyx_k_5), 0, 1, 0, 0}, + {&__pyx_kp_u_7, __pyx_k_7, sizeof(__pyx_k_7), 0, 1, 0, 0}, + {&__pyx_kp_u_9, __pyx_k_9, sizeof(__pyx_k_9), 0, 1, 0, 0}, + {&__pyx_n_s__DTYPE, __pyx_k__DTYPE, sizeof(__pyx_k__DTYPE), 0, 0, 1, 1}, + {&__pyx_n_s__RuntimeError, __pyx_k__RuntimeError, sizeof(__pyx_k__RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s__ValueError, __pyx_k__ValueError, sizeof(__pyx_k__ValueError), 0, 0, 1, 1}, + {&__pyx_n_s____main__, __pyx_k____main__, sizeof(__pyx_k____main__), 0, 0, 1, 1}, + {&__pyx_n_s____test__, __pyx_k____test__, sizeof(__pyx_k____test__), 0, 0, 1, 1}, + {&__pyx_n_s__bot, __pyx_k__bot, sizeof(__pyx_k__bot), 0, 0, 1, 1}, + {&__pyx_n_s__conv, __pyx_k__conv, sizeof(__pyx_k__conv), 0, 0, 1, 1}, + {&__pyx_n_s__dtype, __pyx_k__dtype, sizeof(__pyx_k__dtype), 0, 0, 1, 1}, + {&__pyx_n_s__f, __pyx_k__f, sizeof(__pyx_k__f), 0, 0, 1, 1}, + {&__pyx_n_s__fixed, __pyx_k__fixed, sizeof(__pyx_k__fixed), 0, 0, 1, 1}, + {&__pyx_n_s__float, __pyx_k__float, sizeof(__pyx_k__float), 0, 0, 1, 1}, + {&__pyx_n_s__g, __pyx_k__g, sizeof(__pyx_k__g), 0, 0, 1, 1}, + {&__pyx_n_s__i, __pyx_k__i, sizeof(__pyx_k__i), 0, 0, 1, 1}, + {&__pyx_n_s__ii, __pyx_k__ii, sizeof(__pyx_k__ii), 0, 0, 1, 1}, + {&__pyx_n_s__iimax, 
__pyx_k__iimax, sizeof(__pyx_k__iimax), 0, 0, 1, 1}, + {&__pyx_n_s__iimin, __pyx_k__iimin, sizeof(__pyx_k__iimin), 0, 0, 1, 1}, + {&__pyx_n_s__j, __pyx_k__j, sizeof(__pyx_k__j), 0, 0, 1, 1}, + {&__pyx_n_s__jj, __pyx_k__jj, sizeof(__pyx_k__jj), 0, 0, 1, 1}, + {&__pyx_n_s__jjmax, __pyx_k__jjmax, sizeof(__pyx_k__jjmax), 0, 0, 1, 1}, + {&__pyx_n_s__jjmin, __pyx_k__jjmin, sizeof(__pyx_k__jjmin), 0, 0, 1, 1}, + {&__pyx_n_s__k, __pyx_k__k, sizeof(__pyx_k__k), 0, 0, 1, 1}, + {&__pyx_n_s__ker, __pyx_k__ker, sizeof(__pyx_k__ker), 0, 0, 1, 1}, + {&__pyx_n_s__kk, __pyx_k__kk, sizeof(__pyx_k__kk), 0, 0, 1, 1}, + {&__pyx_n_s__kkmax, __pyx_k__kkmax, sizeof(__pyx_k__kkmax), 0, 0, 1, 1}, + {&__pyx_n_s__kkmin, __pyx_k__kkmin, sizeof(__pyx_k__kkmin), 0, 0, 1, 1}, + {&__pyx_n_s__nkx, __pyx_k__nkx, sizeof(__pyx_k__nkx), 0, 0, 1, 1}, + {&__pyx_n_s__nky, __pyx_k__nky, sizeof(__pyx_k__nky), 0, 0, 1, 1}, + {&__pyx_n_s__nkz, __pyx_k__nkz, sizeof(__pyx_k__nkz), 0, 0, 1, 1}, + {&__pyx_n_s__np, __pyx_k__np, sizeof(__pyx_k__np), 0, 0, 1, 1}, + {&__pyx_n_s__numpy, __pyx_k__numpy, sizeof(__pyx_k__numpy), 0, 0, 1, 1}, + {&__pyx_n_s__nx, __pyx_k__nx, sizeof(__pyx_k__nx), 0, 0, 1, 1}, + {&__pyx_n_s__ny, __pyx_k__ny, sizeof(__pyx_k__ny), 0, 0, 1, 1}, + {&__pyx_n_s__nz, __pyx_k__nz, sizeof(__pyx_k__nz), 0, 0, 1, 1}, + {&__pyx_n_s__range, __pyx_k__range, sizeof(__pyx_k__range), 0, 0, 1, 1}, + {&__pyx_n_s__top, __pyx_k__top, sizeof(__pyx_k__top), 0, 0, 1, 1}, + {&__pyx_n_s__val, __pyx_k__val, sizeof(__pyx_k__val), 0, 0, 1, 1}, + {&__pyx_n_s__wkx, __pyx_k__wkx, sizeof(__pyx_k__wkx), 0, 0, 1, 1}, + {&__pyx_n_s__wky, __pyx_k__wky, sizeof(__pyx_k__wky), 0, 0, 1, 1}, + {&__pyx_n_s__wkz, __pyx_k__wkz, sizeof(__pyx_k__wkz), 0, 0, 1, 1}, + {&__pyx_n_s__zeros, __pyx_k__zeros, sizeof(__pyx_k__zeros), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_ValueError = __Pyx_GetName(__pyx_b, __pyx_n_s__ValueError); if (!__pyx_builtin_ValueError) {__pyx_filename = 
__pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_builtin_range = __Pyx_GetName(__pyx_b, __pyx_n_s__range); if (!__pyx_builtin_range) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_builtin_RuntimeError = __Pyx_GetName(__pyx_b, __pyx_n_s__RuntimeError); if (!__pyx_builtin_RuntimeError) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "astropy/convolution/boundary_none.pyx":20 + * + * if g.shape[0] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_2 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_2); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_2)); + + /* "astropy/convolution/boundary_none.pyx":84 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_3 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_3); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_3)); + + /* "astropy/convolution/boundary_none.pyx":158 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_4 = PyTuple_Pack(1, 
((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 158; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_4); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_4)); + + /* "numpy.pxd":215 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_k_tuple_6 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_5)); if (unlikely(!__pyx_k_tuple_6)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_6); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_6)); + + /* "numpy.pxd":219 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_k_tuple_8 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_7)); if (unlikely(!__pyx_k_tuple_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_8); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_8)); + + /* "numpy.pxd":257 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_k_tuple_10 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_9)); if (unlikely(!__pyx_k_tuple_10)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_10); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_10)); + + /* "numpy.pxd":799 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * 
raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_k_tuple_13 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_12)); if (unlikely(!__pyx_k_tuple_13)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_13); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_13)); + + /* "numpy.pxd":803 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_k_tuple_14 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_9)); if (unlikely(!__pyx_k_tuple_14)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_14); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_14)); + + /* "numpy.pxd":823 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_k_tuple_16 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_15)); if (unlikely(!__pyx_k_tuple_16)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_16); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_16)); + + /* "astropy/convolution/boundary_none.pyx":16 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_none(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=1] g): + * + */ + __pyx_k_tuple_17 = PyTuple_Pack(15, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__nkx), ((PyObject 
*)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_17)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_17); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_17)); + __pyx_k_codeobj_18 = (PyObject*)__Pyx_PyCode_New(2, 0, 15, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_19, 16, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_18)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + + /* "astropy/convolution/boundary_none.pyx":80 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_none(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g): + * + */ + __pyx_k_tuple_22 = PyTuple_Pack(22, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__ny), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__nky), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__wky), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__j), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__jj), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__jjmin), ((PyObject *)__pyx_n_s__jjmax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_22)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 80; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + 
__Pyx_GOTREF(__pyx_k_tuple_22); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_22)); + __pyx_k_codeobj_23 = (PyObject*)__Pyx_PyCode_New(2, 0, 22, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_24, 80, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_23)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 80; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + + /* "astropy/convolution/boundary_none.pyx":154 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_none(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g): + * + */ + __pyx_k_tuple_25 = PyTuple_Pack(29, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__ny), ((PyObject *)__pyx_n_s__nz), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__nky), ((PyObject *)__pyx_n_s__nkz), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__wky), ((PyObject *)__pyx_n_s__wkz), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__j), ((PyObject *)__pyx_n_s__k), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__jj), ((PyObject *)__pyx_n_s__kk), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__jjmin), ((PyObject *)__pyx_n_s__jjmax), ((PyObject *)__pyx_n_s__kkmin), ((PyObject *)__pyx_n_s__kkmax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_25)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_25); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_25)); + __pyx_k_codeobj_26 = (PyObject*)__Pyx_PyCode_New(2, 0, 29, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_25, __pyx_empty_tuple, 
__pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_27, 154, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_26)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + __pyx_int_15 = PyInt_FromLong(15); if (unlikely(!__pyx_int_15)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + return 0; + __pyx_L1_error:; + return -1; +} + +#if PY_MAJOR_VERSION < 3 +PyMODINIT_FUNC initboundary_none(void); /*proto*/ +PyMODINIT_FUNC initboundary_none(void) +#else +PyMODINIT_FUNC PyInit_boundary_none(void); /*proto*/ +PyMODINIT_FUNC PyInit_boundary_none(void) +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_REFNANNY + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); + if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); + } + #endif + __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit_boundary_none(void)", 0); + if ( __Pyx_check_binary_version() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #ifdef __Pyx_CyFunction_USED + if (__Pyx_CyFunction_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 
1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4(__Pyx_NAMESTR("boundary_none"), __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (!PyDict_GetItemString(modules, "astropy.convolution.boundary_none")) { + if (unlikely(PyDict_SetItemString(modules, "astropy.convolution.boundary_none", __pyx_m) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + } + #endif + __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME)); if (unlikely(!__pyx_b)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + /*--- Initialize various global constants etc. 
---*/ + if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (__pyx_module_is_main_astropy__convolution__boundary_none) { + if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s____main__) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + } + /*--- Builtin init code ---*/ + if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Constants init code ---*/ + if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Global init code ---*/ + /*--- Variable export code ---*/ + /*--- Function export code ---*/ + /*--- Type init code ---*/ + /*--- Type import code ---*/ + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", + #if CYTHON_COMPILING_IN_PYPY + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 9; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_dtype = __Pyx_ImportType("numpy", "dtype", sizeof(PyArray_Descr), 0); if (unlikely(!__pyx_ptype_5numpy_dtype)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 155; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_flatiter = __Pyx_ImportType("numpy", "flatiter", sizeof(PyArrayIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_flatiter)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 165; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_broadcast = __Pyx_ImportType("numpy", "broadcast", sizeof(PyArrayMultiIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_broadcast)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 169; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_ndarray = 
__Pyx_ImportType("numpy", "ndarray", sizeof(PyArrayObject), 0); if (unlikely(!__pyx_ptype_5numpy_ndarray)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 178; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_ufunc = __Pyx_ImportType("numpy", "ufunc", sizeof(PyUFuncObject), 0); if (unlikely(!__pyx_ptype_5numpy_ufunc)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 861; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Variable import code ---*/ + /*--- Function import code ---*/ + /*--- Execution code ---*/ + + /* "astropy/convolution/boundary_none.pyx":3 + * # Licensed under a 3-clause BSD style license - see LICENSE.rst + * from __future__ import division + * import numpy as np # <<<<<<<<<<<<<< + * cimport numpy as np + * + */ + __pyx_t_1 = __Pyx_Import(((PyObject *)__pyx_n_s__numpy), 0, -1); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s__np, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "astropy/convolution/boundary_none.pyx":6 + * cimport numpy as np + * + * DTYPE = np.float # <<<<<<<<<<<<<< + * ctypedef np.float_t DTYPE_t + * + */ + __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__float); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyObject_SetAttr(__pyx_m, __pyx_n_s__DTYPE, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* 
"astropy/convolution/boundary_none.pyx":16 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_none(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=1] g): + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_13boundary_none_1convolve1d_boundary_none, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_19, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_none.pyx":80 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_none(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g): + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_13boundary_none_3convolve2d_boundary_none, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 80; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_24, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 80; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_none.pyx":154 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_none(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g): + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_13boundary_none_5convolve3d_boundary_none, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + 
__Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_27, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_none.pyx":1 + * # Licensed under a 3-clause BSD style license - see LICENSE.rst # <<<<<<<<<<<<<< + * from __future__ import division + * import numpy as np + */ + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_2)); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s____test__, ((PyObject *)__pyx_t_2)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + + /* "numpy.pxd":975 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + if (__pyx_m) { + __Pyx_AddTraceback("init astropy.convolution.boundary_none", __pyx_clineno, __pyx_lineno, __pyx_filename); + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init astropy.convolution.boundary_none"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if PY_MAJOR_VERSION < 3 + return; + #else + return __pyx_m; + #endif +} + +/* Runtime support code */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif /* CYTHON_REFNANNY */ + +static PyObject 
*__Pyx_GetName(PyObject *dict, PyObject *name) { + PyObject *result; + result = PyObject_GetAttr(dict, name); + if (!result) { + if (dict != __pyx_b) { + PyErr_Clear(); + result = PyObject_GetAttr(__pyx_b, name); + } + if (!result) { + PyErr_SetObject(PyExc_NameError, name); + } + } + return result; +} + +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%s() takes %s %" CYTHON_FORMAT_SSIZE_T "d positional argument%s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? "" : "s", num_found); +} + +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == 
PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%s() got an unexpected keyword argument '%s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact) +{ + if (!type) { + PyErr_Format(PyExc_SystemError, "Missing type object"); + return 0; + } + if (none_allowed && obj == Py_None) return 1; + else if (exact) { + if (Py_TYPE(obj) == type) return 1; + } + else { + if (PyObject_TypeCheck(obj, type)) return 1; + } + PyErr_Format(PyExc_TypeError, + "Argument '%s' has incorrect type (expected %s, got %s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); + return 0; +} + +static CYTHON_INLINE int __Pyx_IsLittleEndian(void) { + unsigned int n = 1; + return *(unsigned char*)(&n) != 0; +} +static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx, + __Pyx_BufFmt_StackElem* stack, + __Pyx_TypeInfo* type) { + stack[0].field = &ctx->root; + stack[0].parent_offset = 0; + ctx->root.type = type; + ctx->root.name = "buffer dtype"; + ctx->root.offset = 0; + ctx->head = stack; + ctx->head->field = &ctx->root; + ctx->fmt_offset = 0; + ctx->head->parent_offset = 0; + ctx->new_packmode = '@'; + ctx->enc_packmode = '@'; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->is_complex = 0; + 
ctx->is_valid_array = 0; + ctx->struct_alignment = 0; + while (type->typegroup == 'S') { + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = 0; + type = type->fields->type; + } +} +static int __Pyx_BufFmt_ParseNumber(const char** ts) { + int count; + const char* t = *ts; + if (*t < '0' || *t > '9') { + return -1; + } else { + count = *t++ - '0'; + while (*t >= '0' && *t < '9') { + count *= 10; + count += *t++ - '0'; + } + } + *ts = t; + return count; +} +static int __Pyx_BufFmt_ExpectNumber(const char **ts) { + int number = __Pyx_BufFmt_ParseNumber(ts); + if (number == -1) /* First char was not a digit */ + PyErr_Format(PyExc_ValueError,\ + "Does not understand character buffer dtype format string ('%c')", **ts); + return number; +} +static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) { + PyErr_Format(PyExc_ValueError, + "Unexpected format string character: '%c'", ch); +} +static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) { + switch (ch) { + case 'c': return "'char'"; + case 'b': return "'signed char'"; + case 'B': return "'unsigned char'"; + case 'h': return "'short'"; + case 'H': return "'unsigned short'"; + case 'i': return "'int'"; + case 'I': return "'unsigned int'"; + case 'l': return "'long'"; + case 'L': return "'unsigned long'"; + case 'q': return "'long long'"; + case 'Q': return "'unsigned long long'"; + case 'f': return (is_complex ? "'complex float'" : "'float'"); + case 'd': return (is_complex ? "'complex double'" : "'double'"); + case 'g': return (is_complex ? 
"'complex long double'" : "'long double'"); + case 'T': return "a struct"; + case 'O': return "Python object"; + case 'P': return "a pointer"; + case 's': case 'p': return "a string"; + case 0: return "end"; + default: return "unparseable format string"; + } +} +static size_t __Pyx_BufFmt_TypeCharToStandardSize(char ch, int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return 2; + case 'i': case 'I': case 'l': case 'L': return 4; + case 'q': case 'Q': return 8; + case 'f': return (is_complex ? 8 : 4); + case 'd': return (is_complex ? 16 : 8); + case 'g': { + PyErr_SetString(PyExc_ValueError, "Python does not define a standard format string size for long double ('g').."); + return 0; + } + case 'O': case 'P': return sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +static size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) { + switch (ch) { + case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(short); + case 'i': case 'I': return sizeof(int); + case 'l': case 'L': return sizeof(long); + #ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(PY_LONG_LONG); + #endif + case 'f': return sizeof(float) * (is_complex ? 2 : 1); + case 'd': return sizeof(double) * (is_complex ? 2 : 1); + case 'g': return sizeof(long double) * (is_complex ? 
2 : 1); + case 'O': case 'P': return sizeof(void*); + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} +typedef struct { char c; short x; } __Pyx_st_short; +typedef struct { char c; int x; } __Pyx_st_int; +typedef struct { char c; long x; } __Pyx_st_long; +typedef struct { char c; float x; } __Pyx_st_float; +typedef struct { char c; double x; } __Pyx_st_double; +typedef struct { char c; long double x; } __Pyx_st_longdouble; +typedef struct { char c; void *x; } __Pyx_st_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong; +#endif +static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_st_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_st_float) - sizeof(float); + case 'd': return sizeof(__Pyx_st_double) - sizeof(double); + case 'g': return sizeof(__Pyx_st_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_st_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +/* These are for computing the padding at the end of the struct to align + on the first member of the struct. This will probably the same as above, + but we don't have any guarantees. 
+ */ +typedef struct { short x; char c; } __Pyx_pad_short; +typedef struct { int x; char c; } __Pyx_pad_int; +typedef struct { long x; char c; } __Pyx_pad_long; +typedef struct { float x; char c; } __Pyx_pad_float; +typedef struct { double x; char c; } __Pyx_pad_double; +typedef struct { long double x; char c; } __Pyx_pad_longdouble; +typedef struct { void *x; char c; } __Pyx_pad_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { PY_LONG_LONG x; char c; } __Pyx_pad_longlong; +#endif +static size_t __Pyx_BufFmt_TypeCharToPadding(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_pad_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_pad_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_pad_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_pad_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_pad_float) - sizeof(float); + case 'd': return sizeof(__Pyx_pad_double) - sizeof(double); + case 'g': return sizeof(__Pyx_pad_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_pad_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) { + switch (ch) { + case 'c': + return 'H'; + case 'b': case 'h': case 'i': + case 'l': case 'q': case 's': case 'p': + return 'I'; + case 'B': case 'H': case 'I': case 'L': case 'Q': + return 'U'; + case 'f': case 'd': case 'g': + return (is_complex ? 
'C' : 'R'); + case 'O': + return 'O'; + case 'P': + return 'P'; + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} +static void __Pyx_BufFmt_RaiseExpected(__Pyx_BufFmt_Context* ctx) { + if (ctx->head == NULL || ctx->head->field == &ctx->root) { + const char* expected; + const char* quote; + if (ctx->head == NULL) { + expected = "end"; + quote = ""; + } else { + expected = ctx->head->field->type->name; + quote = "'"; + } + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected %s%s%s but got %s", + quote, expected, quote, + __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex)); + } else { + __Pyx_StructField* field = ctx->head->field; + __Pyx_StructField* parent = (ctx->head - 1)->field; + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected '%s' but got %s in '%s.%s'", + field->type->name, __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex), + parent->type->name, field->name); + } +} +static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) { + char group; + size_t size, offset, arraysize = 1; + if (ctx->enc_type == 0) return 0; + if (ctx->head->field->type->arraysize[0]) { + int i, ndim = 0; + if (ctx->enc_type == 's' || ctx->enc_type == 'p') { + ctx->is_valid_array = ctx->head->field->type->ndim == 1; + ndim = 1; + if (ctx->enc_count != ctx->head->field->type->arraysize[0]) { + PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %zu", + ctx->head->field->type->arraysize[0], ctx->enc_count); + return -1; + } + } + if (!ctx->is_valid_array) { + PyErr_Format(PyExc_ValueError, "Expected %d dimensions, got %d", + ctx->head->field->type->ndim, ndim); + return -1; + } + for (i = 0; i < ctx->head->field->type->ndim; i++) { + arraysize *= ctx->head->field->type->arraysize[i]; + } + ctx->is_valid_array = 0; + ctx->enc_count = 1; + } + group = __Pyx_BufFmt_TypeCharToGroup(ctx->enc_type, ctx->is_complex); + do { + __Pyx_StructField* field = ctx->head->field; + __Pyx_TypeInfo* 
type = field->type; + if (ctx->enc_packmode == '@' || ctx->enc_packmode == '^') { + size = __Pyx_BufFmt_TypeCharToNativeSize(ctx->enc_type, ctx->is_complex); + } else { + size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex); + } + if (ctx->enc_packmode == '@') { + size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex); + size_t align_mod_offset; + if (align_at == 0) return -1; + align_mod_offset = ctx->fmt_offset % align_at; + if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset; + if (ctx->struct_alignment == 0) + ctx->struct_alignment = __Pyx_BufFmt_TypeCharToPadding(ctx->enc_type, + ctx->is_complex); + } + if (type->size != size || type->typegroup != group) { + if (type->typegroup == 'C' && type->fields != NULL) { + size_t parent_offset = ctx->head->parent_offset + field->offset; + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = parent_offset; + continue; + } + if ((type->typegroup == 'H' || group == 'H') && type->size == size) { + } else { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + } + offset = ctx->head->parent_offset + field->offset; + if (ctx->fmt_offset != offset) { + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch; next field is at offset %" CYTHON_FORMAT_SSIZE_T "d but %" CYTHON_FORMAT_SSIZE_T "d expected", + (Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset); + return -1; + } + ctx->fmt_offset += size; + if (arraysize) + ctx->fmt_offset += (arraysize - 1) * size; + --ctx->enc_count; /* Consume from buffer string */ + while (1) { + if (field == &ctx->root) { + ctx->head = NULL; + if (ctx->enc_count != 0) { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + break; /* breaks both loops as ctx->enc_count == 0 */ + } + ctx->head->field = ++field; + if (field->type == NULL) { + --ctx->head; + field = ctx->head->field; + continue; + } else if (field->type->typegroup == 'S') { + size_t parent_offset = ctx->head->parent_offset + field->offset; + 
if (field->type->fields->type == NULL) continue; /* empty struct */ + field = field->type->fields; + ++ctx->head; + ctx->head->field = field; + ctx->head->parent_offset = parent_offset; + break; + } else { + break; + } + } + } while (ctx->enc_count); + ctx->enc_type = 0; + ctx->is_complex = 0; + return 0; +} +static CYTHON_INLINE PyObject * +__pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp) +{ + const char *ts = *tsp; + int i = 0, number; + int ndim = ctx->head->field->type->ndim; +; + ++ts; + if (ctx->new_count != 1) { + PyErr_SetString(PyExc_ValueError, + "Cannot handle repeated arrays in format string"); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + while (*ts && *ts != ')') { + if (isspace(*ts)) + continue; + number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + if (i < ndim && (size_t) number != ctx->head->field->type->arraysize[i]) + return PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %d", + ctx->head->field->type->arraysize[i], number); + if (*ts != ',' && *ts != ')') + return PyErr_Format(PyExc_ValueError, + "Expected a comma in format string, got '%c'", *ts); + if (*ts == ',') ts++; + i++; + } + if (i != ndim) + return PyErr_Format(PyExc_ValueError, "Expected %d dimension(s), got %d", + ctx->head->field->type->ndim, i); + if (!*ts) { + PyErr_SetString(PyExc_ValueError, + "Unexpected end of format string, expected ')'"); + return NULL; + } + ctx->is_valid_array = 1; + ctx->new_count = 1; + *tsp = ++ts; + return Py_None; +} +static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts) { + int got_Z = 0; + while (1) { + switch(*ts) { + case 0: + if (ctx->enc_type != 0 && ctx->head == NULL) { + __Pyx_BufFmt_RaiseExpected(ctx); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + if (ctx->head != NULL) { + __Pyx_BufFmt_RaiseExpected(ctx); + return NULL; + } + return ts; + case ' ': + case 10: + case 13: 
+ ++ts; + break; + case '<': + if (!__Pyx_IsLittleEndian()) { + PyErr_SetString(PyExc_ValueError, "Little-endian buffer not supported on big-endian compiler"); + return NULL; + } + ctx->new_packmode = '='; + ++ts; + break; + case '>': + case '!': + if (__Pyx_IsLittleEndian()) { + PyErr_SetString(PyExc_ValueError, "Big-endian buffer not supported on little-endian compiler"); + return NULL; + } + ctx->new_packmode = '='; + ++ts; + break; + case '=': + case '@': + case '^': + ctx->new_packmode = *ts++; + break; + case 'T': /* substruct */ + { + const char* ts_after_sub; + size_t i, struct_count = ctx->new_count; + size_t struct_alignment = ctx->struct_alignment; + ctx->new_count = 1; + ++ts; + if (*ts != '{') { + PyErr_SetString(PyExc_ValueError, "Buffer acquisition: Expected '{' after 'T'"); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; /* Erase processed last struct element */ + ctx->enc_count = 0; + ctx->struct_alignment = 0; + ++ts; + ts_after_sub = ts; + for (i = 0; i != struct_count; ++i) { + ts_after_sub = __Pyx_BufFmt_CheckString(ctx, ts); + if (!ts_after_sub) return NULL; + } + ts = ts_after_sub; + if (struct_alignment) ctx->struct_alignment = struct_alignment; + } + break; + case '}': /* end of substruct; either repeat or move on */ + { + size_t alignment = ctx->struct_alignment; + ++ts; + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; /* Erase processed last struct element */ + if (alignment && ctx->fmt_offset % alignment) { + ctx->fmt_offset += alignment - (ctx->fmt_offset % alignment); + } + } + return ts; + case 'x': + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->fmt_offset += ctx->new_count; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->enc_packmode = ctx->new_packmode; + ++ts; + break; + case 'Z': + got_Z = 1; + ++ts; + if (*ts != 'f' && *ts != 'd' && *ts != 'g') { + __Pyx_BufFmt_RaiseUnexpectedChar('Z'); + return NULL; 
+ } /* fall through */ + case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I': + case 'l': case 'L': case 'q': case 'Q': + case 'f': case 'd': case 'g': + case 'O': case 's': case 'p': + if (ctx->enc_type == *ts && got_Z == ctx->is_complex && + ctx->enc_packmode == ctx->new_packmode) { + ctx->enc_count += ctx->new_count; + } else { + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_count = ctx->new_count; + ctx->enc_packmode = ctx->new_packmode; + ctx->enc_type = *ts; + ctx->is_complex = got_Z; + } + ++ts; + ctx->new_count = 1; + got_Z = 0; + break; + case ':': + ++ts; + while(*ts != ':') ++ts; + ++ts; + break; + case '(': + if (!__pyx_buffmt_parse_array(ctx, &ts)) return NULL; + break; + default: + { + int number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + ctx->new_count = (size_t)number; + } + } + } +} +static CYTHON_INLINE void __Pyx_ZeroBuffer(Py_buffer* buf) { + buf->buf = NULL; + buf->obj = NULL; + buf->strides = __Pyx_zeros; + buf->shape = __Pyx_zeros; + buf->suboffsets = __Pyx_minusones; +} +static CYTHON_INLINE int __Pyx_GetBufferAndValidate( + Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, + int nd, int cast, __Pyx_BufFmt_StackElem* stack) +{ + if (obj == Py_None || obj == NULL) { + __Pyx_ZeroBuffer(buf); + return 0; + } + buf->buf = NULL; + if (__Pyx_GetBuffer(obj, buf, flags) == -1) goto fail; + if (buf->ndim != nd) { + PyErr_Format(PyExc_ValueError, + "Buffer has wrong number of dimensions (expected %d, got %d)", + nd, buf->ndim); + goto fail; + } + if (!cast) { + __Pyx_BufFmt_Context ctx; + __Pyx_BufFmt_Init(&ctx, stack, dtype); + if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail; + } + if ((unsigned)buf->itemsize != dtype->size) { + PyErr_Format(PyExc_ValueError, + "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "d byte%s) does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "d byte%s)", + buf->itemsize, (buf->itemsize > 1) ? 
"s" : "", + dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : ""); + goto fail; + } + if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones; + return 0; +fail:; + __Pyx_ZeroBuffer(buf); + return -1; +} +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) { + if (info->buf == NULL) return; + if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL; + __Pyx_ReleaseBuffer(info); +} + +static CYTHON_INLINE long __Pyx_mod_long(long a, long b) { + long r = a % b; + r += ((r != 0) & ((r ^ b) < 0)) * b; + return r; +} + +static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb) { +#if CYTHON_COMPILING_IN_CPYTHON + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyThreadState *tstate = PyThreadState_GET(); + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_Restore(type, value, tb); +#endif +} +static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) { +#if CYTHON_COMPILING_IN_CPYTHON + PyThreadState *tstate = PyThreadState_GET(); + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(type, value, tb); +#endif +} + +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + #if PY_VERSION_HEX < 0x02050000 + if 
(PyClass_Check(type)) { + #else + if (PyType_Check(type)) { + #endif +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + #if PY_VERSION_HEX < 0x02050000 + if (PyInstance_Check(type)) { + type = (PyObject*) ((PyInstanceObject*)type)->in_class; + Py_INCREF(type); + } + else { + type = 0; + PyErr_SetString(PyExc_TypeError, + "raise: exception must be an old-style class or instance"); + goto raise_error; + } + #else + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + #endif + } + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else /* Python 3+ */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } + else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyEval_CallObject(type, args); + Py_DECREF(args); + if (!owned_instance) + goto 
bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause && cause != Py_None) { + PyObject *fixed_cause; + if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } + else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } + else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { + PyThreadState *tstate = PyThreadState_GET(); + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +static CYTHON_INLINE long __Pyx_div_long(long a, long b) { + long q = a / b; + long r = a - q*b; + q -= ((r != 0) & ((r ^ b) < 0)); + return q; +} + +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_Format(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(PyObject_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%s to unpack", + 
index, (index == 1) ? "" : "s"); +} + +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_COMPILING_IN_CPYTHON + PyThreadState *tstate = PyThreadState_GET(); + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +#if PY_MAJOR_VERSION < 3 +static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) { + CYTHON_UNUSED PyObject *getbuffer_cobj; + #if PY_VERSION_HEX >= 0x02060000 + if (PyObject_CheckBuffer(obj)) return PyObject_GetBuffer(obj, view, flags); + #endif + if (PyObject_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) return __pyx_pw_5numpy_7ndarray_1__getbuffer__(obj, view, flags); + #if PY_VERSION_HEX < 0x02060000 + if (obj->ob_type->tp_dict && + (getbuffer_cobj = PyMapping_GetItemString(obj->ob_type->tp_dict, + "__pyx_getbuffer"))) { + getbufferproc func; + #if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION == 0) + func = (getbufferproc) PyCapsule_GetPointer(getbuffer_cobj, "getbuffer(obj, view, flags)"); 
+ #else + func = (getbufferproc) PyCObject_AsVoidPtr(getbuffer_cobj); + #endif + Py_DECREF(getbuffer_cobj); + if (!func) + goto fail; + return func(obj, view, flags); + } else { + PyErr_Clear(); + } + #endif + PyErr_Format(PyExc_TypeError, "'%100s' does not have the buffer interface", Py_TYPE(obj)->tp_name); +#if PY_VERSION_HEX < 0x02060000 +fail: +#endif + return -1; +} +static void __Pyx_ReleaseBuffer(Py_buffer *view) { + PyObject *obj = view->obj; + CYTHON_UNUSED PyObject *releasebuffer_cobj; + if (!obj) return; + #if PY_VERSION_HEX >= 0x02060000 + if (PyObject_CheckBuffer(obj)) { + PyBuffer_Release(view); + return; + } + #endif + if (PyObject_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) { __pyx_pw_5numpy_7ndarray_3__releasebuffer__(obj, view); return; } + #if PY_VERSION_HEX < 0x02060000 + if (obj->ob_type->tp_dict && + (releasebuffer_cobj = PyMapping_GetItemString(obj->ob_type->tp_dict, + "__pyx_releasebuffer"))) { + releasebufferproc func; + #if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION == 0) + func = (releasebufferproc) PyCapsule_GetPointer(releasebuffer_cobj, "releasebuffer(obj, view)"); + #else + func = (releasebufferproc) PyCObject_AsVoidPtr(releasebuffer_cobj); + #endif + Py_DECREF(releasebuffer_cobj); + if (!func) + goto fail; + func(obj, view); + return; + } else { + PyErr_Clear(); + } + #endif + goto nofail; +#if PY_VERSION_HEX < 0x02060000 +fail: +#endif + PyErr_WriteUnraisable(obj); +nofail: + Py_DECREF(obj); + view->obj = NULL; +} +#endif /* PY_MAJOR_VERSION < 3 */ + + + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_import = 0; + py_import = __Pyx_GetAttrString(__pyx_b, "__import__"); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if 
(!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + #if PY_VERSION_HEX >= 0x02050000 + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(1); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + #endif + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; /* try absolute import on failure */ + } + #endif + if (!module) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } + #else + if (level>0) { + PyErr_SetString(PyExc_RuntimeError, "Relative import is not supported for Python <=2.4."); + goto bad; + } + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, NULL); + #endif +bad: + #if PY_VERSION_HEX < 0x03030000 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return ::std::complex< float >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return x + y*(__pyx_t_float_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_float_complex 
__pyx_t_float_complex_from_parts(float x, float y) { + __pyx_t_float_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +#if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eqf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sumf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_difff(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prodf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quotf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float denom = b.real * b.real + b.imag * b.imag; + z.real = (a.real * b.real + a.imag * b.imag) / denom; + z.imag = (a.imag * b.real - a.real * b.imag) / denom; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_negf(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zerof(__pyx_t_float_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conjf(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE float __Pyx_c_absf(__pyx_t_float_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrtf(z.real*z.real + z.imag*z.imag); + #else + return hypotf(z.real, z.imag); + #endif + } + static CYTHON_INLINE 
__pyx_t_float_complex __Pyx_c_powf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + float denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(a, a); + case 3: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(z, a); + case 4: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } + r = a.real; + theta = 0; + } else { + r = __Pyx_c_absf(a); + theta = atan2f(a.imag, a.real); + } + lnr = logf(r); + z_r = expf(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cosf(z_theta); + z.imag = z_r * sinf(z_theta); + return z; + } + #endif +#endif + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return ::std::complex< double >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return x + y*(__pyx_t_double_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + __pyx_t_double_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +#if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq(__pyx_t_double_complex a, __pyx_t_double_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE 
__pyx_t_double_complex __Pyx_c_diff(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double denom = b.real * b.real + b.imag * b.imag; + z.real = (a.real * b.real + a.imag * b.imag) / denom; + z.imag = (a.imag * b.real - a.real * b.imag) / denom; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero(__pyx_t_double_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE double __Pyx_c_abs(__pyx_t_double_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrt(z.real*z.real + z.imag*z.imag); + #else + return hypot(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + double denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod(a, a); + return __Pyx_c_prod(a, a); + case 3: + z = 
__Pyx_c_prod(a, a); + return __Pyx_c_prod(z, a); + case 4: + z = __Pyx_c_prod(a, a); + return __Pyx_c_prod(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } + r = a.real; + theta = 0; + } else { + r = __Pyx_c_abs(a); + theta = atan2(a.imag, a.real); + } + lnr = log(r); + z_r = exp(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cos(z_theta); + z.imag = z_r * sin(z_theta); + return z; + } + #endif +#endif + +static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject* x) { + const unsigned char neg_one = (unsigned char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to unsigned char" : + "value too large to convert to unsigned char"); + } + return (unsigned char)-1; + } + return (unsigned char)val; + } + return (unsigned char)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject* x) { + const unsigned short neg_one = (unsigned short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to unsigned short" : + "value too large to convert to unsigned short"); + } + return (unsigned short)-1; + } + return (unsigned short)val; + } + return (unsigned short)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject* x) { + const unsigned int neg_one = (unsigned int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to unsigned int" : + "value too large to convert to unsigned int"); + } + return (unsigned int)-1; + } + return (unsigned int)val; + } + return (unsigned int)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject* x) { + const char neg_one = (char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to char" : + "value too large to convert to char"); + } + return (char)-1; + } + return (char)val; + } + return (char)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject* x) { + const short neg_one = (short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to short" : + "value too large to convert to short"); + } + return (short)-1; + } + return (short)val; + } + return (short)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject* x) { + const int neg_one = (int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to int" : + "value too large to convert to int"); + } + return (int)-1; + } + return (int)val; + } + return (int)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject* x) { + const signed char neg_one = (signed char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to signed char" : + "value too large to convert to signed char"); + } + return (signed char)-1; + } + return (signed char)val; + } + return (signed char)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject* x) { + const signed short neg_one = (signed short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to signed short" : + "value too large to convert to signed short"); + } + return (signed short)-1; + } + return (signed short)val; + } + return (signed short)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject* x) { + const signed int neg_one = (signed int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to signed int" : + "value too large to convert to signed int"); + } + return (signed int)-1; + } + return (signed int)val; + } + return (signed int)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject* x) { + const int neg_one = (int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to int" : + "value too large to convert to int"); + } + return (int)-1; + } + return (int)val; + } + return (int)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject* x) { + const unsigned long neg_one = (unsigned long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned long"); + return (unsigned long)-1; + } + return (unsigned long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned long"); + return (unsigned long)-1; + } + return (unsigned long)PyLong_AsUnsignedLong(x); + } else { + return (unsigned long)PyLong_AsLong(x); + } + } else { + unsigned long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (unsigned long)-1; + val = __Pyx_PyInt_AsUnsignedLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject* x) { + const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned PY_LONG_LONG"); + return (unsigned PY_LONG_LONG)-1; + } + return (unsigned PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned PY_LONG_LONG"); + return (unsigned PY_LONG_LONG)-1; + } + return (unsigned 
PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return (unsigned PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + unsigned PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (unsigned PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsUnsignedLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject* x) { + const long neg_one = (long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long)-1; + } + return (long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long)-1; + } + return (long)PyLong_AsUnsignedLong(x); + } else { + return (long)PyLong_AsLong(x); + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (long)-1; + val = __Pyx_PyInt_AsLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject* x) { + const PY_LONG_LONG neg_one = (PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to PY_LONG_LONG"); + return (PY_LONG_LONG)-1; + } + return (PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to PY_LONG_LONG"); + return (PY_LONG_LONG)-1; + } + return (PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return 
(PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject* x) { + const signed long neg_one = (signed long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed long"); + return (signed long)-1; + } + return (signed long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed long"); + return (signed long)-1; + } + return (signed long)PyLong_AsUnsignedLong(x); + } else { + return (signed long)PyLong_AsLong(x); + } + } else { + signed long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (signed long)-1; + val = __Pyx_PyInt_AsSignedLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject* x) { + const signed PY_LONG_LONG neg_one = (signed PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed PY_LONG_LONG"); + return (signed PY_LONG_LONG)-1; + } + return (signed PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed PY_LONG_LONG"); + return (signed PY_LONG_LONG)-1; + } + return (signed 
PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return (signed PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + signed PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (signed PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsSignedLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + #if PY_VERSION_HEX < 0x02050000 + return PyErr_Warn(NULL, message); + #else + return PyErr_WarnEx(NULL, message, 1); + #endif + } + return 0; +} + +#ifndef __PYX_HAVE_RT_ImportModule +#define __PYX_HAVE_RT_ImportModule +static PyObject *__Pyx_ImportModule(const char *name) { + PyObject *py_name = 0; + PyObject *py_module = 0; + py_name = __Pyx_PyIdentifier_FromString(name); + if (!py_name) + goto bad; + py_module = PyImport_Import(py_name); + Py_DECREF(py_name); + return py_module; +bad: + Py_XDECREF(py_name); + return 0; +} +#endif + +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, + size_t size, int strict) +{ + PyObject *py_module = 0; + PyObject *result = 0; + PyObject *py_name = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + py_module = __Pyx_ImportModule(module_name); + if (!py_module) + goto bad; + py_name = __Pyx_PyIdentifier_FromString(class_name); + if (!py_name) + goto bad; + result = PyObject_GetAttr(py_module, py_name); + Py_DECREF(py_name); + py_name = 0; + Py_DECREF(py_module); + py_module = 0; + if 
(!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%s.%s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (!strict && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility", + module_name, class_name); + #if PY_VERSION_HEX < 0x02050000 + if (PyErr_Warn(NULL, warning) < 0) goto bad; + #else + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + #endif + } + else if ((size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%s.%s has the wrong size, try recompiling", + module_name, class_name); + goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(py_module); + Py_XDECREF(result); + return NULL; +} +#endif + +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = (start + end) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || 
unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + 
py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, /*int argcount,*/ + 0, /*int kwonlyargcount,*/ + 0, /*int nlocals,*/ + 0, /*int stacksize,*/ + 0, /*int flags,*/ + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, /*int firstlineno,*/ + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_globals = 0; + PyFrameObject *py_frame = 0; + py_code = __pyx_find_code_object(c_line ? c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
c_line : py_line, py_code); + } + py_globals = PyModule_GetDict(__pyx_m); + if (!py_globals) goto bad; + py_frame = PyFrame_New( + PyThreadState_GET(), /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + py_globals, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + py_frame->f_lineno = py_line; + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else /* Python 3+ has unicode identifiers */ + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) { + PyNumberMethods *m; + const char *name = NULL; + PyObject *res = NULL; +#if PY_VERSION_HEX < 0x03000000 + if (PyInt_Check(x) || PyLong_Check(x)) +#else + if (PyLong_Check(x)) +#endif + return Py_INCREF(x), x; + m = Py_TYPE(x)->tp_as_number; +#if PY_VERSION_HEX < 0x03000000 + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = PyNumber_Long(x); + } +#else + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Long(x); + } +#endif + if (res) { +#if PY_VERSION_HEX < 0x03000000 + 
if (!PyInt_Check(res) && !PyLong_Check(res)) { +#else + if (!PyLong_Check(res)) { +#endif + PyErr_Format(PyExc_TypeError, + "__%s__ returned non-%s (type %.200s)", + name, name, Py_TYPE(res)->tp_name); + Py_DECREF(res); + return NULL; + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject* x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { +#if PY_VERSION_HEX < 0x02050000 + if (ival <= LONG_MAX) + return PyInt_FromLong((long)ival); + else { + unsigned char *bytes = (unsigned char *) &ival; + int one = 1; int little = (int)*(unsigned char*)&one; + return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0); + } +#else + return PyInt_FromSize_t(ival); +#endif +} +static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject* x) { + unsigned PY_LONG_LONG val = __Pyx_PyInt_AsUnsignedLongLong(x); + if (unlikely(val == (unsigned PY_LONG_LONG)-1 && PyErr_Occurred())) { + return (size_t)-1; + } else if (unlikely(val != (unsigned PY_LONG_LONG)(size_t)val)) { + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to size_t"); + return (size_t)-1; + } + return (size_t)val; +} + + +#endif /* Py_PYTHON_H */ diff --git a/astropy/convolution/boundary_none.pyx b/astropy/convolution/boundary_none.pyx new file mode 100644 index 0000000..1907b4f --- /dev/null +++ b/astropy/convolution/boundary_none.pyx @@ -0,0 +1,233 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import division +import numpy as np +cimport numpy as np + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + +cdef extern from "numpy/npy_math.h": + bint npy_isnan(double x) + +cimport cython + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def 
convolve1d_boundary_none(np.ndarray[DTYPE_t, ndim=1] f, + np.ndarray[DTYPE_t, ndim=1] g): + + if g.shape[0] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int nkx = g.shape[0] + cdef int wkx = nkx // 2 + + # The following need to be set to zeros rather than empty because the + # boundary does not get reset. + cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.zeros([nx], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=1] conv = np.zeros([nx], dtype=DTYPE) + + cdef unsigned int i, ii + + cdef int iimin, iimax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + if npy_isnan(f[i]) and i >= wkx and i < nx - wkx: + top = 0. + bot = 0. + for ii in range(i - wkx, i + wkx + 1): + val = f[ii] + if not npy_isnan(val): + ker = g[(wkx + ii - i)] + top += val * ker + bot += ker + if bot != 0.: + fixed[i] = top / bot + else: + fixed[i] = f[i] + else: + fixed[i] = f[i] + + # Now run the proper convolution + for i in range(wkx, nx - wkx): + if not npy_isnan(fixed[i]): + top = 0. + bot = 0. 
+ for ii in range(i - wkx, i + wkx + 1): + val = fixed[ii] + ker = g[(wkx + ii - i)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i] = top / bot + else: + conv[i] = fixed[i] + else: + conv[i] = fixed[i] + + return conv + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def convolve2d_boundary_none(np.ndarray[DTYPE_t, ndim=2] f, + np.ndarray[DTYPE_t, ndim=2] g): + + if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int ny = f.shape[1] + cdef int nkx = g.shape[0] + cdef int nky = g.shape[1] + cdef int wkx = nkx // 2 + cdef int wky = nky // 2 + + # The following need to be set to zeros rather than empty because the + # boundary does not get reset. + cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.zeros([nx, ny], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=2] conv = np.zeros([nx, ny], dtype=DTYPE) + + cdef unsigned int i, j, ii, jj + + cdef int iimin, iimax, jjmin, jjmax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + for j in range(ny): + if npy_isnan(f[i, j]) and i >= wkx and i < nx - wkx \ + and j >= wky and j < ny - wky: + top = 0. + bot = 0. + for ii in range(i - wkx, i + wkx + 1): + for jj in range(j - wky, j + wky + 1): + val = f[ii, jj] + if not npy_isnan(val): + ker = g[(wkx + ii - i), + (wky + jj - j)] + top += val * ker + bot += ker + if bot != 0.: + fixed[i, j] = top / bot + else: + fixed[i, j] = f[i, j] + else: + fixed[i, j] = f[i, j] + + # Now run the proper convolution + for i in range(wkx, nx - wkx): + for j in range(wky, ny - wky): + if not npy_isnan(fixed[i, j]): + top = 0. + bot = 0. 
+                for ii in range(i - wkx, i + wkx + 1):
+                    for jj in range(j - wky, j + wky + 1):
+                        val = fixed[ii, jj]
+                        ker = g[(wkx + ii - i),
+                                (wky + jj - j)]
+                        if not npy_isnan(val):
+                            top += val * ker
+                            bot += ker
+                if bot != 0:
+                    conv[i, j] = top / bot
+                else:
+                    conv[i, j] = fixed[i, j]
+            else:
+                conv[i, j] = fixed[i, j]
+
+    return conv
+
+
+@cython.boundscheck(False)  # turn off bounds-checking for entire function
+def convolve3d_boundary_none(np.ndarray[DTYPE_t, ndim=3] f,
+                             np.ndarray[DTYPE_t, ndim=3] g):
+
+    if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1:
+        raise ValueError("Convolution kernel must have odd dimensions")
+
+    assert f.dtype == DTYPE and g.dtype == DTYPE
+
+    cdef int nx = f.shape[0]
+    cdef int ny = f.shape[1]
+    cdef int nz = f.shape[2]
+    cdef int nkx = g.shape[0]
+    cdef int nky = g.shape[1]
+    cdef int nkz = g.shape[2]
+    cdef int wkx = nkx // 2
+    cdef int wky = nky // 2
+    cdef int wkz = nkz // 2
+
+    # The following need to be set to zeros rather than empty because the
+    # boundary does not get reset.
+    cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.zeros([nx, ny, nz], dtype=DTYPE)
+    cdef np.ndarray[DTYPE_t, ndim=3] conv = np.zeros([nx, ny, nz], dtype=DTYPE)
+
+    cdef unsigned int i, j, k, ii, jj, kk
+
+    cdef int iimin, iimax, jjmin, jjmax, kkmin, kkmax
+
+    cdef DTYPE_t top, bot, ker, val
+
+    # Need a first pass to replace NaN values with value convolved from
+    # neighboring values
+    for i in range(nx):
+        for j in range(ny):
+            for k in range(nz):
+                if npy_isnan(f[i, j, k]) and i >= wkx and i < nx - wkx \
+                and j >= wky and j < ny - wky and k >= wkz and k < nz - wkz:  # was `k <= nz - wkz`: off-by-one vs the 1-D/2-D variants; with boundscheck off it read one element past the z edge
+                    top = 0.
+                    bot = 0.
+ for ii in range(i - wkx, i + wkx + 1): + for jj in range(j - wky, j + wky + 1): + for kk in range(k - wkz, k + wkz + 1): + val = f[ii, jj, kk] + if not npy_isnan(val): + ker = g[(wkx + ii - i), + (wky + jj - j), + (wkz + kk - k)] + top += val * ker + bot += ker + if bot != 0.: + fixed[i, j, k] = top / bot + else: + fixed[i, j, k] = f[i, j, k] + else: + fixed[i, j, k] = f[i, j, k] + + # Now run the proper convolution + for i in range(wkx, nx - wkx): + for j in range(wky, ny - wky): + for k in range(wkz, nz - wkz): + if not npy_isnan(fixed[i, j, k]): + top = 0. + bot = 0. + for ii in range(i - wkx, i + wkx + 1): + for jj in range(j - wky, j + wky + 1): + for kk in range(k - wkz, k + wkz + 1): + val = fixed[ii, jj, kk] + ker = g[(wkx + ii - i), + (wky + jj - j), + (wkz + kk - k)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i, j, k] = top / bot + else: + conv[i, j, k] = fixed[i, j, k] + else: + conv[i, j, k] = fixed[i, j, k] + + return conv diff --git a/astropy/convolution/boundary_wrap.c b/astropy/convolution/boundary_wrap.c new file mode 100644 index 0000000..58809f7 --- /dev/null +++ b/astropy/convolution/boundary_wrap.c @@ -0,0 +1,8608 @@ +/* Generated by Cython 0.18 on Tue Sep 23 16:50:23 2014 */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02040000 + #error Cython requires Python 2.4+. 
+#else +#include /* For offsetof */ +#ifndef offsetof +#define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION +#define CYTHON_COMPILING_IN_PYPY 1 +#define CYTHON_COMPILING_IN_CPYTHON 0 +#else +#define CYTHON_COMPILING_IN_PYPY 0 +#define CYTHON_COMPILING_IN_CPYTHON 1 +#endif +#if PY_VERSION_HEX < 0x02050000 + typedef int Py_ssize_t; + #define PY_SSIZE_T_MAX INT_MAX + #define PY_SSIZE_T_MIN INT_MIN + #define PY_FORMAT_SIZE_T "" + #define CYTHON_FORMAT_SSIZE_T "" + #define PyInt_FromSsize_t(z) PyInt_FromLong(z) + #define PyInt_AsSsize_t(o) __Pyx_PyInt_AsInt(o) + #define PyNumber_Index(o) ((PyNumber_Check(o) && !PyFloat_Check(o)) ? 
PyNumber_Int(o) : \ + (PyErr_Format(PyExc_TypeError, \ + "expected index value, got %.200s", Py_TYPE(o)->tp_name), \ + (PyObject*)0)) + #define __Pyx_PyIndex_Check(o) (PyNumber_Check(o) && !PyFloat_Check(o) && \ + !PyComplex_Check(o)) + #define PyIndex_Check __Pyx_PyIndex_Check + #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message) + #define __PYX_BUILD_PY_SSIZE_T "i" +#else + #define __PYX_BUILD_PY_SSIZE_T "n" + #define CYTHON_FORMAT_SSIZE_T "z" + #define __Pyx_PyIndex_Check PyIndex_Check +#endif +#if PY_VERSION_HEX < 0x02060000 + #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt) + #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) + #define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size) + #define PyVarObject_HEAD_INIT(type, size) \ + PyObject_HEAD_INIT(type) size, + #define PyType_Modified(t) + typedef struct { + void *buf; + PyObject *obj; + Py_ssize_t len; + Py_ssize_t itemsize; + int readonly; + int ndim; + char *format; + Py_ssize_t *shape; + Py_ssize_t *strides; + Py_ssize_t *suboffsets; + void *internal; + } Py_buffer; + #define PyBUF_SIMPLE 0 + #define PyBUF_WRITABLE 0x0001 + #define PyBUF_FORMAT 0x0004 + #define PyBUF_ND 0x0008 + #define PyBUF_STRIDES (0x0010 | PyBUF_ND) + #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES) + #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES) + #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES) + #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES) + #define PyBUF_RECORDS (PyBUF_STRIDES | PyBUF_FORMAT | PyBUF_WRITABLE) + #define PyBUF_FULL (PyBUF_INDIRECT | PyBUF_FORMAT | PyBUF_WRITABLE) + typedef int (*getbufferproc)(PyObject *, Py_buffer *, int); + typedef void (*releasebufferproc)(PyObject *, Py_buffer *); +#endif +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ + PyCode_New(a, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#else + #define __Pyx_BUILTIN_MODULE_NAME 
"builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) \ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif +#if PY_MAJOR_VERSION < 3 && PY_MINOR_VERSION < 6 + #define PyUnicode_FromString(s) PyUnicode_Decode(s, strlen(s), "UTF-8", "strict") +#endif +#if PY_MAJOR_VERSION >= 3 + #define Py_TPFLAGS_CHECKTYPES 0 + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3) + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ? \ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) +#else + #define CYTHON_PEP393_ENABLED 0 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_READ(k, d, i) ((k=k), (Py_UCS4)(((Py_UNICODE*)d)[i])) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_VERSION_HEX < 0x02060000 + #define PyBytesObject PyStringObject + #define PyBytes_Type PyString_Type + #define PyBytes_Check PyString_Check + #define PyBytes_CheckExact PyString_CheckExact + #define PyBytes_FromString PyString_FromString + #define PyBytes_FromStringAndSize PyString_FromStringAndSize + #define PyBytes_FromFormat PyString_FromFormat + #define PyBytes_DecodeEscape PyString_DecodeEscape + #define PyBytes_AsString PyString_AsString + #define PyBytes_AsStringAndSize 
PyString_AsStringAndSize + #define PyBytes_Size PyString_Size + #define PyBytes_AS_STRING PyString_AS_STRING + #define PyBytes_GET_SIZE PyString_GET_SIZE + #define PyBytes_Repr PyString_Repr + #define PyBytes_Concat PyString_Concat + #define PyBytes_ConcatAndDel PyString_ConcatAndDel +#endif +#if PY_VERSION_HEX < 0x02060000 + #define PySet_Check(obj) PyObject_TypeCheck(obj, &PySet_Type) + #define PyFrozenSet_Check(obj) PyObject_TypeCheck(obj, &PyFrozenSet_Type) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_VERSION_HEX < 0x03020000 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300) + #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b) + #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value) + #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b) +#else + #define 
__Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0))) + #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1))) + #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \ + (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ + (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \ + (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1))) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) +#endif +#if PY_VERSION_HEX < 0x02050000 + #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),((char *)(n))) + #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a)) + #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),((char *)(n))) +#else + #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),(n)) + #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a)) + #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),(n)) +#endif +#if PY_VERSION_HEX < 0x02050000 + #define __Pyx_NAMESTR(n) ((char *)(n)) + #define __Pyx_DOCSTR(n) ((char *)(n)) +#else + #define __Pyx_NAMESTR(n) (n) + #define __Pyx_DOCSTR(n) (n) +#endif + + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#endif + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) +#define _USE_MATH_DEFINES +#endif +#include +#define __PYX_HAVE__astropy__convolution__boundary_wrap +#define __PYX_HAVE_API__astropy__convolution__boundary_wrap +#include "string.h" +#include "stdio.h" +#include "stdlib.h" +#include "numpy/arrayobject.h" +#include "numpy/ufuncobject.h" +#include "numpy/npy_math.h" +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#ifdef PYREX_WITHOUT_ASSERTIONS +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +#ifndef CYTHON_INLINE + #if defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif +#ifndef 
CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/ + +#define __Pyx_PyBytes_FromUString(s) PyBytes_FromString((char*)s) +#define __Pyx_PyBytes_AsUString(s) ((unsigned char*) PyBytes_AsString(s)) +#define __Pyx_Owned_Py_None(b) (Py_INCREF(Py_None), Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? (Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x); +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject*); +#if CYTHON_COMPILING_IN_CPYTHON +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) + + +#ifdef __GNUC__ + /* Test for GCC > 2.95 */ + #if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) + #else /* __GNUC__ > 2 ... */ + #define likely(x) (x) + #define unlikely(x) (x) + #endif /* __GNUC__ > 2 ... 
*/ +#else /* __GNUC__ */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ + +static PyObject *__pyx_m; +static PyObject *__pyx_b; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + +#if !defined(CYTHON_CCOMPLEX) + #if defined(__cplusplus) + #define CYTHON_CCOMPLEX 1 + #elif defined(_Complex_I) + #define CYTHON_CCOMPLEX 1 + #else + #define CYTHON_CCOMPLEX 0 + #endif +#endif +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #include + #else + #include + #endif +#endif +#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__) + #undef _Complex_I + #define _Complex_I 1.0fj +#endif + + +static const char *__pyx_f[] = { + "boundary_wrap.pyx", + "numpy.pxd", + "type.pxd", +}; +#define IS_UNSIGNED(type) (((type) -1) > 0) +struct __Pyx_StructField_; +#define __PYX_BUF_FLAGS_PACKED_STRUCT (1 << 0) +typedef struct { + const char* name; /* for error messages only */ + struct __Pyx_StructField_* fields; + size_t size; /* sizeof(type) */ + size_t arraysize[8]; /* length of array in each dimension */ + int ndim; + char typegroup; /* _R_eal, _C_omplex, Signed _I_nt, _U_nsigned int, _S_truct, _P_ointer, _O_bject, c_H_ar */ + char is_unsigned; + int flags; +} __Pyx_TypeInfo; +typedef struct __Pyx_StructField_ { + __Pyx_TypeInfo* type; + const char* name; + size_t offset; +} __Pyx_StructField; +typedef struct { + __Pyx_StructField* field; + size_t parent_offset; +} __Pyx_BufFmt_StackElem; +typedef struct { + __Pyx_StructField root; + __Pyx_BufFmt_StackElem* head; + size_t fmt_offset; + size_t new_count, enc_count; + size_t struct_alignment; + int is_complex; + char enc_type; + char new_packmode; + char enc_packmode; + char is_valid_array; +} __Pyx_BufFmt_Context; + + +/* "numpy.pxd":723 + * # in Cython to enable them only on the right systems. 
+ * + * ctypedef npy_int8 int8_t # <<<<<<<<<<<<<< + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + */ +typedef npy_int8 __pyx_t_5numpy_int8_t; + +/* "numpy.pxd":724 + * + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t # <<<<<<<<<<<<<< + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t + */ +typedef npy_int16 __pyx_t_5numpy_int16_t; + +/* "numpy.pxd":725 + * ctypedef npy_int8 int8_t + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t # <<<<<<<<<<<<<< + * ctypedef npy_int64 int64_t + * #ctypedef npy_int96 int96_t + */ +typedef npy_int32 __pyx_t_5numpy_int32_t; + +/* "numpy.pxd":726 + * ctypedef npy_int16 int16_t + * ctypedef npy_int32 int32_t + * ctypedef npy_int64 int64_t # <<<<<<<<<<<<<< + * #ctypedef npy_int96 int96_t + * #ctypedef npy_int128 int128_t + */ +typedef npy_int64 __pyx_t_5numpy_int64_t; + +/* "numpy.pxd":730 + * #ctypedef npy_int128 int128_t + * + * ctypedef npy_uint8 uint8_t # <<<<<<<<<<<<<< + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + */ +typedef npy_uint8 __pyx_t_5numpy_uint8_t; + +/* "numpy.pxd":731 + * + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t # <<<<<<<<<<<<<< + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t + */ +typedef npy_uint16 __pyx_t_5numpy_uint16_t; + +/* "numpy.pxd":732 + * ctypedef npy_uint8 uint8_t + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t # <<<<<<<<<<<<<< + * ctypedef npy_uint64 uint64_t + * #ctypedef npy_uint96 uint96_t + */ +typedef npy_uint32 __pyx_t_5numpy_uint32_t; + +/* "numpy.pxd":733 + * ctypedef npy_uint16 uint16_t + * ctypedef npy_uint32 uint32_t + * ctypedef npy_uint64 uint64_t # <<<<<<<<<<<<<< + * #ctypedef npy_uint96 uint96_t + * #ctypedef npy_uint128 uint128_t + */ +typedef npy_uint64 __pyx_t_5numpy_uint64_t; + +/* "numpy.pxd":737 + * #ctypedef npy_uint128 uint128_t + * + * ctypedef npy_float32 float32_t # <<<<<<<<<<<<<< + * ctypedef npy_float64 float64_t + * #ctypedef npy_float80 float80_t 
+ */ +typedef npy_float32 __pyx_t_5numpy_float32_t; + +/* "numpy.pxd":738 + * + * ctypedef npy_float32 float32_t + * ctypedef npy_float64 float64_t # <<<<<<<<<<<<<< + * #ctypedef npy_float80 float80_t + * #ctypedef npy_float128 float128_t + */ +typedef npy_float64 __pyx_t_5numpy_float64_t; + +/* "numpy.pxd":747 + * # The int types are mapped a bit surprising -- + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t + */ +typedef npy_long __pyx_t_5numpy_int_t; + +/* "numpy.pxd":748 + * # numpy.int corresponds to 'l' and numpy.long to 'q' + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t # <<<<<<<<<<<<<< + * ctypedef npy_longlong longlong_t + * + */ +typedef npy_longlong __pyx_t_5numpy_long_t; + +/* "numpy.pxd":749 + * ctypedef npy_long int_t + * ctypedef npy_longlong long_t + * ctypedef npy_longlong longlong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_ulong uint_t + */ +typedef npy_longlong __pyx_t_5numpy_longlong_t; + +/* "numpy.pxd":751 + * ctypedef npy_longlong longlong_t + * + * ctypedef npy_ulong uint_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t + */ +typedef npy_ulong __pyx_t_5numpy_uint_t; + +/* "numpy.pxd":752 + * + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t # <<<<<<<<<<<<<< + * ctypedef npy_ulonglong ulonglong_t + * + */ +typedef npy_ulonglong __pyx_t_5numpy_ulong_t; + +/* "numpy.pxd":753 + * ctypedef npy_ulong uint_t + * ctypedef npy_ulonglong ulong_t + * ctypedef npy_ulonglong ulonglong_t # <<<<<<<<<<<<<< + * + * ctypedef npy_intp intp_t + */ +typedef npy_ulonglong __pyx_t_5numpy_ulonglong_t; + +/* "numpy.pxd":755 + * ctypedef npy_ulonglong ulonglong_t + * + * ctypedef npy_intp intp_t # <<<<<<<<<<<<<< + * ctypedef npy_uintp uintp_t + * + */ +typedef npy_intp __pyx_t_5numpy_intp_t; + +/* "numpy.pxd":756 + * + * ctypedef npy_intp intp_t + * ctypedef npy_uintp uintp_t # 
<<<<<<<<<<<<<< + * + * ctypedef npy_double float_t + */ +typedef npy_uintp __pyx_t_5numpy_uintp_t; + +/* "numpy.pxd":758 + * ctypedef npy_uintp uintp_t + * + * ctypedef npy_double float_t # <<<<<<<<<<<<<< + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t + */ +typedef npy_double __pyx_t_5numpy_float_t; + +/* "numpy.pxd":759 + * + * ctypedef npy_double float_t + * ctypedef npy_double double_t # <<<<<<<<<<<<<< + * ctypedef npy_longdouble longdouble_t + * + */ +typedef npy_double __pyx_t_5numpy_double_t; + +/* "numpy.pxd":760 + * ctypedef npy_double float_t + * ctypedef npy_double double_t + * ctypedef npy_longdouble longdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cfloat cfloat_t + */ +typedef npy_longdouble __pyx_t_5numpy_longdouble_t; + +/* "astropy/convolution/boundary_wrap.pyx":7 + * + * DTYPE = np.float + * ctypedef np.float_t DTYPE_t # <<<<<<<<<<<<<< + * + * cdef extern from "numpy/npy_math.h": + */ +typedef __pyx_t_5numpy_float_t __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t; +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< float > __pyx_t_float_complex; + #else + typedef float _Complex __pyx_t_float_complex; + #endif +#else + typedef struct { float real, imag; } __pyx_t_float_complex; +#endif + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + typedef ::std::complex< double > __pyx_t_double_complex; + #else + typedef double _Complex __pyx_t_double_complex; + #endif +#else + typedef struct { double real, imag; } __pyx_t_double_complex; +#endif + + +/*--- Type declarations ---*/ + +/* "numpy.pxd":762 + * ctypedef npy_longdouble longdouble_t + * + * ctypedef npy_cfloat cfloat_t # <<<<<<<<<<<<<< + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t + */ +typedef npy_cfloat __pyx_t_5numpy_cfloat_t; + +/* "numpy.pxd":763 + * + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t # <<<<<<<<<<<<<< + * ctypedef npy_clongdouble clongdouble_t + * + */ +typedef npy_cdouble 
__pyx_t_5numpy_cdouble_t; + +/* "numpy.pxd":764 + * ctypedef npy_cfloat cfloat_t + * ctypedef npy_cdouble cdouble_t + * ctypedef npy_clongdouble clongdouble_t # <<<<<<<<<<<<<< + * + * ctypedef npy_cdouble complex_t + */ +typedef npy_clongdouble __pyx_t_5numpy_clongdouble_t; + +/* "numpy.pxd":766 + * ctypedef npy_clongdouble clongdouble_t + * + * ctypedef npy_cdouble complex_t # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew1(a): + */ +typedef npy_cdouble __pyx_t_5numpy_complex_t; +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/ + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil) \ + if (acquire_gil) { \ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure(); \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \ + PyGILState_Release(__pyx_gilstate_save); \ + } else { \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__); \ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil) \ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext() \ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) 
+ #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif /* CYTHON_REFNANNY */ +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/ + +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/ + +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /*proto*/ + +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[], \ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, \ + const char* function_name); /*proto*/ + +static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact); /*proto*/ + +static CYTHON_INLINE int __Pyx_GetBufferAndValidate(Py_buffer* buf, PyObject* obj, + __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack); +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info); + +static CYTHON_INLINE long 
__Pyx_mod_long(long, long); /* proto */ + +static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/ +static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ + +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /*proto*/ + +static CYTHON_INLINE long __Pyx_div_long(long, long); /* proto */ + +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/ + +#define __Pyx_BufPtrStrided1d(type, buf, i0, s0) (type)((char*)buf + i0 * s0) +static CYTHON_INLINE int __Pyx_mod_int(int, int); /* proto */ + +#define __Pyx_BufPtrStrided2d(type, buf, i0, s0, i1, s1) (type)((char*)buf + i0 * s0 + i1 * s1) +#define __Pyx_BufPtrStrided3d(type, buf, i0, s0, i1, s1, i2, s2) (type)((char*)buf + i0 * s0 + i1 * s1 + i2 * s2) +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +static CYTHON_INLINE int __Pyx_IterFinish(void); /*proto*/ + +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /*proto*/ + +typedef struct { + Py_ssize_t shape, strides, suboffsets; +} __Pyx_Buf_DimInfo; +typedef struct { + size_t refcount; + Py_buffer pybuffer; +} __Pyx_Buffer; +typedef struct { + __Pyx_Buffer *rcbuffer; + char *data; + __Pyx_Buf_DimInfo diminfo[8]; +} __Pyx_LocalBuf_ND; + +#if PY_MAJOR_VERSION < 3 + static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags); + static void __Pyx_ReleaseBuffer(Py_buffer *view); +#else + #define __Pyx_GetBuffer PyObject_GetBuffer + #define __Pyx_ReleaseBuffer PyBuffer_Release +#endif + + +static Py_ssize_t __Pyx_zeros[] = {0, 0, 0, 0, 0, 0, 0, 0}; +static Py_ssize_t __Pyx_minusones[] = {-1, -1, -1, -1, -1, -1, -1, -1}; + +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); 
/*proto*/ + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + #define __Pyx_CREAL(z) ((z).real()) + #define __Pyx_CIMAG(z) ((z).imag()) + #else + #define __Pyx_CREAL(z) (__real__(z)) + #define __Pyx_CIMAG(z) (__imag__(z)) + #endif +#else + #define __Pyx_CREAL(z) ((z).real) + #define __Pyx_CIMAG(z) ((z).imag) +#endif +#if defined(_WIN32) && defined(__cplusplus) && CYTHON_CCOMPLEX + #define __Pyx_SET_CREAL(z,x) ((z).real(x)) + #define __Pyx_SET_CIMAG(z,y) ((z).imag(y)) +#else + #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x) + #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y) +#endif + +static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float); + +#if CYTHON_CCOMPLEX + #define __Pyx_c_eqf(a, b) ((a)==(b)) + #define __Pyx_c_sumf(a, b) ((a)+(b)) + #define __Pyx_c_difff(a, b) ((a)-(b)) + #define __Pyx_c_prodf(a, b) ((a)*(b)) + #define __Pyx_c_quotf(a, b) ((a)/(b)) + #define __Pyx_c_negf(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zerof(z) ((z)==(float)0) + #define __Pyx_c_conjf(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_absf(z) (::std::abs(z)) + #define __Pyx_c_powf(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zerof(z) ((z)==0) + #define __Pyx_c_conjf(z) (conjf(z)) + #if 1 + #define __Pyx_c_absf(z) (cabsf(z)) + #define __Pyx_c_powf(a, b) (cpowf(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eqf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sumf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_difff(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prodf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quotf(__pyx_t_float_complex, __pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_negf(__pyx_t_float_complex); + static CYTHON_INLINE int 
__Pyx_c_is_zerof(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conjf(__pyx_t_float_complex); + #if 1 + static CYTHON_INLINE float __Pyx_c_absf(__pyx_t_float_complex); + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_powf(__pyx_t_float_complex, __pyx_t_float_complex); + #endif +#endif + +static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double); + +#if CYTHON_CCOMPLEX + #define __Pyx_c_eq(a, b) ((a)==(b)) + #define __Pyx_c_sum(a, b) ((a)+(b)) + #define __Pyx_c_diff(a, b) ((a)-(b)) + #define __Pyx_c_prod(a, b) ((a)*(b)) + #define __Pyx_c_quot(a, b) ((a)/(b)) + #define __Pyx_c_neg(a) (-(a)) + #ifdef __cplusplus + #define __Pyx_c_is_zero(z) ((z)==(double)0) + #define __Pyx_c_conj(z) (::std::conj(z)) + #if 1 + #define __Pyx_c_abs(z) (::std::abs(z)) + #define __Pyx_c_pow(a, b) (::std::pow(a, b)) + #endif + #else + #define __Pyx_c_is_zero(z) ((z)==0) + #define __Pyx_c_conj(z) (conj(z)) + #if 1 + #define __Pyx_c_abs(z) (cabs(z)) + #define __Pyx_c_pow(a, b) (cpow(a, b)) + #endif + #endif +#else + static CYTHON_INLINE int __Pyx_c_eq(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot(__pyx_t_double_complex, __pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg(__pyx_t_double_complex); + static CYTHON_INLINE int __Pyx_c_is_zero(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj(__pyx_t_double_complex); + #if 1 + static CYTHON_INLINE double __Pyx_c_abs(__pyx_t_double_complex); + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow(__pyx_t_double_complex, 
__pyx_t_double_complex); + #endif +#endif + +static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject *); + +static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject *); + +static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject *); + +static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject *); + +static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject *); + +static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject *); + +static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject *); + +static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject *); + +static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject *); + +static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject *); + +static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject *); + +static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject *); + +static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject *); + +static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject *); + +static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject *); + +static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject *); + +static int __Pyx_check_binary_version(void); + +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif +#endif + +static PyObject *__Pyx_ImportModule(const char *name); /*proto*/ + +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); /*proto*/ + +typedef struct { + int code_line; + PyCodeObject* code_object; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int 
__pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); /*proto*/ + +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/ + + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'libc.stdlib' */ + +/* Module declarations from 'numpy' */ + +/* Module declarations from 'numpy' */ +static PyTypeObject *__pyx_ptype_5numpy_dtype = 0; +static PyTypeObject *__pyx_ptype_5numpy_flatiter = 0; +static PyTypeObject *__pyx_ptype_5numpy_broadcast = 0; +static PyTypeObject *__pyx_ptype_5numpy_ndarray = 0; +static PyTypeObject *__pyx_ptype_5numpy_ufunc = 0; +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/ + +/* Module declarations from 'cython' */ + +/* Module declarations from 'astropy.convolution.boundary_wrap' */ +static __Pyx_TypeInfo __Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t = { "DTYPE_t", NULL, sizeof(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t), { 0 }, 0, 'R', 0, 0 }; +#define __Pyx_MODULE_NAME "astropy.convolution.boundary_wrap" +int __pyx_module_is_main_astropy__convolution__boundary_wrap = 0; + +/* Implementation of 'astropy.convolution.boundary_wrap' */ +static PyObject *__pyx_builtin_ValueError; +static PyObject *__pyx_builtin_range; +static PyObject *__pyx_builtin_RuntimeError; +static PyObject 
*__pyx_pf_7astropy_11convolution_13boundary_wrap_convolve1d_boundary_wrap(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g); /* proto */ +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_wrap_2convolve2d_boundary_wrap(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g); /* proto */ +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_wrap_4convolve3d_boundary_wrap(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g); /* proto */ +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */ +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */ +static char __pyx_k_1[] = "Convolution kernel must have odd dimensions"; +static char __pyx_k_5[] = "ndarray is not C contiguous"; +static char __pyx_k_7[] = "ndarray is not Fortran contiguous"; +static char __pyx_k_9[] = "Non-native byte order not supported"; +static char __pyx_k_11[] = "unknown dtype code in numpy.pxd (%d)"; +static char __pyx_k_12[] = "Format string allocated too short, see comment in numpy.pxd"; +static char __pyx_k_15[] = "Format string allocated too short."; +static char __pyx_k_19[] = "convolve1d_boundary_wrap"; +static char __pyx_k_20[] = "/internal/1/root/src/astropy/astropy/astropy/convolution/boundary_wrap.pyx"; +static char __pyx_k_21[] = "astropy.convolution.boundary_wrap"; +static char __pyx_k_24[] = "convolve2d_boundary_wrap"; +static char __pyx_k_27[] = "convolve3d_boundary_wrap"; +static char __pyx_k__B[] = "B"; +static char __pyx_k__H[] = "H"; +static char __pyx_k__I[] = "I"; +static char __pyx_k__L[] = "L"; +static char __pyx_k__O[] = "O"; +static char __pyx_k__Q[] = "Q"; +static char __pyx_k__b[] = "b"; +static char __pyx_k__d[] = "d"; +static char __pyx_k__f[] = "f"; +static char __pyx_k__g[] = "g"; +static char 
__pyx_k__h[] = "h"; +static char __pyx_k__i[] = "i"; +static char __pyx_k__j[] = "j"; +static char __pyx_k__k[] = "k"; +static char __pyx_k__l[] = "l"; +static char __pyx_k__q[] = "q"; +static char __pyx_k__Zd[] = "Zd"; +static char __pyx_k__Zf[] = "Zf"; +static char __pyx_k__Zg[] = "Zg"; +static char __pyx_k__ii[] = "ii"; +static char __pyx_k__jj[] = "jj"; +static char __pyx_k__kk[] = "kk"; +static char __pyx_k__np[] = "np"; +static char __pyx_k__nx[] = "nx"; +static char __pyx_k__ny[] = "ny"; +static char __pyx_k__nz[] = "nz"; +static char __pyx_k__bot[] = "bot"; +static char __pyx_k__iii[] = "iii"; +static char __pyx_k__jjj[] = "jjj"; +static char __pyx_k__ker[] = "ker"; +static char __pyx_k__kkk[] = "kkk"; +static char __pyx_k__nkx[] = "nkx"; +static char __pyx_k__nky[] = "nky"; +static char __pyx_k__nkz[] = "nkz"; +static char __pyx_k__top[] = "top"; +static char __pyx_k__val[] = "val"; +static char __pyx_k__wkx[] = "wkx"; +static char __pyx_k__wky[] = "wky"; +static char __pyx_k__wkz[] = "wkz"; +static char __pyx_k__conv[] = "conv"; +static char __pyx_k__DTYPE[] = "DTYPE"; +static char __pyx_k__dtype[] = "dtype"; +static char __pyx_k__empty[] = "empty"; +static char __pyx_k__fixed[] = "fixed"; +static char __pyx_k__float[] = "float"; +static char __pyx_k__iimax[] = "iimax"; +static char __pyx_k__iimin[] = "iimin"; +static char __pyx_k__jjmax[] = "jjmax"; +static char __pyx_k__jjmin[] = "jjmin"; +static char __pyx_k__kkmax[] = "kkmax"; +static char __pyx_k__kkmin[] = "kkmin"; +static char __pyx_k__numpy[] = "numpy"; +static char __pyx_k__range[] = "range"; +static char __pyx_k____main__[] = "__main__"; +static char __pyx_k____test__[] = "__test__"; +static char __pyx_k__ValueError[] = "ValueError"; +static char __pyx_k__RuntimeError[] = "RuntimeError"; +static PyObject *__pyx_kp_s_1; +static PyObject *__pyx_kp_u_11; +static PyObject *__pyx_kp_u_12; +static PyObject *__pyx_kp_u_15; +static PyObject *__pyx_n_s_19; +static PyObject *__pyx_kp_s_20; +static 
PyObject *__pyx_n_s_21; +static PyObject *__pyx_n_s_24; +static PyObject *__pyx_n_s_27; +static PyObject *__pyx_kp_u_5; +static PyObject *__pyx_kp_u_7; +static PyObject *__pyx_kp_u_9; +static PyObject *__pyx_n_s__DTYPE; +static PyObject *__pyx_n_s__RuntimeError; +static PyObject *__pyx_n_s__ValueError; +static PyObject *__pyx_n_s____main__; +static PyObject *__pyx_n_s____test__; +static PyObject *__pyx_n_s__bot; +static PyObject *__pyx_n_s__conv; +static PyObject *__pyx_n_s__dtype; +static PyObject *__pyx_n_s__empty; +static PyObject *__pyx_n_s__f; +static PyObject *__pyx_n_s__fixed; +static PyObject *__pyx_n_s__float; +static PyObject *__pyx_n_s__g; +static PyObject *__pyx_n_s__i; +static PyObject *__pyx_n_s__ii; +static PyObject *__pyx_n_s__iii; +static PyObject *__pyx_n_s__iimax; +static PyObject *__pyx_n_s__iimin; +static PyObject *__pyx_n_s__j; +static PyObject *__pyx_n_s__jj; +static PyObject *__pyx_n_s__jjj; +static PyObject *__pyx_n_s__jjmax; +static PyObject *__pyx_n_s__jjmin; +static PyObject *__pyx_n_s__k; +static PyObject *__pyx_n_s__ker; +static PyObject *__pyx_n_s__kk; +static PyObject *__pyx_n_s__kkk; +static PyObject *__pyx_n_s__kkmax; +static PyObject *__pyx_n_s__kkmin; +static PyObject *__pyx_n_s__nkx; +static PyObject *__pyx_n_s__nky; +static PyObject *__pyx_n_s__nkz; +static PyObject *__pyx_n_s__np; +static PyObject *__pyx_n_s__numpy; +static PyObject *__pyx_n_s__nx; +static PyObject *__pyx_n_s__ny; +static PyObject *__pyx_n_s__nz; +static PyObject *__pyx_n_s__range; +static PyObject *__pyx_n_s__top; +static PyObject *__pyx_n_s__val; +static PyObject *__pyx_n_s__wkx; +static PyObject *__pyx_n_s__wky; +static PyObject *__pyx_n_s__wkz; +static PyObject *__pyx_int_15; +static PyObject *__pyx_k_tuple_2; +static PyObject *__pyx_k_tuple_3; +static PyObject *__pyx_k_tuple_4; +static PyObject *__pyx_k_tuple_6; +static PyObject *__pyx_k_tuple_8; +static PyObject *__pyx_k_tuple_10; +static PyObject *__pyx_k_tuple_13; +static PyObject *__pyx_k_tuple_14; 
+static PyObject *__pyx_k_tuple_16; +static PyObject *__pyx_k_tuple_17; +static PyObject *__pyx_k_tuple_22; +static PyObject *__pyx_k_tuple_25; +static PyObject *__pyx_k_codeobj_18; +static PyObject *__pyx_k_codeobj_23; +static PyObject *__pyx_k_codeobj_26; + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_wrap_1convolve1d_boundary_wrap(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_13boundary_wrap_1convolve1d_boundary_wrap = {__Pyx_NAMESTR("convolve1d_boundary_wrap"), (PyCFunction)__pyx_pw_7astropy_11convolution_13boundary_wrap_1convolve1d_boundary_wrap, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_wrap_1convolve1d_boundary_wrap(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve1d_boundary_wrap (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve1d_boundary_wrap", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if 
(unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve1d_boundary_wrap") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve1d_boundary_wrap", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_wrap.convolve1d_boundary_wrap", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 17; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_13boundary_wrap_convolve1d_boundary_wrap(__pyx_self, __pyx_v_f, __pyx_v_g); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_wrap.pyx":16 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_wrap(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=1] g): + * + */ + +static PyObject 
*__pyx_pf_7astropy_11convolution_13boundary_wrap_convolve1d_boundary_wrap(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g) { + int __pyx_v_nx; + int __pyx_v_nkx; + int __pyx_v_wkx; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_iii; + int __pyx_v_ii; + int __pyx_v_iimin; + int __pyx_v_iimax; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyArrayObject *__pyx_t_8 = NULL; + PyArrayObject *__pyx_t_9 = NULL; + int __pyx_t_10; + unsigned int __pyx_t_11; + unsigned int __pyx_t_12; + int __pyx_t_13; + int __pyx_t_14; + unsigned int __pyx_t_15; + unsigned int __pyx_t_16; + unsigned int __pyx_t_17; + unsigned int __pyx_t_18; + unsigned int __pyx_t_19; + unsigned int __pyx_t_20; + unsigned int __pyx_t_21; + unsigned int __pyx_t_22; + unsigned int __pyx_t_23; + unsigned int __pyx_t_24; + unsigned int __pyx_t_25; + unsigned int __pyx_t_26; + unsigned int __pyx_t_27; + unsigned int __pyx_t_28; + unsigned int __pyx_t_29; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve1d_boundary_wrap", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + 
__pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + __pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; + + /* "astropy/convolution/boundary_wrap.pyx":19 + * np.ndarray[DTYPE_t, ndim=1] g): + * + * if g.shape[0] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel must have odd dimensions") + * + */ + __pyx_t_1 = 
(__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (__pyx_t_1) { + + /* "astropy/convolution/boundary_wrap.pyx":20 + * + * if g.shape[0] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_2 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_2), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __Pyx_Raise(__pyx_t_2, 0, 0, 0); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_wrap.pyx":22 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_2 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_2, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_1) { + __pyx_t_4 = 
PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = PyObject_RichCompare(__pyx_t_4, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_5 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_6 = __pyx_t_5; + } else { + __pyx_t_6 = __pyx_t_1; + } + if (unlikely(!__pyx_t_6)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_wrap.pyx":24 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_wrap.pyx":25 + * + * cdef int nx = f.shape[0] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_wrap.pyx":26 + * cdef int nx = f.shape[0] + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], 
dtype=DTYPE) + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_wrap.pyx":27 + * cdef int nkx = g.shape[0] + * cdef int wkx = nkx // 2 + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) + * cdef unsigned int i, iii + */ + __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__empty); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = PyList_New(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_4, 0, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_2); + __pyx_t_2 = 0; + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_t_4)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_4)); + __pyx_t_4 = 0; + __pyx_t_4 = PyDict_New(); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_4)); + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + if 
(PyDict_SetItem(__pyx_t_4, ((PyObject *)__pyx_n_s__dtype), __pyx_t_7) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), ((PyObject *)__pyx_t_4)); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; + if (!(likely(((__pyx_t_7) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_7, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_8 = ((PyArrayObject *)__pyx_t_7); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_8, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 1, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; + } + } + __pyx_t_8 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":28 + * cdef int wkx = nkx // 2 + * cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef unsigned int i, iii + * cdef int ii + */ + __pyx_t_7 = __Pyx_GetName(__pyx_m, 
__pyx_n_s__np); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = PyObject_GetAttr(__pyx_t_7, __pyx_n_s__empty); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + __pyx_t_7 = 0; + __pyx_t_7 = PyTuple_New(1); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_7, 0, ((PyObject *)__pyx_t_2)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_2)); + __pyx_t_2 = 0; + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_2)); + __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_2, ((PyObject *)__pyx_n_s__dtype), __pyx_t_3) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_7), ((PyObject *)__pyx_t_2)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_7)); __pyx_t_7 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_9 = ((PyArrayObject *)__pyx_t_3); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_9, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 1, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; + } + } + __pyx_t_9 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_3); + __pyx_t_3 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":38 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * if npy_isnan(f[i]): + * top = 0. + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_wrap.pyx":39 + * # neighboring values + * for i in range(nx): + * if npy_isnan(f[i]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. 
+ */ + __pyx_t_12 = __pyx_v_i; + __pyx_t_6 = npy_isnan((*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_12, __pyx_pybuffernd_f.diminfo[0].strides))); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_wrap.pyx":40 + * for i in range(nx): + * if npy_isnan(f[i]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":41 + * if npy_isnan(f[i]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":42 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_wrap.pyx":43 + * bot = 0. + * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * iii = ii % nx + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":44 + * iimin = i - wkx + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * iii = ii % nx + * val = f[iii] + */ + __pyx_t_13 = __pyx_v_iimax; + for (__pyx_t_14 = __pyx_v_iimin; __pyx_t_14 < __pyx_t_13; __pyx_t_14+=1) { + __pyx_v_ii = __pyx_t_14; + + /* "astropy/convolution/boundary_wrap.pyx":45 + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + * iii = ii % nx # <<<<<<<<<<<<<< + * val = f[iii] + * if not npy_isnan(val): + */ + if (unlikely(__pyx_v_nx == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_iii = __Pyx_mod_int(__pyx_v_ii, __pyx_v_nx); + + /* "astropy/convolution/boundary_wrap.pyx":46 + * for ii in range(iimin, iimax): + * iii = ii % nx + * val = f[iii] # 
<<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] + */ + __pyx_t_15 = __pyx_v_iii; + __pyx_v_val = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_15, __pyx_pybuffernd_f.diminfo[0].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":47 + * iii = ii % nx + * val = f[iii] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i)] + * top += val * ker + */ + __pyx_t_6 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_wrap.pyx":48 + * val = f[iii] + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_16 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_v_ker = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_16, __pyx_pybuffernd_g.diminfo[0].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":49 + * if not npy_isnan(val): + * ker = g[(wkx + ii - i)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_wrap.pyx":50 + * ker = g[(wkx + ii - i)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * + * if bot != 0.: + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L9; + } + __pyx_L9:; + } + + /* "astropy/convolution/boundary_wrap.pyx":52 + * bot += ker + * + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i] = top / bot + * else: + */ + __pyx_t_6 = (__pyx_v_bot != 0.); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_wrap.pyx":53 + * + * if bot != 0.: + * fixed[i] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i] = f[i] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 53; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + 
} + __pyx_t_17 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_17, __pyx_pybuffernd_fixed.diminfo[0].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L10; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":55 + * fixed[i] = top / bot + * else: + * fixed[i] = f[i] # <<<<<<<<<<<<<< + * else: + * fixed[i] = f[i] + */ + __pyx_t_18 = __pyx_v_i; + __pyx_t_19 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_19, __pyx_pybuffernd_fixed.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_18, __pyx_pybuffernd_f.diminfo[0].strides)); + } + __pyx_L10:; + goto __pyx_L6; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":57 + * fixed[i] = f[i] + * else: + * fixed[i] = f[i] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_20 = __pyx_v_i; + __pyx_t_21 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_21, __pyx_pybuffernd_fixed.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_20, __pyx_pybuffernd_f.diminfo[0].strides)); + } + __pyx_L6:; + } + + /* "astropy/convolution/boundary_wrap.pyx":60 + * + * # Now run the proper convolution + * for i in range(nx): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i]): + * top = 0. + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_wrap.pyx":61 + * # Now run the proper convolution + * for i in range(nx): + * if not npy_isnan(fixed[i]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. 
+ */ + __pyx_t_22 = __pyx_v_i; + __pyx_t_6 = (!npy_isnan((*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_22, __pyx_pybuffernd_fixed.diminfo[0].strides)))); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_wrap.pyx":62 + * for i in range(nx): + * if not npy_isnan(fixed[i]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":63 + * if not npy_isnan(fixed[i]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":64 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_wrap.pyx":65 + * bot = 0. + * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * iii = ii % nx + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":66 + * iimin = i - wkx + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * iii = ii % nx + * val = fixed[iii] + */ + __pyx_t_13 = __pyx_v_iimax; + for (__pyx_t_14 = __pyx_v_iimin; __pyx_t_14 < __pyx_t_13; __pyx_t_14+=1) { + __pyx_v_ii = __pyx_t_14; + + /* "astropy/convolution/boundary_wrap.pyx":67 + * iimax = i + wkx + 1 + * for ii in range(iimin, iimax): + * iii = ii % nx # <<<<<<<<<<<<<< + * val = fixed[iii] + * ker = g[(wkx + ii - i)] + */ + if (unlikely(__pyx_v_nx == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 67; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_iii = __Pyx_mod_int(__pyx_v_ii, __pyx_v_nx); + + /* "astropy/convolution/boundary_wrap.pyx":68 + * for ii in range(iimin, iimax): + * iii = ii 
% nx + * val = fixed[iii] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): + */ + __pyx_t_23 = __pyx_v_iii; + __pyx_v_val = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_23, __pyx_pybuffernd_fixed.diminfo[0].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":69 + * iii = ii % nx + * val = fixed[iii] + * ker = g[(wkx + ii - i)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_24 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_v_ker = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_24, __pyx_pybuffernd_g.diminfo[0].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":70 + * val = fixed[iii] + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_6 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_wrap.pyx":71 + * ker = g[(wkx + ii - i)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_wrap.pyx":72 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + * conv[i] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L16; + } + __pyx_L16:; + } + + /* "astropy/convolution/boundary_wrap.pyx":73 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i] = top / bot + * else: + */ + __pyx_t_6 = (__pyx_v_bot != 0.0); + if (__pyx_t_6) { + + /* "astropy/convolution/boundary_wrap.pyx":74 + * bot += ker + * if bot != 0: + * conv[i] = top / bot # <<<<<<<<<<<<<< + * else: + * conv[i] = fixed[i] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float 
division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 74; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_25 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_25, __pyx_pybuffernd_conv.diminfo[0].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L17; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":76 + * conv[i] = top / bot + * else: + * conv[i] = fixed[i] # <<<<<<<<<<<<<< + * else: + * conv[i] = fixed[i] + */ + __pyx_t_26 = __pyx_v_i; + __pyx_t_27 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_27, __pyx_pybuffernd_conv.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_26, __pyx_pybuffernd_fixed.diminfo[0].strides)); + } + __pyx_L17:; + goto __pyx_L13; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":78 + * conv[i] = fixed[i] + * else: + * conv[i] = fixed[i] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_28 = __pyx_v_i; + __pyx_t_29 = __pyx_v_i; + *__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_29, __pyx_pybuffernd_conv.diminfo[0].strides) = (*__Pyx_BufPtrStrided1d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_28, __pyx_pybuffernd_fixed.diminfo[0].strides)); + } + __pyx_L13:; + } + + /* "astropy/convolution/boundary_wrap.pyx":80 + * conv[i] = fixed[i] + * + * return conv # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + 
__Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_7); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_wrap.convolve1d_boundary_wrap", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_wrap_3convolve2d_boundary_wrap(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_13boundary_wrap_3convolve2d_boundary_wrap = {__Pyx_NAMESTR("convolve2d_boundary_wrap"), (PyCFunction)__pyx_pw_7astropy_11convolution_13boundary_wrap_3convolve2d_boundary_wrap, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_wrap_3convolve2d_boundary_wrap(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve2d_boundary_wrap (wrapper)", 0); + { + static PyObject 
**__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve2d_boundary_wrap", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "convolve2d_boundary_wrap") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve2d_boundary_wrap", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_wrap.convolve2d_boundary_wrap", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 
0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 85; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_13boundary_wrap_2convolve2d_boundary_wrap(__pyx_self, __pyx_v_f, __pyx_v_g); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_wrap.pyx":84 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_wrap(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g): + * + */ + +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_wrap_2convolve2d_boundary_wrap(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject *__pyx_v_g) { + int __pyx_v_nx; + int __pyx_v_ny; + int __pyx_v_nkx; + int __pyx_v_nky; + int __pyx_v_wkx; + int __pyx_v_wky; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_j; + unsigned int __pyx_v_iii; + unsigned int __pyx_v_jjj; + int __pyx_v_ii; + int __pyx_v_jj; + int __pyx_v_iimin; + int __pyx_v_iimax; + int __pyx_v_jjmin; + int __pyx_v_jjmax; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r 
= NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyArrayObject *__pyx_t_8 = NULL; + PyArrayObject *__pyx_t_9 = NULL; + int __pyx_t_10; + unsigned int __pyx_t_11; + int __pyx_t_12; + unsigned int __pyx_t_13; + unsigned int __pyx_t_14; + unsigned int __pyx_t_15; + int __pyx_t_16; + int __pyx_t_17; + int __pyx_t_18; + int __pyx_t_19; + unsigned int __pyx_t_20; + unsigned int __pyx_t_21; + unsigned int __pyx_t_22; + unsigned int __pyx_t_23; + unsigned int __pyx_t_24; + unsigned int __pyx_t_25; + unsigned int __pyx_t_26; + unsigned int __pyx_t_27; + unsigned int __pyx_t_28; + unsigned int __pyx_t_29; + unsigned int __pyx_t_30; + unsigned int __pyx_t_31; + unsigned int __pyx_t_32; + unsigned int __pyx_t_33; + unsigned int __pyx_t_34; + unsigned int __pyx_t_35; + unsigned int __pyx_t_36; + unsigned int __pyx_t_37; + unsigned int __pyx_t_38; + unsigned int __pyx_t_39; + unsigned int __pyx_t_40; + unsigned int __pyx_t_41; + unsigned int __pyx_t_42; + unsigned int __pyx_t_43; + unsigned int __pyx_t_44; + unsigned int __pyx_t_45; + unsigned int __pyx_t_46; + unsigned int __pyx_t_47; + unsigned int __pyx_t_48; + unsigned int __pyx_t_49; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve2d_boundary_wrap", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + __pyx_pybuffer_g.pybuffer.buf = NULL; 
+ __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_f.diminfo[1].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_f.diminfo[1].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[1]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_g.diminfo[1].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_g.diminfo[1].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[1]; + + /* "astropy/convolution/boundary_wrap.pyx":87 + * np.ndarray[DTYPE_t, ndim=2] g): + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (!__pyx_t_1) { + __pyx_t_2 = (__Pyx_mod_long((__pyx_v_g->dimensions[1]), 2) != 1); + __pyx_t_3 = 
__pyx_t_2; + } else { + __pyx_t_3 = __pyx_t_1; + } + if (__pyx_t_3) { + + /* "astropy/convolution/boundary_wrap.pyx":88 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_3), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_wrap.pyx":90 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_4 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 90; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 90; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyObject_RichCompare(__pyx_t_4, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 90; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 90; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_3) { + 
__pyx_t_6 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 90; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 90; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_6, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 90; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 90; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_3; + } + if (unlikely(!__pyx_t_2)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 90; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_wrap.pyx":92 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_wrap.pyx":93 + * + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + */ + __pyx_v_ny = (__pyx_v_f->dimensions[1]); + + /* "astropy/convolution/boundary_wrap.pyx":94 + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_wrap.pyx":95 + * 
cdef int ny = f.shape[1] + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + */ + __pyx_v_nky = (__pyx_v_g->dimensions[1]); + + /* "astropy/convolution/boundary_wrap.pyx":96 + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef int wky = nky // 2 + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_wrap.pyx":97 + * cdef int nky = g.shape[1] + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) + */ + __pyx_v_wky = __Pyx_div_long(__pyx_v_nky, 2); + + /* "astropy/convolution/boundary_wrap.pyx":98 + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) + * cdef unsigned int i, j, iii, jjj + */ + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = PyObject_GetAttr(__pyx_t_4, __pyx_n_s__empty); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PyList_New(2); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_7, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_7, 1, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_4 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_7)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_7)); + __pyx_t_7 = 0; + __pyx_t_7 = PyDict_New(); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_7)); + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + if (PyDict_SetItem(__pyx_t_7, ((PyObject *)__pyx_n_s__dtype), __pyx_t_4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyObject_Call(__pyx_t_5, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_7)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_7)); __pyx_t_7 = 0; + if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_8 = ((PyArrayObject *)__pyx_t_4); + { + __Pyx_BufFmt_StackElem 
__pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_8, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 2, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_fixed.diminfo[1].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_fixed.diminfo[1].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[1]; + } + } + __pyx_t_8 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":99 + * cdef int wky = nky // 2 + * cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef unsigned int i, j, iii, jjj + * cdef int ii, jj + */ + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = PyObject_GetAttr(__pyx_t_4, __pyx_n_s__empty); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = PyInt_FromLong(__pyx_v_ny); if 
(unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyList_New(2); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_4); + PyList_SET_ITEM(__pyx_t_5, 1, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_4 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + if (PyDict_SetItem(__pyx_t_5, ((PyObject *)__pyx_n_s__dtype), __pyx_t_4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyObject_Call(__pyx_t_7, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_5)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0; + if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; 
goto __pyx_L1_error;} + __pyx_t_9 = ((PyArrayObject *)__pyx_t_4); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_9, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 2, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_conv.diminfo[1].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_conv.diminfo[1].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[1]; + } + } + __pyx_t_9 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":109 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * if npy_isnan(f[i, j]): + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_wrap.pyx":110 + * # neighboring values + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * if npy_isnan(f[i, j]): + * top = 0. + */ + __pyx_t_12 = __pyx_v_ny; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_j = __pyx_t_13; + + /* "astropy/convolution/boundary_wrap.pyx":111 + * for i in range(nx): + * for j in range(ny): + * if npy_isnan(f[i, j]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. 
+ */ + __pyx_t_14 = __pyx_v_i; + __pyx_t_15 = __pyx_v_j; + __pyx_t_2 = npy_isnan((*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_14, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_15, __pyx_pybuffernd_f.diminfo[1].strides))); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_wrap.pyx":112 + * for j in range(ny): + * if npy_isnan(f[i, j]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":113 + * if npy_isnan(f[i, j]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":114 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_wrap.pyx":115 + * bot = 0. + * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":116 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_wrap.pyx":117 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":118 + * jjmin = j - wky + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * iii = ii % nx + */ + __pyx_t_16 = __pyx_v_iimax; + for (__pyx_t_17 = __pyx_v_iimin; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_ii = __pyx_t_17; + + /* 
"astropy/convolution/boundary_wrap.pyx":119 + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * iii = ii % nx + * jjj = jj % ny + */ + __pyx_t_18 = __pyx_v_jjmax; + for (__pyx_t_19 = __pyx_v_jjmin; __pyx_t_19 < __pyx_t_18; __pyx_t_19+=1) { + __pyx_v_jj = __pyx_t_19; + + /* "astropy/convolution/boundary_wrap.pyx":120 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * iii = ii % nx # <<<<<<<<<<<<<< + * jjj = jj % ny + * val = f[iii, jjj] + */ + if (unlikely(__pyx_v_nx == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 120; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_iii = __Pyx_mod_int(__pyx_v_ii, __pyx_v_nx); + + /* "astropy/convolution/boundary_wrap.pyx":121 + * for jj in range(jjmin, jjmax): + * iii = ii % nx + * jjj = jj % ny # <<<<<<<<<<<<<< + * val = f[iii, jjj] + * if not npy_isnan(val): + */ + if (unlikely(__pyx_v_ny == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 121; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_jjj = __Pyx_mod_int(__pyx_v_jj, __pyx_v_ny); + + /* "astropy/convolution/boundary_wrap.pyx":122 + * iii = ii % nx + * jjj = jj % ny + * val = f[iii, jjj] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + */ + __pyx_t_20 = __pyx_v_iii; + __pyx_t_21 = __pyx_v_jjj; + __pyx_v_val = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_20, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_21, __pyx_pybuffernd_f.diminfo[1].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":123 + * jjj = jj % ny + * val = f[iii, jjj] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + */ + __pyx_t_2 = (!npy_isnan(__pyx_v_val)); + if 
(__pyx_t_2) { + + /* "astropy/convolution/boundary_wrap.pyx":125 + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + * (wky + jj - j)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_22 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_23 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_v_ker = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_22, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_23, __pyx_pybuffernd_g.diminfo[1].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":126 + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_wrap.pyx":127 + * (wky + jj - j)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * + * if bot != 0.: + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L13; + } + __pyx_L13:; + } + } + + /* "astropy/convolution/boundary_wrap.pyx":129 + * bot += ker + * + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i, j] = top / bot + * else: + */ + __pyx_t_2 = (__pyx_v_bot != 0.); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_wrap.pyx":130 + * + * if bot != 0.: + * fixed[i, j] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i, j] = f[i, j] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 130; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_24 = __pyx_v_i; + __pyx_t_25 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_24, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_25, __pyx_pybuffernd_fixed.diminfo[1].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L14; + } + /*else*/ { + + /* 
"astropy/convolution/boundary_wrap.pyx":132 + * fixed[i, j] = top / bot + * else: + * fixed[i, j] = f[i, j] # <<<<<<<<<<<<<< + * else: + * fixed[i, j] = f[i, j] + */ + __pyx_t_26 = __pyx_v_i; + __pyx_t_27 = __pyx_v_j; + __pyx_t_28 = __pyx_v_i; + __pyx_t_29 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_28, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_29, __pyx_pybuffernd_fixed.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_26, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_27, __pyx_pybuffernd_f.diminfo[1].strides)); + } + __pyx_L14:; + goto __pyx_L8; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":134 + * fixed[i, j] = f[i, j] + * else: + * fixed[i, j] = f[i, j] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_30 = __pyx_v_i; + __pyx_t_31 = __pyx_v_j; + __pyx_t_32 = __pyx_v_i; + __pyx_t_33 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_32, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_33, __pyx_pybuffernd_fixed.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_30, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_31, __pyx_pybuffernd_f.diminfo[1].strides)); + } + __pyx_L8:; + } + } + + /* "astropy/convolution/boundary_wrap.pyx":137 + * + * # Now run the proper convolution + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * if not npy_isnan(fixed[i, j]): + */ + __pyx_t_10 = __pyx_v_nx; + for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { + __pyx_v_i = __pyx_t_11; + + /* "astropy/convolution/boundary_wrap.pyx":138 + * # Now run the proper convolution + * for i in range(nx): + * 
for j in range(ny): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i, j]): + * top = 0. + */ + __pyx_t_12 = __pyx_v_ny; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_j = __pyx_t_13; + + /* "astropy/convolution/boundary_wrap.pyx":139 + * for i in range(nx): + * for j in range(ny): + * if not npy_isnan(fixed[i, j]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_34 = __pyx_v_i; + __pyx_t_35 = __pyx_v_j; + __pyx_t_2 = (!npy_isnan((*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_34, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_35, __pyx_pybuffernd_fixed.diminfo[1].strides)))); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_wrap.pyx":140 + * for j in range(ny): + * if not npy_isnan(fixed[i, j]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":141 + * if not npy_isnan(fixed[i, j]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":142 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_wrap.pyx":143 + * bot = 0. 
+ * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":144 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_wrap.pyx":145 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":146 + * jjmin = j - wky + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * iii = ii % nx + */ + __pyx_t_16 = __pyx_v_iimax; + for (__pyx_t_17 = __pyx_v_iimin; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_ii = __pyx_t_17; + + /* "astropy/convolution/boundary_wrap.pyx":147 + * jjmax = j + wky + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * iii = ii % nx + * jjj = jj % ny + */ + __pyx_t_18 = __pyx_v_jjmax; + for (__pyx_t_19 = __pyx_v_jjmin; __pyx_t_19 < __pyx_t_18; __pyx_t_19+=1) { + __pyx_v_jj = __pyx_t_19; + + /* "astropy/convolution/boundary_wrap.pyx":148 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * iii = ii % nx # <<<<<<<<<<<<<< + * jjj = jj % ny + * val = fixed[iii, jjj] + */ + if (unlikely(__pyx_v_nx == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 148; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_iii = __Pyx_mod_int(__pyx_v_ii, __pyx_v_nx); + + /* "astropy/convolution/boundary_wrap.pyx":149 + * for jj in range(jjmin, jjmax): + * iii = ii % nx + * jjj = jj % ny # <<<<<<<<<<<<<< + * val = fixed[iii, jjj] + * ker = g[(wkx 
+ ii - i), + */ + if (unlikely(__pyx_v_ny == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 149; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_jjj = __Pyx_mod_int(__pyx_v_jj, __pyx_v_ny); + + /* "astropy/convolution/boundary_wrap.pyx":150 + * iii = ii % nx + * jjj = jj % ny + * val = fixed[iii, jjj] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + */ + __pyx_t_36 = __pyx_v_iii; + __pyx_t_37 = __pyx_v_jjj; + __pyx_v_val = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_36, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_37, __pyx_pybuffernd_fixed.diminfo[1].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":152 + * val = fixed[iii, jjj] + * ker = g[(wkx + ii - i), + * (wky + jj - j)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_38 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_39 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_v_ker = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_38, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_39, __pyx_pybuffernd_g.diminfo[1].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":153 + * ker = g[(wkx + ii - i), + * (wky + jj - j)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_2 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_wrap.pyx":154 + * (wky + jj - j)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_wrap.pyx":155 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + 
* conv[i, j] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L24; + } + __pyx_L24:; + } + } + + /* "astropy/convolution/boundary_wrap.pyx":156 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i, j] = top / bot + * else: + */ + __pyx_t_2 = (__pyx_v_bot != 0.0); + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_wrap.pyx":157 + * bot += ker + * if bot != 0: + * conv[i, j] = top / bot # <<<<<<<<<<<<<< + * else: + * conv[i, j] = fixed[i, j] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 157; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_40 = __pyx_v_i; + __pyx_t_41 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_40, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_41, __pyx_pybuffernd_conv.diminfo[1].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L25; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":159 + * conv[i, j] = top / bot + * else: + * conv[i, j] = fixed[i, j] # <<<<<<<<<<<<<< + * else: + * conv[i, j] = fixed[i, j] + */ + __pyx_t_42 = __pyx_v_i; + __pyx_t_43 = __pyx_v_j; + __pyx_t_44 = __pyx_v_i; + __pyx_t_45 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_44, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_45, __pyx_pybuffernd_conv.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_42, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_43, __pyx_pybuffernd_fixed.diminfo[1].strides)); + } + __pyx_L25:; + goto __pyx_L19; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":161 + * conv[i, j] = fixed[i, j] + * else: + * conv[i, j] = fixed[i, j] # 
<<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_46 = __pyx_v_i; + __pyx_t_47 = __pyx_v_j; + __pyx_t_48 = __pyx_v_i; + __pyx_t_49 = __pyx_v_j; + *__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_48, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_49, __pyx_pybuffernd_conv.diminfo[1].strides) = (*__Pyx_BufPtrStrided2d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_46, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_47, __pyx_pybuffernd_fixed.diminfo[1].strides)); + } + __pyx_L19:; + } + } + + /* "astropy/convolution/boundary_wrap.pyx":163 + * conv[i, j] = fixed[i, j] + * + * return conv # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_wrap.convolve2d_boundary_wrap", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + 
__pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_wrap_5convolve3d_boundary_wrap(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_7astropy_11convolution_13boundary_wrap_5convolve3d_boundary_wrap = {__Pyx_NAMESTR("convolve3d_boundary_wrap"), (PyCFunction)__pyx_pw_7astropy_11convolution_13boundary_wrap_5convolve3d_boundary_wrap, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}; +static PyObject *__pyx_pw_7astropy_11convolution_13boundary_wrap_5convolve3d_boundary_wrap(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_f = 0; + PyArrayObject *__pyx_v_g = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("convolve3d_boundary_wrap (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__f,&__pyx_n_s__g,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__f)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__g)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("convolve3d_boundary_wrap", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, 
pos_args, "convolve3d_boundary_wrap") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_f = ((PyArrayObject *)values[0]); + __pyx_v_g = ((PyArrayObject *)values[1]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("convolve3d_boundary_wrap", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L3_error;} + __pyx_L3_error:; + __Pyx_AddTraceback("astropy.convolution.boundary_wrap.convolve3d_boundary_wrap", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_f), __pyx_ptype_5numpy_ndarray, 1, "f", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_g), __pyx_ptype_5numpy_ndarray, 1, "g", 0))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 168; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_r = __pyx_pf_7astropy_11convolution_13boundary_wrap_4convolve3d_boundary_wrap(__pyx_self, __pyx_v_f, __pyx_v_g); + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "astropy/convolution/boundary_wrap.pyx":167 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_wrap(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g): + * + */ + +static PyObject *__pyx_pf_7astropy_11convolution_13boundary_wrap_4convolve3d_boundary_wrap(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_f, PyArrayObject 
*__pyx_v_g) { + int __pyx_v_nx; + int __pyx_v_ny; + int __pyx_v_nz; + int __pyx_v_nkx; + int __pyx_v_nky; + int __pyx_v_nkz; + int __pyx_v_wkx; + int __pyx_v_wky; + int __pyx_v_wkz; + PyArrayObject *__pyx_v_fixed = 0; + PyArrayObject *__pyx_v_conv = 0; + unsigned int __pyx_v_i; + unsigned int __pyx_v_j; + unsigned int __pyx_v_k; + unsigned int __pyx_v_iii; + unsigned int __pyx_v_jjj; + unsigned int __pyx_v_kkk; + int __pyx_v_ii; + int __pyx_v_jj; + int __pyx_v_kk; + int __pyx_v_iimin; + int __pyx_v_iimax; + int __pyx_v_jjmin; + int __pyx_v_jjmax; + int __pyx_v_kkmin; + int __pyx_v_kkmax; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_top; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_bot; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_ker; + __pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t __pyx_v_val; + __Pyx_LocalBuf_ND __pyx_pybuffernd_conv; + __Pyx_Buffer __pyx_pybuffer_conv; + __Pyx_LocalBuf_ND __pyx_pybuffernd_f; + __Pyx_Buffer __pyx_pybuffer_f; + __Pyx_LocalBuf_ND __pyx_pybuffernd_fixed; + __Pyx_Buffer __pyx_pybuffer_fixed; + __Pyx_LocalBuf_ND __pyx_pybuffernd_g; + __Pyx_Buffer __pyx_pybuffer_g; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyArrayObject *__pyx_t_10 = NULL; + PyArrayObject *__pyx_t_11 = NULL; + int __pyx_t_12; + unsigned int __pyx_t_13; + int __pyx_t_14; + unsigned int __pyx_t_15; + int __pyx_t_16; + unsigned int __pyx_t_17; + unsigned int __pyx_t_18; + unsigned int __pyx_t_19; + unsigned int __pyx_t_20; + int __pyx_t_21; + int __pyx_t_22; + int __pyx_t_23; + int __pyx_t_24; + int __pyx_t_25; + int __pyx_t_26; + unsigned int __pyx_t_27; + unsigned int __pyx_t_28; + unsigned int __pyx_t_29; + unsigned int __pyx_t_30; + unsigned int __pyx_t_31; + unsigned 
int __pyx_t_32; + unsigned int __pyx_t_33; + unsigned int __pyx_t_34; + unsigned int __pyx_t_35; + unsigned int __pyx_t_36; + unsigned int __pyx_t_37; + unsigned int __pyx_t_38; + unsigned int __pyx_t_39; + unsigned int __pyx_t_40; + unsigned int __pyx_t_41; + unsigned int __pyx_t_42; + unsigned int __pyx_t_43; + unsigned int __pyx_t_44; + unsigned int __pyx_t_45; + unsigned int __pyx_t_46; + unsigned int __pyx_t_47; + unsigned int __pyx_t_48; + unsigned int __pyx_t_49; + unsigned int __pyx_t_50; + unsigned int __pyx_t_51; + unsigned int __pyx_t_52; + unsigned int __pyx_t_53; + unsigned int __pyx_t_54; + unsigned int __pyx_t_55; + unsigned int __pyx_t_56; + unsigned int __pyx_t_57; + unsigned int __pyx_t_58; + unsigned int __pyx_t_59; + unsigned int __pyx_t_60; + unsigned int __pyx_t_61; + unsigned int __pyx_t_62; + unsigned int __pyx_t_63; + unsigned int __pyx_t_64; + unsigned int __pyx_t_65; + unsigned int __pyx_t_66; + unsigned int __pyx_t_67; + unsigned int __pyx_t_68; + unsigned int __pyx_t_69; + unsigned int __pyx_t_70; + unsigned int __pyx_t_71; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("convolve3d_boundary_wrap", 0); + __pyx_pybuffer_fixed.pybuffer.buf = NULL; + __pyx_pybuffer_fixed.refcount = 0; + __pyx_pybuffernd_fixed.data = NULL; + __pyx_pybuffernd_fixed.rcbuffer = &__pyx_pybuffer_fixed; + __pyx_pybuffer_conv.pybuffer.buf = NULL; + __pyx_pybuffer_conv.refcount = 0; + __pyx_pybuffernd_conv.data = NULL; + __pyx_pybuffernd_conv.rcbuffer = &__pyx_pybuffer_conv; + __pyx_pybuffer_f.pybuffer.buf = NULL; + __pyx_pybuffer_f.refcount = 0; + __pyx_pybuffernd_f.data = NULL; + __pyx_pybuffernd_f.rcbuffer = &__pyx_pybuffer_f; + __pyx_pybuffer_g.pybuffer.buf = NULL; + __pyx_pybuffer_g.refcount = 0; + __pyx_pybuffernd_g.data = NULL; + __pyx_pybuffernd_g.rcbuffer = &__pyx_pybuffer_g; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if 
(unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_f.rcbuffer->pybuffer, (PyObject*)__pyx_v_f, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 3, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_f.diminfo[0].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_f.diminfo[0].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_f.diminfo[1].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_f.diminfo[1].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_f.diminfo[2].strides = __pyx_pybuffernd_f.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_f.diminfo[2].shape = __pyx_pybuffernd_f.rcbuffer->pybuffer.shape[2]; + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_g.rcbuffer->pybuffer, (PyObject*)__pyx_v_g, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES, 3, 0, __pyx_stack) == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_pybuffernd_g.diminfo[0].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_g.diminfo[0].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_g.diminfo[1].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_g.diminfo[1].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_g.diminfo[2].strides = __pyx_pybuffernd_g.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_g.diminfo[2].shape = __pyx_pybuffernd_g.rcbuffer->pybuffer.shape[2]; + + /* "astropy/convolution/boundary_wrap.pyx":170 + * np.ndarray[DTYPE_t, ndim=3] g): + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: # <<<<<<<<<<<<<< + * raise ValueError("Convolution kernel 
must have odd dimensions") + * + */ + __pyx_t_1 = (__Pyx_mod_long((__pyx_v_g->dimensions[0]), 2) != 1); + if (!__pyx_t_1) { + __pyx_t_2 = (__Pyx_mod_long((__pyx_v_g->dimensions[1]), 2) != 1); + if (!__pyx_t_2) { + __pyx_t_3 = (__Pyx_mod_long((__pyx_v_g->dimensions[2]), 2) != 1); + __pyx_t_4 = __pyx_t_3; + } else { + __pyx_t_4 = __pyx_t_2; + } + __pyx_t_2 = __pyx_t_4; + } else { + __pyx_t_2 = __pyx_t_1; + } + if (__pyx_t_2) { + + /* "astropy/convolution/boundary_wrap.pyx":171 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_4), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 171; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 171; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L3; + } + __pyx_L3:; + + /* "astropy/convolution/boundary_wrap.pyx":173 + * raise ValueError("Convolution kernel must have odd dimensions") + * + * assert f.dtype == DTYPE and g.dtype == DTYPE # <<<<<<<<<<<<<< + * + * cdef int nx = f.shape[0] + */ + #ifndef CYTHON_WITHOUT_ASSERTIONS + __pyx_t_5 = PyObject_GetAttr(((PyObject *)__pyx_v_f), __pyx_n_s__dtype); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PyObject_RichCompare(__pyx_t_5, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; 
__pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_2) { + __pyx_t_7 = PyObject_GetAttr(((PyObject *)__pyx_v_g), __pyx_n_s__dtype); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyObject_RichCompare(__pyx_t_7, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_4 = __pyx_t_1; + } else { + __pyx_t_4 = __pyx_t_2; + } + if (unlikely(!__pyx_t_4)) { + PyErr_SetNone(PyExc_AssertionError); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #endif + + /* "astropy/convolution/boundary_wrap.pyx":175 + * assert f.dtype == DTYPE and g.dtype == DTYPE + * + * cdef int nx = f.shape[0] # <<<<<<<<<<<<<< + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] + */ + __pyx_v_nx = (__pyx_v_f->dimensions[0]); + + /* "astropy/convolution/boundary_wrap.pyx":176 + * + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] # <<<<<<<<<<<<<< + * cdef int nz = f.shape[2] 
+ * cdef int nkx = g.shape[0] + */ + __pyx_v_ny = (__pyx_v_f->dimensions[1]); + + /* "astropy/convolution/boundary_wrap.pyx":177 + * cdef int nx = f.shape[0] + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] # <<<<<<<<<<<<<< + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + */ + __pyx_v_nz = (__pyx_v_f->dimensions[2]); + + /* "astropy/convolution/boundary_wrap.pyx":178 + * cdef int ny = f.shape[1] + * cdef int nz = f.shape[2] + * cdef int nkx = g.shape[0] # <<<<<<<<<<<<<< + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] + */ + __pyx_v_nkx = (__pyx_v_g->dimensions[0]); + + /* "astropy/convolution/boundary_wrap.pyx":179 + * cdef int nz = f.shape[2] + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] # <<<<<<<<<<<<<< + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 + */ + __pyx_v_nky = (__pyx_v_g->dimensions[1]); + + /* "astropy/convolution/boundary_wrap.pyx":180 + * cdef int nkx = g.shape[0] + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] # <<<<<<<<<<<<<< + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + */ + __pyx_v_nkz = (__pyx_v_g->dimensions[2]); + + /* "astropy/convolution/boundary_wrap.pyx":181 + * cdef int nky = g.shape[1] + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 # <<<<<<<<<<<<<< + * cdef int wky = nky // 2 + * cdef int wkz = nkz // 2 + */ + __pyx_v_wkx = __Pyx_div_long(__pyx_v_nkx, 2); + + /* "astropy/convolution/boundary_wrap.pyx":182 + * cdef int nkz = g.shape[2] + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 # <<<<<<<<<<<<<< + * cdef int wkz = nkz // 2 + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + */ + __pyx_v_wky = __Pyx_div_long(__pyx_v_nky, 2); + + /* "astropy/convolution/boundary_wrap.pyx":183 + * cdef int wkx = nkx // 2 + * cdef int wky = nky // 2 + * cdef int wkz = nkz // 2 # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=3] conv = 
np.empty([nx, ny, nz], dtype=DTYPE) + */ + __pyx_v_wkz = __Pyx_div_long(__pyx_v_nkz, 2); + + /* "astropy/convolution/boundary_wrap.pyx":184 + * cdef int wky = nky // 2 + * cdef int wkz = nkz // 2 + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) + * cdef unsigned int i, j, k, iii, jjj, kkk + */ + __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyObject_GetAttr(__pyx_t_5, __pyx_n_s__empty); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyInt_FromLong(__pyx_v_nz); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = PyList_New(3); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_9); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_9, 1, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_9, 2, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_8); + __pyx_t_5 = 0; + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_8 = PyTuple_New(1); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; 
__pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_8, 0, ((PyObject *)__pyx_t_9)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_9)); + __pyx_t_9 = 0; + __pyx_t_9 = PyDict_New(); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_9)); + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + if (PyDict_SetItem(__pyx_t_9, ((PyObject *)__pyx_n_s__dtype), __pyx_t_7) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyObject_Call(__pyx_t_6, ((PyObject *)__pyx_t_8), ((PyObject *)__pyx_t_9)); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_8)); __pyx_t_8 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_9)); __pyx_t_9 = 0; + if (!(likely(((__pyx_t_7) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_7, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_10 = ((PyArrayObject *)__pyx_t_7); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer, (PyObject*)__pyx_t_10, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 3, 0, __pyx_stack) == -1)) { + __pyx_v_fixed = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} + } else {__pyx_pybuffernd_fixed.diminfo[0].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_fixed.diminfo[0].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_fixed.diminfo[1].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_fixed.diminfo[1].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_fixed.diminfo[2].strides = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_fixed.diminfo[2].shape = __pyx_pybuffernd_fixed.rcbuffer->pybuffer.shape[2]; + } + } + __pyx_t_10 = 0; + __pyx_v_fixed = ((PyArrayObject *)__pyx_t_7); + __pyx_t_7 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":185 + * cdef int wkz = nkz // 2 + * cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + * cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) # <<<<<<<<<<<<<< + * cdef unsigned int i, j, k, iii, jjj, kkk + * cdef int ii, jj, kk + */ + __pyx_t_7 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = PyObject_GetAttr(__pyx_t_7, __pyx_n_s__empty); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = PyInt_FromLong(__pyx_v_nx); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyInt_FromLong(__pyx_v_ny); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = PyInt_FromLong(__pyx_v_nz); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = PyList_New(3); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + PyList_SET_ITEM(__pyx_t_5, 0, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_7); + PyList_SET_ITEM(__pyx_t_5, 1, __pyx_t_8); + __Pyx_GIVEREF(__pyx_t_8); + PyList_SET_ITEM(__pyx_t_5, 2, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_6); + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = PyTuple_New(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_6, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_8 = __Pyx_GetName(__pyx_m, __pyx_n_s__DTYPE); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + if (PyDict_SetItem(__pyx_t_5, ((PyObject *)__pyx_n_s__dtype), __pyx_t_8) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = PyObject_Call(__pyx_t_9, ((PyObject *)__pyx_t_6), ((PyObject *)__pyx_t_5)); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; + __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0; + if (!(likely(((__pyx_t_8) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_8, __pyx_ptype_5numpy_ndarray))))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_11 = ((PyArrayObject *)__pyx_t_8); + { + __Pyx_BufFmt_StackElem __pyx_stack[1]; + if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_conv.rcbuffer->pybuffer, (PyObject*)__pyx_t_11, &__Pyx_TypeInfo_nn___pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t, PyBUF_FORMAT| PyBUF_STRIDES| PyBUF_WRITABLE, 3, 0, __pyx_stack) == -1)) { + __pyx_v_conv = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf = NULL; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else {__pyx_pybuffernd_conv.diminfo[0].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_conv.diminfo[0].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_conv.diminfo[1].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_conv.diminfo[1].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_conv.diminfo[2].strides = __pyx_pybuffernd_conv.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_conv.diminfo[2].shape = __pyx_pybuffernd_conv.rcbuffer->pybuffer.shape[2]; + } + } + __pyx_t_11 = 0; + __pyx_v_conv = ((PyArrayObject *)__pyx_t_8); + __pyx_t_8 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":195 + * # Need a first pass to replace NaN values with value convolved from + * # neighboring values + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * for k in range(nz): + */ + __pyx_t_12 = __pyx_v_nx; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_i = __pyx_t_13; + + /* "astropy/convolution/boundary_wrap.pyx":196 + * # neighboring values + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * for k in range(nz): + * if npy_isnan(f[i, j, k]): + */ + __pyx_t_14 = __pyx_v_ny; + for (__pyx_t_15 = 0; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_j = __pyx_t_15; + + /* 
"astropy/convolution/boundary_wrap.pyx":197 + * for i in range(nx): + * for j in range(ny): + * for k in range(nz): # <<<<<<<<<<<<<< + * if npy_isnan(f[i, j, k]): + * top = 0. + */ + __pyx_t_16 = __pyx_v_nz; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_k = __pyx_t_17; + + /* "astropy/convolution/boundary_wrap.pyx":198 + * for j in range(ny): + * for k in range(nz): + * if npy_isnan(f[i, j, k]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. + */ + __pyx_t_18 = __pyx_v_i; + __pyx_t_19 = __pyx_v_j; + __pyx_t_20 = __pyx_v_k; + __pyx_t_4 = npy_isnan((*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_18, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_19, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_20, __pyx_pybuffernd_f.diminfo[2].strides))); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_wrap.pyx":199 + * for k in range(nz): + * if npy_isnan(f[i, j, k]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":200 + * if npy_isnan(f[i, j, k]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":201 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_wrap.pyx":202 + * bot = 0. 
+ * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":203 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * kkmin = k - wkz + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_wrap.pyx":204 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * kkmin = k - wkz + * kkmax = k + wkz + 1 + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":205 + * jjmin = j - wky + * jjmax = j + wky + 1 + * kkmin = k - wkz # <<<<<<<<<<<<<< + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_kkmin = (__pyx_v_k - __pyx_v_wkz); + + /* "astropy/convolution/boundary_wrap.pyx":206 + * jjmax = j + wky + 1 + * kkmin = k - wkz + * kkmax = k + wkz + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_kkmax = ((__pyx_v_k + __pyx_v_wkz) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":207 + * kkmin = k - wkz + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + */ + __pyx_t_21 = __pyx_v_iimax; + for (__pyx_t_22 = __pyx_v_iimin; __pyx_t_22 < __pyx_t_21; __pyx_t_22+=1) { + __pyx_v_ii = __pyx_t_22; + + /* "astropy/convolution/boundary_wrap.pyx":208 + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * for kk in range(kkmin, kkmax): + * iii = ii % nx + */ + __pyx_t_23 = __pyx_v_jjmax; + for (__pyx_t_24 = __pyx_v_jjmin; __pyx_t_24 < __pyx_t_23; __pyx_t_24+=1) { + __pyx_v_jj = __pyx_t_24; + + /* "astropy/convolution/boundary_wrap.pyx":209 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): # 
<<<<<<<<<<<<<< + * iii = ii % nx + * jjj = jj % ny + */ + __pyx_t_25 = __pyx_v_kkmax; + for (__pyx_t_26 = __pyx_v_kkmin; __pyx_t_26 < __pyx_t_25; __pyx_t_26+=1) { + __pyx_v_kk = __pyx_t_26; + + /* "astropy/convolution/boundary_wrap.pyx":210 + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + * iii = ii % nx # <<<<<<<<<<<<<< + * jjj = jj % ny + * kkk = kk % nz + */ + if (unlikely(__pyx_v_nx == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 210; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_iii = __Pyx_mod_int(__pyx_v_ii, __pyx_v_nx); + + /* "astropy/convolution/boundary_wrap.pyx":211 + * for kk in range(kkmin, kkmax): + * iii = ii % nx + * jjj = jj % ny # <<<<<<<<<<<<<< + * kkk = kk % nz + * val = f[iii, jjj, kkk] + */ + if (unlikely(__pyx_v_ny == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 211; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_jjj = __Pyx_mod_int(__pyx_v_jj, __pyx_v_ny); + + /* "astropy/convolution/boundary_wrap.pyx":212 + * iii = ii % nx + * jjj = jj % ny + * kkk = kk % nz # <<<<<<<<<<<<<< + * val = f[iii, jjj, kkk] + * if not npy_isnan(val): + */ + if (unlikely(__pyx_v_nz == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 212; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_kkk = __Pyx_mod_int(__pyx_v_kk, __pyx_v_nz); + + /* "astropy/convolution/boundary_wrap.pyx":213 + * jjj = jj % ny + * kkk = kk % nz + * val = f[iii, jjj, kkk] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * ker = g[(wkx + ii - i), + */ + __pyx_t_27 = __pyx_v_iii; + __pyx_t_28 = __pyx_v_jjj; + __pyx_t_29 = __pyx_v_kkk; + __pyx_v_val = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, 
__pyx_t_27, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_28, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_29, __pyx_pybuffernd_f.diminfo[2].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":214 + * kkk = kk % nz + * val = f[iii, jjj, kkk] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j), + */ + __pyx_t_4 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_wrap.pyx":217 + * ker = g[(wkx + ii - i), + * (wky + jj - j), + * (wkz + kk - k)] # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_30 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_31 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_t_32 = ((unsigned int)((__pyx_v_wkz + __pyx_v_kk) - __pyx_v_k)); + __pyx_v_ker = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_30, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_31, __pyx_pybuffernd_g.diminfo[1].strides, __pyx_t_32, __pyx_pybuffernd_g.diminfo[2].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":218 + * (wky + jj - j), + * (wkz + kk - k)] + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_wrap.pyx":219 + * (wkz + kk - k)] + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * + * if bot != 0.: + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L17; + } + __pyx_L17:; + } + } + } + + /* "astropy/convolution/boundary_wrap.pyx":221 + * bot += ker + * + * if bot != 0.: # <<<<<<<<<<<<<< + * fixed[i, j, k] = top / bot + * else: + */ + __pyx_t_4 = (__pyx_v_bot != 0.); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_wrap.pyx":222 + * + * if bot != 0.: + * fixed[i, j, k] = top / bot # <<<<<<<<<<<<<< + * else: + * fixed[i, j, k] = f[i, j, k] + */ + if (unlikely(__pyx_v_bot == 0)) { + 
PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 222; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_33 = __pyx_v_i; + __pyx_t_34 = __pyx_v_j; + __pyx_t_35 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_33, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_34, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_35, __pyx_pybuffernd_fixed.diminfo[2].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L18; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":224 + * fixed[i, j, k] = top / bot + * else: + * fixed[i, j, k] = f[i, j, k] # <<<<<<<<<<<<<< + * else: + * fixed[i, j, k] = f[i, j, k] + */ + __pyx_t_36 = __pyx_v_i; + __pyx_t_37 = __pyx_v_j; + __pyx_t_38 = __pyx_v_k; + __pyx_t_39 = __pyx_v_i; + __pyx_t_40 = __pyx_v_j; + __pyx_t_41 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_39, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_40, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_41, __pyx_pybuffernd_fixed.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_36, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_37, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_38, __pyx_pybuffernd_f.diminfo[2].strides)); + } + __pyx_L18:; + goto __pyx_L10; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":226 + * fixed[i, j, k] = f[i, j, k] + * else: + * fixed[i, j, k] = f[i, j, k] # <<<<<<<<<<<<<< + * + * # Now run the proper convolution + */ + __pyx_t_42 = __pyx_v_i; + __pyx_t_43 = __pyx_v_j; + __pyx_t_44 = __pyx_v_k; + __pyx_t_45 = __pyx_v_i; + __pyx_t_46 = __pyx_v_j; + __pyx_t_47 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t 
*, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_45, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_46, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_47, __pyx_pybuffernd_fixed.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_f.rcbuffer->pybuffer.buf, __pyx_t_42, __pyx_pybuffernd_f.diminfo[0].strides, __pyx_t_43, __pyx_pybuffernd_f.diminfo[1].strides, __pyx_t_44, __pyx_pybuffernd_f.diminfo[2].strides)); + } + __pyx_L10:; + } + } + } + + /* "astropy/convolution/boundary_wrap.pyx":229 + * + * # Now run the proper convolution + * for i in range(nx): # <<<<<<<<<<<<<< + * for j in range(ny): + * for k in range(nz): + */ + __pyx_t_12 = __pyx_v_nx; + for (__pyx_t_13 = 0; __pyx_t_13 < __pyx_t_12; __pyx_t_13+=1) { + __pyx_v_i = __pyx_t_13; + + /* "astropy/convolution/boundary_wrap.pyx":230 + * # Now run the proper convolution + * for i in range(nx): + * for j in range(ny): # <<<<<<<<<<<<<< + * for k in range(nz): + * if not npy_isnan(fixed[i, j, k]): + */ + __pyx_t_14 = __pyx_v_ny; + for (__pyx_t_15 = 0; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { + __pyx_v_j = __pyx_t_15; + + /* "astropy/convolution/boundary_wrap.pyx":231 + * for i in range(nx): + * for j in range(ny): + * for k in range(nz): # <<<<<<<<<<<<<< + * if not npy_isnan(fixed[i, j, k]): + * top = 0. + */ + __pyx_t_16 = __pyx_v_nz; + for (__pyx_t_17 = 0; __pyx_t_17 < __pyx_t_16; __pyx_t_17+=1) { + __pyx_v_k = __pyx_t_17; + + /* "astropy/convolution/boundary_wrap.pyx":232 + * for j in range(ny): + * for k in range(nz): + * if not npy_isnan(fixed[i, j, k]): # <<<<<<<<<<<<<< + * top = 0. + * bot = 0. 
+ */ + __pyx_t_48 = __pyx_v_i; + __pyx_t_49 = __pyx_v_j; + __pyx_t_50 = __pyx_v_k; + __pyx_t_4 = (!npy_isnan((*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_48, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_49, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_50, __pyx_pybuffernd_fixed.diminfo[2].strides)))); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_wrap.pyx":233 + * for k in range(nz): + * if not npy_isnan(fixed[i, j, k]): + * top = 0. # <<<<<<<<<<<<<< + * bot = 0. + * iimin = i - wkx + */ + __pyx_v_top = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":234 + * if not npy_isnan(fixed[i, j, k]): + * top = 0. + * bot = 0. # <<<<<<<<<<<<<< + * iimin = i - wkx + * iimax = i + wkx + 1 + */ + __pyx_v_bot = 0.; + + /* "astropy/convolution/boundary_wrap.pyx":235 + * top = 0. + * bot = 0. + * iimin = i - wkx # <<<<<<<<<<<<<< + * iimax = i + wkx + 1 + * jjmin = j - wky + */ + __pyx_v_iimin = (__pyx_v_i - __pyx_v_wkx); + + /* "astropy/convolution/boundary_wrap.pyx":236 + * bot = 0. 
+ * iimin = i - wkx + * iimax = i + wkx + 1 # <<<<<<<<<<<<<< + * jjmin = j - wky + * jjmax = j + wky + 1 + */ + __pyx_v_iimax = ((__pyx_v_i + __pyx_v_wkx) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":237 + * iimin = i - wkx + * iimax = i + wkx + 1 + * jjmin = j - wky # <<<<<<<<<<<<<< + * jjmax = j + wky + 1 + * kkmin = k - wkz + */ + __pyx_v_jjmin = (__pyx_v_j - __pyx_v_wky); + + /* "astropy/convolution/boundary_wrap.pyx":238 + * iimax = i + wkx + 1 + * jjmin = j - wky + * jjmax = j + wky + 1 # <<<<<<<<<<<<<< + * kkmin = k - wkz + * kkmax = k + wkz + 1 + */ + __pyx_v_jjmax = ((__pyx_v_j + __pyx_v_wky) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":239 + * jjmin = j - wky + * jjmax = j + wky + 1 + * kkmin = k - wkz # <<<<<<<<<<<<<< + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + */ + __pyx_v_kkmin = (__pyx_v_k - __pyx_v_wkz); + + /* "astropy/convolution/boundary_wrap.pyx":240 + * jjmax = j + wky + 1 + * kkmin = k - wkz + * kkmax = k + wkz + 1 # <<<<<<<<<<<<<< + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + */ + __pyx_v_kkmax = ((__pyx_v_k + __pyx_v_wkz) + 1); + + /* "astropy/convolution/boundary_wrap.pyx":241 + * kkmin = k - wkz + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): # <<<<<<<<<<<<<< + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + */ + __pyx_t_21 = __pyx_v_iimax; + for (__pyx_t_22 = __pyx_v_iimin; __pyx_t_22 < __pyx_t_21; __pyx_t_22+=1) { + __pyx_v_ii = __pyx_t_22; + + /* "astropy/convolution/boundary_wrap.pyx":242 + * kkmax = k + wkz + 1 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): # <<<<<<<<<<<<<< + * for kk in range(kkmin, kkmax): + * iii = ii % nx + */ + __pyx_t_23 = __pyx_v_jjmax; + for (__pyx_t_24 = __pyx_v_jjmin; __pyx_t_24 < __pyx_t_23; __pyx_t_24+=1) { + __pyx_v_jj = __pyx_t_24; + + /* "astropy/convolution/boundary_wrap.pyx":243 + * for ii in range(iimin, iimax): + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): # 
<<<<<<<<<<<<<< + * iii = ii % nx + * jjj = jj % ny + */ + __pyx_t_25 = __pyx_v_kkmax; + for (__pyx_t_26 = __pyx_v_kkmin; __pyx_t_26 < __pyx_t_25; __pyx_t_26+=1) { + __pyx_v_kk = __pyx_t_26; + + /* "astropy/convolution/boundary_wrap.pyx":244 + * for jj in range(jjmin, jjmax): + * for kk in range(kkmin, kkmax): + * iii = ii % nx # <<<<<<<<<<<<<< + * jjj = jj % ny + * kkk = kk % nz + */ + if (unlikely(__pyx_v_nx == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 244; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_iii = __Pyx_mod_int(__pyx_v_ii, __pyx_v_nx); + + /* "astropy/convolution/boundary_wrap.pyx":245 + * for kk in range(kkmin, kkmax): + * iii = ii % nx + * jjj = jj % ny # <<<<<<<<<<<<<< + * kkk = kk % nz + * val = fixed[iii, jjj, kkk] + */ + if (unlikely(__pyx_v_ny == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 245; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_jjj = __Pyx_mod_int(__pyx_v_jj, __pyx_v_ny); + + /* "astropy/convolution/boundary_wrap.pyx":246 + * iii = ii % nx + * jjj = jj % ny + * kkk = kk % nz # <<<<<<<<<<<<<< + * val = fixed[iii, jjj, kkk] + * ker = g[(wkx + ii - i), + */ + if (unlikely(__pyx_v_nz == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 246; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_v_kkk = __Pyx_mod_int(__pyx_v_kk, __pyx_v_nz); + + /* "astropy/convolution/boundary_wrap.pyx":247 + * jjj = jj % ny + * kkk = kk % nz + * val = fixed[iii, jjj, kkk] # <<<<<<<<<<<<<< + * ker = g[(wkx + ii - i), + * (wky + jj - j), + */ + __pyx_t_51 = __pyx_v_iii; + __pyx_t_52 = __pyx_v_jjj; + __pyx_t_53 = __pyx_v_kkk; + __pyx_v_val = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, 
__pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_51, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_52, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_53, __pyx_pybuffernd_fixed.diminfo[2].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":250 + * ker = g[(wkx + ii - i), + * (wky + jj - j), + * (wkz + kk - k)] # <<<<<<<<<<<<<< + * if not npy_isnan(val): + * top += val * ker + */ + __pyx_t_54 = ((unsigned int)((__pyx_v_wkx + __pyx_v_ii) - __pyx_v_i)); + __pyx_t_55 = ((unsigned int)((__pyx_v_wky + __pyx_v_jj) - __pyx_v_j)); + __pyx_t_56 = ((unsigned int)((__pyx_v_wkz + __pyx_v_kk) - __pyx_v_k)); + __pyx_v_ker = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_g.rcbuffer->pybuffer.buf, __pyx_t_54, __pyx_pybuffernd_g.diminfo[0].strides, __pyx_t_55, __pyx_pybuffernd_g.diminfo[1].strides, __pyx_t_56, __pyx_pybuffernd_g.diminfo[2].strides)); + + /* "astropy/convolution/boundary_wrap.pyx":251 + * (wky + jj - j), + * (wkz + kk - k)] + * if not npy_isnan(val): # <<<<<<<<<<<<<< + * top += val * ker + * bot += ker + */ + __pyx_t_4 = (!npy_isnan(__pyx_v_val)); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_wrap.pyx":252 + * (wkz + kk - k)] + * if not npy_isnan(val): + * top += val * ker # <<<<<<<<<<<<<< + * bot += ker + * if bot != 0: + */ + __pyx_v_top = (__pyx_v_top + (__pyx_v_val * __pyx_v_ker)); + + /* "astropy/convolution/boundary_wrap.pyx":253 + * if not npy_isnan(val): + * top += val * ker + * bot += ker # <<<<<<<<<<<<<< + * if bot != 0: + * conv[i, j, k] = top / bot + */ + __pyx_v_bot = (__pyx_v_bot + __pyx_v_ker); + goto __pyx_L32; + } + __pyx_L32:; + } + } + } + + /* "astropy/convolution/boundary_wrap.pyx":254 + * top += val * ker + * bot += ker + * if bot != 0: # <<<<<<<<<<<<<< + * conv[i, j, k] = top / bot + * else: + */ + __pyx_t_4 = (__pyx_v_bot != 0.0); + if (__pyx_t_4) { + + /* "astropy/convolution/boundary_wrap.pyx":255 + * bot += ker + * if bot != 0: + * conv[i, j, k] = top / bot # 
<<<<<<<<<<<<<< + * else: + * conv[i, j, k] = fixed[i, j, k] + */ + if (unlikely(__pyx_v_bot == 0)) { + PyErr_Format(PyExc_ZeroDivisionError, "float division"); + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 255; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_57 = __pyx_v_i; + __pyx_t_58 = __pyx_v_j; + __pyx_t_59 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_57, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_58, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_59, __pyx_pybuffernd_conv.diminfo[2].strides) = (__pyx_v_top / __pyx_v_bot); + goto __pyx_L33; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":257 + * conv[i, j, k] = top / bot + * else: + * conv[i, j, k] = fixed[i, j, k] # <<<<<<<<<<<<<< + * else: + * conv[i, j, k] = fixed[i, j, k] + */ + __pyx_t_60 = __pyx_v_i; + __pyx_t_61 = __pyx_v_j; + __pyx_t_62 = __pyx_v_k; + __pyx_t_63 = __pyx_v_i; + __pyx_t_64 = __pyx_v_j; + __pyx_t_65 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_63, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_64, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_65, __pyx_pybuffernd_conv.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_60, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_61, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_62, __pyx_pybuffernd_fixed.diminfo[2].strides)); + } + __pyx_L33:; + goto __pyx_L25; + } + /*else*/ { + + /* "astropy/convolution/boundary_wrap.pyx":259 + * conv[i, j, k] = fixed[i, j, k] + * else: + * conv[i, j, k] = fixed[i, j, k] # <<<<<<<<<<<<<< + * + * return conv + */ + __pyx_t_66 = __pyx_v_i; + __pyx_t_67 = __pyx_v_j; + __pyx_t_68 = __pyx_v_k; + __pyx_t_69 = __pyx_v_i; + __pyx_t_70 = __pyx_v_j; + 
__pyx_t_71 = __pyx_v_k; + *__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_conv.rcbuffer->pybuffer.buf, __pyx_t_69, __pyx_pybuffernd_conv.diminfo[0].strides, __pyx_t_70, __pyx_pybuffernd_conv.diminfo[1].strides, __pyx_t_71, __pyx_pybuffernd_conv.diminfo[2].strides) = (*__Pyx_BufPtrStrided3d(__pyx_t_7astropy_11convolution_13boundary_wrap_DTYPE_t *, __pyx_pybuffernd_fixed.rcbuffer->pybuffer.buf, __pyx_t_66, __pyx_pybuffernd_fixed.diminfo[0].strides, __pyx_t_67, __pyx_pybuffernd_fixed.diminfo[1].strides, __pyx_t_68, __pyx_pybuffernd_fixed.diminfo[2].strides)); + } + __pyx_L25:; + } + } + } + + /* "astropy/convolution/boundary_wrap.pyx":261 + * conv[i, j, k] = fixed[i, j, k] + * + * return conv # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_conv)); + __pyx_r = ((PyObject *)__pyx_v_conv); + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + { PyObject *__pyx_type, *__pyx_value, *__pyx_tb; + __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);} + __Pyx_AddTraceback("astropy.convolution.boundary_wrap.convolve3d_boundary_wrap", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + goto __pyx_L2; + __pyx_L0:; + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_conv.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_f.rcbuffer->pybuffer); + __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_fixed.rcbuffer->pybuffer); + 
__Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_g.rcbuffer->pybuffer); + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_fixed); + __Pyx_XDECREF((PyObject *)__pyx_v_conv); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/ +static CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0); + __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags)); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":194 + * # experimental exception made for __getbuffer__ and __releasebuffer__ + * # -- the details of this may change. + * def __getbuffer__(ndarray self, Py_buffer* info, int flags): # <<<<<<<<<<<<<< + * # This implementation of getbuffer is geared towards Cython + * # requirements, and does not yet fullfill the PEP. 
+ */ + +static int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) { + int __pyx_v_copy_shape; + int __pyx_v_i; + int __pyx_v_ndim; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + int __pyx_v_t; + char *__pyx_v_f; + PyArray_Descr *__pyx_v_descr = 0; + int __pyx_v_offset; + int __pyx_v_hasfields; + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + char *__pyx_t_9; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__getbuffer__", 0); + if (__pyx_v_info != NULL) { + __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(__pyx_v_info->obj); + } + + /* "numpy.pxd":200 + * # of flags + * + * if info == NULL: return # <<<<<<<<<<<<<< + * + * cdef int copy_shape, i, ndim + */ + __pyx_t_1 = (__pyx_v_info == NULL); + if (__pyx_t_1) { + __pyx_r = 0; + goto __pyx_L0; + goto __pyx_L3; + } + __pyx_L3:; + + /* "numpy.pxd":203 + * + * cdef int copy_shape, i, ndim + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + */ + __pyx_v_endian_detector = 1; + + /* "numpy.pxd":204 + * cdef int copy_shape, i, ndim + * cdef int endian_detector = 1 + * cdef bint little_endian = ((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * + * ndim = PyArray_NDIM(self) + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "numpy.pxd":206 + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * + * ndim = PyArray_NDIM(self) # <<<<<<<<<<<<<< + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_v_ndim = PyArray_NDIM(__pyx_v_self); + + /* "numpy.pxd":208 + * ndim = PyArray_NDIM(self) + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * copy_shape = 1 + * else: + */ + 
__pyx_t_1 = ((sizeof(npy_intp)) != (sizeof(Py_ssize_t))); + if (__pyx_t_1) { + + /* "numpy.pxd":209 + * + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * copy_shape = 1 # <<<<<<<<<<<<<< + * else: + * copy_shape = 0 + */ + __pyx_v_copy_shape = 1; + goto __pyx_L4; + } + /*else*/ { + + /* "numpy.pxd":211 + * copy_shape = 1 + * else: + * copy_shape = 0 # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + */ + __pyx_v_copy_shape = 0; + } + __pyx_L4:; + + /* "numpy.pxd":213 + * copy_shape = 0 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") + */ + __pyx_t_1 = ((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS); + if (__pyx_t_1) { + + /* "numpy.pxd":214 + * + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not C contiguous") + * + */ + __pyx_t_2 = (!PyArray_CHKFLAGS(__pyx_v_self, NPY_C_CONTIGUOUS)); + __pyx_t_3 = __pyx_t_2; + } else { + __pyx_t_3 = __pyx_t_1; + } + if (__pyx_t_3) { + + /* "numpy.pxd":215 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_6), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L5; + } + __pyx_L5:; + + /* "numpy.pxd":217 + * 
raise ValueError(u"ndarray is not C contiguous") + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) # <<<<<<<<<<<<<< + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") + */ + __pyx_t_3 = ((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS); + if (__pyx_t_3) { + + /* "numpy.pxd":218 + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): # <<<<<<<<<<<<<< + * raise ValueError(u"ndarray is not Fortran contiguous") + * + */ + __pyx_t_1 = (!PyArray_CHKFLAGS(__pyx_v_self, NPY_F_CONTIGUOUS)); + __pyx_t_2 = __pyx_t_1; + } else { + __pyx_t_2 = __pyx_t_3; + } + if (__pyx_t_2) { + + /* "numpy.pxd":219 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_8), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L6; + } + __pyx_L6:; + + /* "numpy.pxd":221 + * raise ValueError(u"ndarray is not Fortran contiguous") + * + * info.buf = PyArray_DATA(self) # <<<<<<<<<<<<<< + * info.ndim = ndim + * if copy_shape: + */ + __pyx_v_info->buf = PyArray_DATA(__pyx_v_self); + + /* "numpy.pxd":222 + * + * info.buf = PyArray_DATA(self) + * info.ndim = ndim # <<<<<<<<<<<<<< + * if copy_shape: + * # Allocate new buffer for strides and shape info. 
+ */ + __pyx_v_info->ndim = __pyx_v_ndim; + + /* "numpy.pxd":223 + * info.buf = PyArray_DATA(self) + * info.ndim = ndim + * if copy_shape: # <<<<<<<<<<<<<< + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + */ + if (__pyx_v_copy_shape) { + + /* "numpy.pxd":226 + * # Allocate new buffer for strides and shape info. + * # This is allocated as one block, strides first. + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) # <<<<<<<<<<<<<< + * info.shape = info.strides + ndim + * for i in range(ndim): + */ + __pyx_v_info->strides = ((Py_ssize_t *)malloc((((sizeof(Py_ssize_t)) * ((size_t)__pyx_v_ndim)) * 2))); + + /* "numpy.pxd":227 + * # This is allocated as one block, strides first. + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) + * info.shape = info.strides + ndim # <<<<<<<<<<<<<< + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + */ + __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim); + + /* "numpy.pxd":228 + * info.strides = stdlib.malloc(sizeof(Py_ssize_t) * ndim * 2) + * info.shape = info.strides + ndim + * for i in range(ndim): # <<<<<<<<<<<<<< + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] + */ + __pyx_t_5 = __pyx_v_ndim; + for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { + __pyx_v_i = __pyx_t_6; + + /* "numpy.pxd":229 + * info.shape = info.strides + ndim + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] # <<<<<<<<<<<<<< + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + */ + (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]); + + /* "numpy.pxd":230 + * for i in range(ndim): + * info.strides[i] = PyArray_STRIDES(self)[i] + * info.shape[i] = PyArray_DIMS(self)[i] # <<<<<<<<<<<<<< + * else: + * info.strides = PyArray_STRIDES(self) + */ + (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]); + } + goto __pyx_L7; + } + 
/*else*/ { + + /* "numpy.pxd":232 + * info.shape[i] = PyArray_DIMS(self)[i] + * else: + * info.strides = PyArray_STRIDES(self) # <<<<<<<<<<<<<< + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + */ + __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self)); + + /* "numpy.pxd":233 + * else: + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) # <<<<<<<<<<<<<< + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + */ + __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self)); + } + __pyx_L7:; + + /* "numpy.pxd":234 + * info.strides = PyArray_STRIDES(self) + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL # <<<<<<<<<<<<<< + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) + */ + __pyx_v_info->suboffsets = NULL; + + /* "numpy.pxd":235 + * info.shape = PyArray_DIMS(self) + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) # <<<<<<<<<<<<<< + * info.readonly = not PyArray_ISWRITEABLE(self) + * + */ + __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self); + + /* "numpy.pxd":236 + * info.suboffsets = NULL + * info.itemsize = PyArray_ITEMSIZE(self) + * info.readonly = not PyArray_ISWRITEABLE(self) # <<<<<<<<<<<<<< + * + * cdef int t + */ + __pyx_v_info->readonly = (!PyArray_ISWRITEABLE(__pyx_v_self)); + + /* "numpy.pxd":239 + * + * cdef int t + * cdef char* f = NULL # <<<<<<<<<<<<<< + * cdef dtype descr = self.descr + * cdef list stack + */ + __pyx_v_f = NULL; + + /* "numpy.pxd":240 + * cdef int t + * cdef char* f = NULL + * cdef dtype descr = self.descr # <<<<<<<<<<<<<< + * cdef list stack + * cdef int offset + */ + __pyx_t_4 = ((PyObject *)__pyx_v_self->descr); + __Pyx_INCREF(__pyx_t_4); + __pyx_v_descr = ((PyArray_Descr *)__pyx_t_4); + __pyx_t_4 = 0; + + /* "numpy.pxd":244 + * cdef int offset + * + * cdef bint hasfields = PyDataType_HASFIELDS(descr) # <<<<<<<<<<<<<< + * + * if not hasfields and not copy_shape: + */ + 
__pyx_v_hasfields = PyDataType_HASFIELDS(__pyx_v_descr); + + /* "numpy.pxd":246 + * cdef bint hasfields = PyDataType_HASFIELDS(descr) + * + * if not hasfields and not copy_shape: # <<<<<<<<<<<<<< + * # do not call releasebuffer + * info.obj = None + */ + __pyx_t_2 = (!__pyx_v_hasfields); + if (__pyx_t_2) { + __pyx_t_3 = (!__pyx_v_copy_shape); + __pyx_t_1 = __pyx_t_3; + } else { + __pyx_t_1 = __pyx_t_2; + } + if (__pyx_t_1) { + + /* "numpy.pxd":248 + * if not hasfields and not copy_shape: + * # do not call releasebuffer + * info.obj = None # <<<<<<<<<<<<<< + * else: + * # need to call releasebuffer + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = Py_None; + goto __pyx_L10; + } + /*else*/ { + + /* "numpy.pxd":251 + * else: + * # need to call releasebuffer + * info.obj = self # <<<<<<<<<<<<<< + * + * if not hasfields: + */ + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); + __pyx_v_info->obj = ((PyObject *)__pyx_v_self); + } + __pyx_L10:; + + /* "numpy.pxd":253 + * info.obj = self + * + * if not hasfields: # <<<<<<<<<<<<<< + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + */ + __pyx_t_1 = (!__pyx_v_hasfields); + if (__pyx_t_1) { + + /* "numpy.pxd":254 + * + * if not hasfields: + * t = descr.type_num # <<<<<<<<<<<<<< + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + */ + __pyx_t_5 = __pyx_v_descr->type_num; + __pyx_v_t = __pyx_t_5; + + /* "numpy.pxd":255 + * if not hasfields: + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_1 = (__pyx_v_descr->byteorder == '>'); + if (__pyx_t_1) { + __pyx_t_2 = 
__pyx_v_little_endian; + } else { + __pyx_t_2 = __pyx_t_1; + } + if (!__pyx_t_2) { + + /* "numpy.pxd":256 + * t = descr.type_num + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + */ + __pyx_t_1 = (__pyx_v_descr->byteorder == '<'); + if (__pyx_t_1) { + __pyx_t_3 = (!__pyx_v_little_endian); + __pyx_t_7 = __pyx_t_3; + } else { + __pyx_t_7 = __pyx_t_1; + } + __pyx_t_1 = __pyx_t_7; + } else { + __pyx_t_1 = __pyx_t_2; + } + if (__pyx_t_1) { + + /* "numpy.pxd":257 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_10), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L12; + } + __pyx_L12:; + + /* "numpy.pxd":258 + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + */ + __pyx_t_1 = (__pyx_v_t == NPY_BYTE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__b; + goto __pyx_L13; + } + + /* "numpy.pxd":259 + * raise ValueError(u"Non-native byte order not supported") + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + */ + __pyx_t_1 = (__pyx_v_t == NPY_UBYTE); + if (__pyx_t_1) { 
+ __pyx_v_f = __pyx_k__B; + goto __pyx_L13; + } + + /* "numpy.pxd":260 + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + */ + __pyx_t_1 = (__pyx_v_t == NPY_SHORT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__h; + goto __pyx_L13; + } + + /* "numpy.pxd":261 + * elif t == NPY_UBYTE: f = "B" + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + */ + __pyx_t_1 = (__pyx_v_t == NPY_USHORT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__H; + goto __pyx_L13; + } + + /* "numpy.pxd":262 + * elif t == NPY_SHORT: f = "h" + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + */ + __pyx_t_1 = (__pyx_v_t == NPY_INT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__i; + goto __pyx_L13; + } + + /* "numpy.pxd":263 + * elif t == NPY_USHORT: f = "H" + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" + */ + __pyx_t_1 = (__pyx_v_t == NPY_UINT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__I; + goto __pyx_L13; + } + + /* "numpy.pxd":264 + * elif t == NPY_INT: f = "i" + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__l; + goto __pyx_L13; + } + + /* "numpy.pxd":265 + * elif t == NPY_UINT: f = "I" + * elif t == NPY_LONG: f = "l" + * elif t == NPY_ULONG: f = "L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + */ + __pyx_t_1 = (__pyx_v_t == NPY_ULONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__L; + goto __pyx_L13; + } + + /* "numpy.pxd":266 + * elif t == NPY_LONG: f = "l" + * 
elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONGLONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__q; + goto __pyx_L13; + } + + /* "numpy.pxd":267 + * elif t == NPY_ULONG: f = "L" + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + */ + __pyx_t_1 = (__pyx_v_t == NPY_ULONGLONG); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Q; + goto __pyx_L13; + } + + /* "numpy.pxd":268 + * elif t == NPY_LONGLONG: f = "q" + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + */ + __pyx_t_1 = (__pyx_v_t == NPY_FLOAT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__f; + goto __pyx_L13; + } + + /* "numpy.pxd":269 + * elif t == NPY_ULONGLONG: f = "Q" + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" + */ + __pyx_t_1 = (__pyx_v_t == NPY_DOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__d; + goto __pyx_L13; + } + + /* "numpy.pxd":270 + * elif t == NPY_FLOAT: f = "f" + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + */ + __pyx_t_1 = (__pyx_v_t == NPY_LONGDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__g; + goto __pyx_L13; + } + + /* "numpy.pxd":271 + * elif t == NPY_DOUBLE: f = "d" + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t == NPY_CFLOAT: f = "Zf" # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + */ + __pyx_t_1 = (__pyx_v_t == NPY_CFLOAT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zf; + goto __pyx_L13; + } + + /* "numpy.pxd":272 + * elif t == NPY_LONGDOUBLE: f = "g" + * elif t 
== NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" + */ + __pyx_t_1 = (__pyx_v_t == NPY_CDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zd; + goto __pyx_L13; + } + + /* "numpy.pxd":273 + * elif t == NPY_CFLOAT: f = "Zf" + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f = "O" + * else: + */ + __pyx_t_1 = (__pyx_v_t == NPY_CLONGDOUBLE); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__Zg; + goto __pyx_L13; + } + + /* "numpy.pxd":274 + * elif t == NPY_CDOUBLE: f = "Zd" + * elif t == NPY_CLONGDOUBLE: f = "Zg" + * elif t == NPY_OBJECT: f = "O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_1 = (__pyx_v_t == NPY_OBJECT); + if (__pyx_t_1) { + __pyx_v_f = __pyx_k__O; + goto __pyx_L13; + } + /*else*/ { + + /* "numpy.pxd":276 + * elif t == NPY_OBJECT: f = "O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * info.format = f + * return + */ + __pyx_t_4 = PyInt_FromLong(__pyx_v_t); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = PyNumber_Remainder(((PyObject *)__pyx_kp_u_11), __pyx_t_4); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_8)); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)__pyx_t_8)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_8)); + __pyx_t_8 = 0; + __pyx_t_8 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_t_4), NULL); if 
(unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_8, 0, 0, 0); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_L13:; + + /* "numpy.pxd":277 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f # <<<<<<<<<<<<<< + * return + * else: + */ + __pyx_v_info->format = __pyx_v_f; + + /* "numpy.pxd":278 + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * info.format = f + * return # <<<<<<<<<<<<<< + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) + */ + __pyx_r = 0; + goto __pyx_L0; + goto __pyx_L11; + } + /*else*/ { + + /* "numpy.pxd":280 + * return + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) # <<<<<<<<<<<<<< + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 + */ + __pyx_v_info->format = ((char *)malloc(255)); + + /* "numpy.pxd":281 + * else: + * info.format = stdlib.malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment # <<<<<<<<<<<<<< + * offset = 0 + * f = _util_dtypestring(descr, info.format + 1, + */ + (__pyx_v_info->format[0]) = '^'; + + /* "numpy.pxd":282 + * info.format = stdlib.malloc(_buffer_format_string_len) + * info.format[0] = c'^' # Native data types, manual alignment + * offset = 0 # <<<<<<<<<<<<<< + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + */ + __pyx_v_offset = 0; + + /* "numpy.pxd":285 + * f = _util_dtypestring(descr, info.format + 1, + * info.format + _buffer_format_string_len, + * &offset) # <<<<<<<<<<<<<< + * f[0] = c'\0' # Terminate format string + * + */ + __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), 
(__pyx_v_info->format + 255), (&__pyx_v_offset)); if (unlikely(__pyx_t_9 == NULL)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 283; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_v_f = __pyx_t_9; + + /* "numpy.pxd":286 + * info.format + _buffer_format_string_len, + * &offset) + * f[0] = c'\0' # Terminate format string # <<<<<<<<<<<<<< + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + */ + (__pyx_v_f[0]) = '\x00'; + } + __pyx_L11:; + + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("numpy.ndarray.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + if (__pyx_v_info != NULL && __pyx_v_info->obj != NULL) { + __Pyx_GOTREF(__pyx_v_info->obj); + __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = NULL; + } + goto __pyx_L2; + __pyx_L0:; + if (__pyx_v_info != NULL && __pyx_v_info->obj == Py_None) { + __Pyx_GOTREF(Py_None); + __Pyx_DECREF(Py_None); __pyx_v_info->obj = NULL; + } + __pyx_L2:; + __Pyx_XDECREF((PyObject *)__pyx_v_descr); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/ +static CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0); + __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info)); + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":288 + * f[0] = c'\0' # Terminate format string + * + * def __releasebuffer__(ndarray self, Py_buffer* info): # <<<<<<<<<<<<<< + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) + */ + +static void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) { + __Pyx_RefNannyDeclarations + int 
__pyx_t_1; + __Pyx_RefNannySetupContext("__releasebuffer__", 0); + + /* "numpy.pxd":289 + * + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): # <<<<<<<<<<<<<< + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + */ + __pyx_t_1 = PyArray_HASFIELDS(__pyx_v_self); + if (__pyx_t_1) { + + /* "numpy.pxd":290 + * def __releasebuffer__(ndarray self, Py_buffer* info): + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) # <<<<<<<<<<<<<< + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * stdlib.free(info.strides) + */ + free(__pyx_v_info->format); + goto __pyx_L3; + } + __pyx_L3:; + + /* "numpy.pxd":291 + * if PyArray_HASFIELDS(self): + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): # <<<<<<<<<<<<<< + * stdlib.free(info.strides) + * # info.shape was stored after info.strides in the same block + */ + __pyx_t_1 = ((sizeof(npy_intp)) != (sizeof(Py_ssize_t))); + if (__pyx_t_1) { + + /* "numpy.pxd":292 + * stdlib.free(info.format) + * if sizeof(npy_intp) != sizeof(Py_ssize_t): + * stdlib.free(info.strides) # <<<<<<<<<<<<<< + * # info.shape was stored after info.strides in the same block + * + */ + free(__pyx_v_info->strides); + goto __pyx_L4; + } + __pyx_L4:; + + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":768 + * ctypedef npy_cdouble complex_t + * + * cdef inline object PyArray_MultiIterNew1(a): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(1, a) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew1", 0); + + /* "numpy.pxd":769 + * + * cdef inline object PyArray_MultiIterNew1(a): + * return PyArray_MultiIterNew(1, a) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew2(a, b): + */ + 
__Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 769; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew1", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":771 + * return PyArray_MultiIterNew(1, a) + * + * cdef inline object PyArray_MultiIterNew2(a, b): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(2, a, b) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew2", 0); + + /* "numpy.pxd":772 + * + * cdef inline object PyArray_MultiIterNew2(a, b): + * return PyArray_MultiIterNew(2, a, b) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 772; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew2", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":774 + * 
return PyArray_MultiIterNew(2, a, b) + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(3, a, b, c) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew3", 0); + + /* "numpy.pxd":775 + * + * cdef inline object PyArray_MultiIterNew3(a, b, c): + * return PyArray_MultiIterNew(3, a, b, c) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 775; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew3", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":777 + * return PyArray_MultiIterNew(3, a, b, c) + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(4, a, b, c, d) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew4", 0); + + /* 
"numpy.pxd":778 + * + * cdef inline object PyArray_MultiIterNew4(a, b, c, d): + * return PyArray_MultiIterNew(4, a, b, c, d) # <<<<<<<<<<<<<< + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 778; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew4", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":780 + * return PyArray_MultiIterNew(4, a, b, c, d) + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): # <<<<<<<<<<<<<< + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("PyArray_MultiIterNew5", 0); + + /* "numpy.pxd":781 + * + * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e): + * return PyArray_MultiIterNew(5, a, b, c, d, e) # <<<<<<<<<<<<<< + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; 
__pyx_lineno = 781; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("numpy.PyArray_MultiIterNew5", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":783 + * return PyArray_MultiIterNew(5, a, b, c, d, e) + * + * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL: # <<<<<<<<<<<<<< + * # Recursive utility function used in __getbuffer__ to get format + * # string. The new location in the format string is returned. + */ + +static CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) { + PyArray_Descr *__pyx_v_child = 0; + int __pyx_v_endian_detector; + int __pyx_v_little_endian; + PyObject *__pyx_v_fields = 0; + PyObject *__pyx_v_childname = NULL; + PyObject *__pyx_v_new_offset = NULL; + PyObject *__pyx_v_t = NULL; + char *__pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + Py_ssize_t __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *(*__pyx_t_6)(PyObject *); + int __pyx_t_7; + int __pyx_t_8; + int __pyx_t_9; + int __pyx_t_10; + long __pyx_t_11; + char *__pyx_t_12; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_util_dtypestring", 0); + + /* "numpy.pxd":790 + * cdef int delta_offset + * cdef tuple i + * cdef int endian_detector = 1 # <<<<<<<<<<<<<< + * cdef bint little_endian = ((&endian_detector)[0] != 0) + * cdef tuple fields + */ + __pyx_v_endian_detector = 1; + + /* "numpy.pxd":791 + * cdef tuple i + * cdef int endian_detector = 1 + * cdef bint little_endian = 
((&endian_detector)[0] != 0) # <<<<<<<<<<<<<< + * cdef tuple fields + * + */ + __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0); + + /* "numpy.pxd":794 + * cdef tuple fields + * + * for childname in descr.names: # <<<<<<<<<<<<<< + * fields = descr.fields[childname] + * child, new_offset = fields + */ + if (unlikely(((PyObject *)__pyx_v_descr->names) == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_t_1 = ((PyObject *)__pyx_v_descr->names); __Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0; + for (;;) { + if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_COMPILING_IN_CPYTHON + __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #else + __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 794; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + __Pyx_XDECREF(__pyx_v_childname); + __pyx_v_childname = __pyx_t_3; + __pyx_t_3 = 0; + + /* "numpy.pxd":795 + * + * for childname in descr.names: + * fields = descr.fields[childname] # <<<<<<<<<<<<<< + * child, new_offset = fields + * + */ + __pyx_t_3 = PyObject_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (!__pyx_t_3) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 795; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected tuple, got %.200s", Py_TYPE(__pyx_t_3)->tp_name), 0))) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 795; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_XDECREF(((PyObject *)__pyx_v_fields)); + __pyx_v_fields = ((PyObject*)__pyx_t_3); + 
__pyx_t_3 = 0; + + /* "numpy.pxd":796 + * for childname in descr.names: + * fields = descr.fields[childname] + * child, new_offset = fields # <<<<<<<<<<<<<< + * + * if (end - f) - (new_offset - offset[0]) < 15: + */ + if (likely(PyTuple_CheckExact(((PyObject *)__pyx_v_fields)))) { + PyObject* sequence = ((PyObject *)__pyx_v_fields); + #if CYTHON_COMPILING_IN_CPYTHON + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + #if CYTHON_COMPILING_IN_CPYTHON + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + #endif + } else if (1) { + __Pyx_RaiseNoneNotIterableError(); {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } else + { + Py_ssize_t index = -1; + __pyx_t_5 = PyObject_GetIter(((PyObject *)__pyx_v_fields)); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = Py_TYPE(__pyx_t_5)->tp_iternext; + index = 0; __pyx_t_3 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_3)) goto __pyx_L5_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 1; __pyx_t_4 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_4)) goto __pyx_L5_unpacking_failed; + 
__Pyx_GOTREF(__pyx_t_4); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_5), 2) < 0) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_6 = NULL; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L6_unpacking_done; + __pyx_L5_unpacking_failed:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_6 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_L6_unpacking_done:; + } + if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 796; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_XDECREF(((PyObject *)__pyx_v_child)); + __pyx_v_child = ((PyArray_Descr *)__pyx_t_3); + __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_v_new_offset); + __pyx_v_new_offset = __pyx_t_4; + __pyx_t_4 = 0; + + /* "numpy.pxd":798 + * child, new_offset = fields + * + * if (end - f) - (new_offset - offset[0]) < 15: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + */ + __pyx_t_4 = PyInt_FromLong((__pyx_v_end - __pyx_v_f)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = PyInt_FromLong((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_3); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyNumber_Subtract(__pyx_t_4, __pyx_t_5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = PyObject_RichCompare(__pyx_t_3, __pyx_int_15, Py_LT); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 798; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + + /* "numpy.pxd":799 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_RuntimeError, ((PyObject *)__pyx_k_tuple_13), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L7; + } + __pyx_L7:; + + /* "numpy.pxd":801 + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") + * + * if ((child.byteorder == c'>' and little_endian) or # <<<<<<<<<<<<<< + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") + */ + __pyx_t_7 = (__pyx_v_child->byteorder == '>'); + if (__pyx_t_7) { + __pyx_t_8 = __pyx_v_little_endian; + } else { + __pyx_t_8 = __pyx_t_7; + } + if (!__pyx_t_8) { + + /* "numpy.pxd":802 + * + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): # <<<<<<<<<<<<<< + * raise 
ValueError(u"Non-native byte order not supported") + * # One could encode it in the format string and have Cython + */ + __pyx_t_7 = (__pyx_v_child->byteorder == '<'); + if (__pyx_t_7) { + __pyx_t_9 = (!__pyx_v_little_endian); + __pyx_t_10 = __pyx_t_9; + } else { + __pyx_t_10 = __pyx_t_7; + } + __pyx_t_7 = __pyx_t_10; + } else { + __pyx_t_7 = __pyx_t_8; + } + if (__pyx_t_7) { + + /* "numpy.pxd":803 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_14), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L8; + } + __pyx_L8:; + + /* "numpy.pxd":813 + * + * # Output padding bytes + * while offset[0] < new_offset: # <<<<<<<<<<<<<< + * f[0] = 120 # "x"; pad byte + * f += 1 + */ + while (1) { + __pyx_t_5 = PyInt_FromLong((__pyx_v_offset[0])); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_5, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 813; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!__pyx_t_7) break; + + /* "numpy.pxd":814 + * # Output padding bytes + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte # <<<<<<<<<<<<<< + * f += 1 + * offset[0] += 1 + */ + (__pyx_v_f[0]) = 120; + + /* "numpy.pxd":815 + * while offset[0] < new_offset: + * f[0] = 120 # "x"; pad byte + * f += 1 # <<<<<<<<<<<<<< + * offset[0] += 1 + * + */ + __pyx_v_f = (__pyx_v_f + 1); + + /* "numpy.pxd":816 + * f[0] = 120 # "x"; pad byte + * f += 1 + * offset[0] += 1 # <<<<<<<<<<<<<< + * + * offset[0] += child.itemsize + */ + __pyx_t_11 = 0; + (__pyx_v_offset[__pyx_t_11]) = ((__pyx_v_offset[__pyx_t_11]) + 1); + } + + /* "numpy.pxd":818 + * offset[0] += 1 + * + * offset[0] += child.itemsize # <<<<<<<<<<<<<< + * + * if not PyDataType_HASFIELDS(child): + */ + __pyx_t_11 = 0; + (__pyx_v_offset[__pyx_t_11]) = ((__pyx_v_offset[__pyx_t_11]) + __pyx_v_child->elsize); + + /* "numpy.pxd":820 + * offset[0] += child.itemsize + * + * if not PyDataType_HASFIELDS(child): # <<<<<<<<<<<<<< + * t = child.type_num + * if end - f < 5: + */ + __pyx_t_7 = (!PyDataType_HASFIELDS(__pyx_v_child)); + if (__pyx_t_7) { + + /* "numpy.pxd":821 + * + * if not PyDataType_HASFIELDS(child): + * t = child.type_num # <<<<<<<<<<<<<< + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") + */ + __pyx_t_3 = PyInt_FromLong(__pyx_v_child->type_num); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 821; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_v_t); + __pyx_v_t = __pyx_t_3; + __pyx_t_3 = 0; + + /* "numpy.pxd":822 + * if not PyDataType_HASFIELDS(child): + * t = child.type_num + * if end - f < 5: # <<<<<<<<<<<<<< + * raise RuntimeError(u"Format string allocated too short.") + * + */ + __pyx_t_7 = ((__pyx_v_end - __pyx_v_f) < 5); + if (__pyx_t_7) { + + /* "numpy.pxd":823 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated 
too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_t_3 = PyObject_Call(__pyx_builtin_RuntimeError, ((PyObject *)__pyx_k_tuple_16), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + goto __pyx_L12; + } + __pyx_L12:; + + /* "numpy.pxd":826 + * + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" # <<<<<<<<<<<<<< + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + */ + __pyx_t_3 = PyInt_FromLong(NPY_BYTE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 98; + goto __pyx_L13; + } + + /* "numpy.pxd":827 + * # Until ticket #99 is fixed, use integers to avoid warnings + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" # <<<<<<<<<<<<<< + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + */ + __pyx_t_5 = PyInt_FromLong(NPY_UBYTE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = 
PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 66; + goto __pyx_L13; + } + + /* "numpy.pxd":828 + * if t == NPY_BYTE: f[0] = 98 #"b" + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" # <<<<<<<<<<<<<< + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + */ + __pyx_t_3 = PyInt_FromLong(NPY_SHORT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 828; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 104; + goto __pyx_L13; + } + + /* "numpy.pxd":829 + * elif t == NPY_UBYTE: f[0] = 66 #"B" + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" # <<<<<<<<<<<<<< + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + */ + __pyx_t_5 = PyInt_FromLong(NPY_USHORT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, 
Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 829; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 72; + goto __pyx_L13; + } + + /* "numpy.pxd":830 + * elif t == NPY_SHORT: f[0] = 104 #"h" + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" # <<<<<<<<<<<<<< + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + */ + __pyx_t_3 = PyInt_FromLong(NPY_INT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 830; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 105; + goto __pyx_L13; + } + + /* "numpy.pxd":831 + * elif t == NPY_USHORT: f[0] = 72 #"H" + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" # <<<<<<<<<<<<<< + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + */ + __pyx_t_5 = PyInt_FromLong(NPY_UINT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if 
(unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 831; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 73; + goto __pyx_L13; + } + + /* "numpy.pxd":832 + * elif t == NPY_INT: f[0] = 105 #"i" + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" # <<<<<<<<<<<<<< + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONG); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 832; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 108; + goto __pyx_L13; + } + + /* "numpy.pxd":833 + * elif t == NPY_UINT: f[0] = 73 #"I" + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" # <<<<<<<<<<<<<< + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + */ + __pyx_t_5 = PyInt_FromLong(NPY_ULONG); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = 
__pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 833; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 76; + goto __pyx_L13; + } + + /* "numpy.pxd":834 + * elif t == NPY_LONG: f[0] = 108 #"l" + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" # <<<<<<<<<<<<<< + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONGLONG); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 834; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 113; + goto __pyx_L13; + } + + /* "numpy.pxd":835 + * elif t == NPY_ULONG: f[0] = 76 #"L" + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" # <<<<<<<<<<<<<< + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + */ + __pyx_t_5 = PyInt_FromLong(NPY_ULONGLONG); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; 
__pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 81; + goto __pyx_L13; + } + + /* "numpy.pxd":836 + * elif t == NPY_LONGLONG: f[0] = 113 #"q" + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" # <<<<<<<<<<<<<< + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + */ + __pyx_t_3 = PyInt_FromLong(NPY_FLOAT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 836; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 102; + goto __pyx_L13; + } + + /* "numpy.pxd":837 + * elif t == NPY_ULONGLONG: f[0] = 81 #"Q" + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" # <<<<<<<<<<<<<< + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + */ + __pyx_t_5 = PyInt_FromLong(NPY_DOUBLE); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = 
__pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 100; + goto __pyx_L13; + } + + /* "numpy.pxd":838 + * elif t == NPY_FLOAT: f[0] = 102 #"f" + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" # <<<<<<<<<<<<<< + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + */ + __pyx_t_3 = PyInt_FromLong(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 838; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 103; + goto __pyx_L13; + } + + /* "numpy.pxd":839 + * elif t == NPY_DOUBLE: f[0] = 100 #"d" + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf # <<<<<<<<<<<<<< + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + */ + __pyx_t_5 = PyInt_FromLong(NPY_CFLOAT); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, 
__pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 839; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 102; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":840 + * elif t == NPY_LONGDOUBLE: f[0] = 103 #"g" + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd # <<<<<<<<<<<<<< + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" + */ + __pyx_t_3 = PyInt_FromLong(NPY_CDOUBLE); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 100; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":841 + * elif t == NPY_CFLOAT: f[0] = 90; f[1] = 102; f += 1 # Zf + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg # <<<<<<<<<<<<<< + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + */ + __pyx_t_5 = PyInt_FromLong(NPY_CLONGDOUBLE); if 
(unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 841; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 90; + (__pyx_v_f[1]) = 103; + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L13; + } + + /* "numpy.pxd":842 + * elif t == NPY_CDOUBLE: f[0] = 90; f[1] = 100; f += 1 # Zd + * elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg + * elif t == NPY_OBJECT: f[0] = 79 #"O" # <<<<<<<<<<<<<< + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + */ + __pyx_t_3 = PyInt_FromLong(NPY_OBJECT); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_7) { + (__pyx_v_f[0]) = 79; + goto __pyx_L13; + } + /*else*/ { + + /* "numpy.pxd":844 + * elif t == NPY_OBJECT: f[0] = 79 #"O" + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) # <<<<<<<<<<<<<< + * f += 1 + * else: + */ + __pyx_t_5 = 
PyNumber_Remainder(((PyObject *)__pyx_kp_u_11), __pyx_v_t); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_5)); + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_t_5)); + __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); + __pyx_t_5 = 0; + __pyx_t_5 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + {__pyx_filename = __pyx_f[1]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + __pyx_L13:; + + /* "numpy.pxd":845 + * else: + * raise ValueError(u"unknown dtype code in numpy.pxd (%d)" % t) + * f += 1 # <<<<<<<<<<<<<< + * else: + * # Cython ignores struct boundary information ("T{...}"), + */ + __pyx_v_f = (__pyx_v_f + 1); + goto __pyx_L11; + } + /*else*/ { + + /* "numpy.pxd":849 + * # Cython ignores struct boundary information ("T{...}"), + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) # <<<<<<<<<<<<<< + * return f + * + */ + __pyx_t_12 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_12 == NULL)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 849; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_v_f = __pyx_t_12; + } + __pyx_L11:; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "numpy.pxd":850 + * # so don't output it + * f = _util_dtypestring(child, f, end, offset) + * return f # <<<<<<<<<<<<<< + * + * + */ + __pyx_r = __pyx_v_f; + goto __pyx_L0; + + __pyx_r = 
0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("numpy._util_dtypestring", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_child); + __Pyx_XDECREF(__pyx_v_fields); + __Pyx_XDECREF(__pyx_v_childname); + __Pyx_XDECREF(__pyx_v_new_offset); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "numpy.pxd":965 + * + * + * cdef inline void set_array_base(ndarray arr, object base): # <<<<<<<<<<<<<< + * cdef PyObject* baseptr + * if base is None: + */ + +static CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject *__pyx_v_arr, PyObject *__pyx_v_base) { + PyObject *__pyx_v_baseptr; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("set_array_base", 0); + + /* "numpy.pxd":967 + * cdef inline void set_array_base(ndarray arr, object base): + * cdef PyObject* baseptr + * if base is None: # <<<<<<<<<<<<<< + * baseptr = NULL + * else: + */ + __pyx_t_1 = (__pyx_v_base == Py_None); + if (__pyx_t_1) { + + /* "numpy.pxd":968 + * cdef PyObject* baseptr + * if base is None: + * baseptr = NULL # <<<<<<<<<<<<<< + * else: + * Py_INCREF(base) # important to do this before decref below! + */ + __pyx_v_baseptr = NULL; + goto __pyx_L3; + } + /*else*/ { + + /* "numpy.pxd":970 + * baseptr = NULL + * else: + * Py_INCREF(base) # important to do this before decref below! # <<<<<<<<<<<<<< + * baseptr = base + * Py_XDECREF(arr.base) + */ + Py_INCREF(__pyx_v_base); + + /* "numpy.pxd":971 + * else: + * Py_INCREF(base) # important to do this before decref below! + * baseptr = base # <<<<<<<<<<<<<< + * Py_XDECREF(arr.base) + * arr.base = baseptr + */ + __pyx_v_baseptr = ((PyObject *)__pyx_v_base); + } + __pyx_L3:; + + /* "numpy.pxd":972 + * Py_INCREF(base) # important to do this before decref below! 
+ * baseptr = base + * Py_XDECREF(arr.base) # <<<<<<<<<<<<<< + * arr.base = baseptr + * + */ + Py_XDECREF(__pyx_v_arr->base); + + /* "numpy.pxd":973 + * baseptr = base + * Py_XDECREF(arr.base) + * arr.base = baseptr # <<<<<<<<<<<<<< + * + * cdef inline object get_array_base(ndarray arr): + */ + __pyx_v_arr->base = __pyx_v_baseptr; + + __Pyx_RefNannyFinishContext(); +} + +/* "numpy.pxd":975 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + +static CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("get_array_base", 0); + + /* "numpy.pxd":976 + * + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: # <<<<<<<<<<<<<< + * return None + * else: + */ + __pyx_t_1 = (__pyx_v_arr->base == NULL); + if (__pyx_t_1) { + + /* "numpy.pxd":977 + * cdef inline object get_array_base(ndarray arr): + * if arr.base is NULL: + * return None # <<<<<<<<<<<<<< + * else: + * return arr.base + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L0; + goto __pyx_L3; + } + /*else*/ { + + /* "numpy.pxd":979 + * return None + * else: + * return arr.base # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(((PyObject *)__pyx_v_arr->base)); + __pyx_r = ((PyObject *)__pyx_v_arr->base); + goto __pyx_L0; + } + __pyx_L3:; + + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef __pyx_moduledef = { + #if PY_VERSION_HEX < 0x03020000 + { PyObject_HEAD_INIT(NULL) NULL, 0, NULL }, + #else + PyModuleDef_HEAD_INIT, + #endif + __Pyx_NAMESTR("boundary_wrap"), + 0, /* m_doc */ + -1, /* m_size */ + __pyx_methods /* 
m_methods */, + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_1, __pyx_k_1, sizeof(__pyx_k_1), 0, 0, 1, 0}, + {&__pyx_kp_u_11, __pyx_k_11, sizeof(__pyx_k_11), 0, 1, 0, 0}, + {&__pyx_kp_u_12, __pyx_k_12, sizeof(__pyx_k_12), 0, 1, 0, 0}, + {&__pyx_kp_u_15, __pyx_k_15, sizeof(__pyx_k_15), 0, 1, 0, 0}, + {&__pyx_n_s_19, __pyx_k_19, sizeof(__pyx_k_19), 0, 0, 1, 1}, + {&__pyx_kp_s_20, __pyx_k_20, sizeof(__pyx_k_20), 0, 0, 1, 0}, + {&__pyx_n_s_21, __pyx_k_21, sizeof(__pyx_k_21), 0, 0, 1, 1}, + {&__pyx_n_s_24, __pyx_k_24, sizeof(__pyx_k_24), 0, 0, 1, 1}, + {&__pyx_n_s_27, __pyx_k_27, sizeof(__pyx_k_27), 0, 0, 1, 1}, + {&__pyx_kp_u_5, __pyx_k_5, sizeof(__pyx_k_5), 0, 1, 0, 0}, + {&__pyx_kp_u_7, __pyx_k_7, sizeof(__pyx_k_7), 0, 1, 0, 0}, + {&__pyx_kp_u_9, __pyx_k_9, sizeof(__pyx_k_9), 0, 1, 0, 0}, + {&__pyx_n_s__DTYPE, __pyx_k__DTYPE, sizeof(__pyx_k__DTYPE), 0, 0, 1, 1}, + {&__pyx_n_s__RuntimeError, __pyx_k__RuntimeError, sizeof(__pyx_k__RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s__ValueError, __pyx_k__ValueError, sizeof(__pyx_k__ValueError), 0, 0, 1, 1}, + {&__pyx_n_s____main__, __pyx_k____main__, sizeof(__pyx_k____main__), 0, 0, 1, 1}, + {&__pyx_n_s____test__, __pyx_k____test__, sizeof(__pyx_k____test__), 0, 0, 1, 1}, + {&__pyx_n_s__bot, __pyx_k__bot, sizeof(__pyx_k__bot), 0, 0, 1, 1}, + {&__pyx_n_s__conv, __pyx_k__conv, sizeof(__pyx_k__conv), 0, 0, 1, 1}, + {&__pyx_n_s__dtype, __pyx_k__dtype, sizeof(__pyx_k__dtype), 0, 0, 1, 1}, + {&__pyx_n_s__empty, __pyx_k__empty, sizeof(__pyx_k__empty), 0, 0, 1, 1}, + {&__pyx_n_s__f, __pyx_k__f, sizeof(__pyx_k__f), 0, 0, 1, 1}, + {&__pyx_n_s__fixed, __pyx_k__fixed, sizeof(__pyx_k__fixed), 0, 0, 1, 1}, + {&__pyx_n_s__float, __pyx_k__float, sizeof(__pyx_k__float), 0, 0, 1, 1}, + {&__pyx_n_s__g, __pyx_k__g, sizeof(__pyx_k__g), 0, 0, 1, 1}, + {&__pyx_n_s__i, __pyx_k__i, sizeof(__pyx_k__i), 0, 0, 1, 1}, + {&__pyx_n_s__ii, 
__pyx_k__ii, sizeof(__pyx_k__ii), 0, 0, 1, 1}, + {&__pyx_n_s__iii, __pyx_k__iii, sizeof(__pyx_k__iii), 0, 0, 1, 1}, + {&__pyx_n_s__iimax, __pyx_k__iimax, sizeof(__pyx_k__iimax), 0, 0, 1, 1}, + {&__pyx_n_s__iimin, __pyx_k__iimin, sizeof(__pyx_k__iimin), 0, 0, 1, 1}, + {&__pyx_n_s__j, __pyx_k__j, sizeof(__pyx_k__j), 0, 0, 1, 1}, + {&__pyx_n_s__jj, __pyx_k__jj, sizeof(__pyx_k__jj), 0, 0, 1, 1}, + {&__pyx_n_s__jjj, __pyx_k__jjj, sizeof(__pyx_k__jjj), 0, 0, 1, 1}, + {&__pyx_n_s__jjmax, __pyx_k__jjmax, sizeof(__pyx_k__jjmax), 0, 0, 1, 1}, + {&__pyx_n_s__jjmin, __pyx_k__jjmin, sizeof(__pyx_k__jjmin), 0, 0, 1, 1}, + {&__pyx_n_s__k, __pyx_k__k, sizeof(__pyx_k__k), 0, 0, 1, 1}, + {&__pyx_n_s__ker, __pyx_k__ker, sizeof(__pyx_k__ker), 0, 0, 1, 1}, + {&__pyx_n_s__kk, __pyx_k__kk, sizeof(__pyx_k__kk), 0, 0, 1, 1}, + {&__pyx_n_s__kkk, __pyx_k__kkk, sizeof(__pyx_k__kkk), 0, 0, 1, 1}, + {&__pyx_n_s__kkmax, __pyx_k__kkmax, sizeof(__pyx_k__kkmax), 0, 0, 1, 1}, + {&__pyx_n_s__kkmin, __pyx_k__kkmin, sizeof(__pyx_k__kkmin), 0, 0, 1, 1}, + {&__pyx_n_s__nkx, __pyx_k__nkx, sizeof(__pyx_k__nkx), 0, 0, 1, 1}, + {&__pyx_n_s__nky, __pyx_k__nky, sizeof(__pyx_k__nky), 0, 0, 1, 1}, + {&__pyx_n_s__nkz, __pyx_k__nkz, sizeof(__pyx_k__nkz), 0, 0, 1, 1}, + {&__pyx_n_s__np, __pyx_k__np, sizeof(__pyx_k__np), 0, 0, 1, 1}, + {&__pyx_n_s__numpy, __pyx_k__numpy, sizeof(__pyx_k__numpy), 0, 0, 1, 1}, + {&__pyx_n_s__nx, __pyx_k__nx, sizeof(__pyx_k__nx), 0, 0, 1, 1}, + {&__pyx_n_s__ny, __pyx_k__ny, sizeof(__pyx_k__ny), 0, 0, 1, 1}, + {&__pyx_n_s__nz, __pyx_k__nz, sizeof(__pyx_k__nz), 0, 0, 1, 1}, + {&__pyx_n_s__range, __pyx_k__range, sizeof(__pyx_k__range), 0, 0, 1, 1}, + {&__pyx_n_s__top, __pyx_k__top, sizeof(__pyx_k__top), 0, 0, 1, 1}, + {&__pyx_n_s__val, __pyx_k__val, sizeof(__pyx_k__val), 0, 0, 1, 1}, + {&__pyx_n_s__wkx, __pyx_k__wkx, sizeof(__pyx_k__wkx), 0, 0, 1, 1}, + {&__pyx_n_s__wky, __pyx_k__wky, sizeof(__pyx_k__wky), 0, 0, 1, 1}, + {&__pyx_n_s__wkz, __pyx_k__wkz, sizeof(__pyx_k__wkz), 0, 0, 1, 1}, + 
{0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_ValueError = __Pyx_GetName(__pyx_b, __pyx_n_s__ValueError); if (!__pyx_builtin_ValueError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_builtin_range = __Pyx_GetName(__pyx_b, __pyx_n_s__range); if (!__pyx_builtin_range) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 38; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_builtin_RuntimeError = __Pyx_GetName(__pyx_b, __pyx_n_s__RuntimeError); if (!__pyx_builtin_RuntimeError) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "astropy/convolution/boundary_wrap.pyx":20 + * + * if g.shape[0] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_2 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_2); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_2)); + + /* "astropy/convolution/boundary_wrap.pyx":88 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_3 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_3); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_3)); + + /* "astropy/convolution/boundary_wrap.pyx":171 + * + * if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or 
g.shape[2] % 2 != 1: + * raise ValueError("Convolution kernel must have odd dimensions") # <<<<<<<<<<<<<< + * + * assert f.dtype == DTYPE and g.dtype == DTYPE + */ + __pyx_k_tuple_4 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_s_1)); if (unlikely(!__pyx_k_tuple_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 171; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_4); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_4)); + + /* "numpy.pxd":215 + * if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_C_CONTIGUOUS)): + * raise ValueError(u"ndarray is not C contiguous") # <<<<<<<<<<<<<< + * + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + */ + __pyx_k_tuple_6 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_5)); if (unlikely(!__pyx_k_tuple_6)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 215; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_6); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_6)); + + /* "numpy.pxd":219 + * if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS) + * and not PyArray_CHKFLAGS(self, NPY_F_CONTIGUOUS)): + * raise ValueError(u"ndarray is not Fortran contiguous") # <<<<<<<<<<<<<< + * + * info.buf = PyArray_DATA(self) + */ + __pyx_k_tuple_8 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_7)); if (unlikely(!__pyx_k_tuple_8)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_8); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_8)); + + /* "numpy.pxd":257 + * if ((descr.byteorder == c'>' and little_endian) or + * (descr.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * if t == NPY_BYTE: f = "b" + * elif t == NPY_UBYTE: f = "B" + */ + __pyx_k_tuple_10 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_9)); if (unlikely(!__pyx_k_tuple_10)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 257; __pyx_clineno 
= __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_10); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_10)); + + /* "numpy.pxd":799 + * + * if (end - f) - (new_offset - offset[0]) < 15: + * raise RuntimeError(u"Format string allocated too short, see comment in numpy.pxd") # <<<<<<<<<<<<<< + * + * if ((child.byteorder == c'>' and little_endian) or + */ + __pyx_k_tuple_13 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_12)); if (unlikely(!__pyx_k_tuple_13)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 799; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_13); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_13)); + + /* "numpy.pxd":803 + * if ((child.byteorder == c'>' and little_endian) or + * (child.byteorder == c'<' and not little_endian)): + * raise ValueError(u"Non-native byte order not supported") # <<<<<<<<<<<<<< + * # One could encode it in the format string and have Cython + * # complain instead, BUT: < and > in format strings also imply + */ + __pyx_k_tuple_14 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_9)); if (unlikely(!__pyx_k_tuple_14)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 803; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_14); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_14)); + + /* "numpy.pxd":823 + * t = child.type_num + * if end - f < 5: + * raise RuntimeError(u"Format string allocated too short.") # <<<<<<<<<<<<<< + * + * # Until ticket #99 is fixed, use integers to avoid warnings + */ + __pyx_k_tuple_16 = PyTuple_Pack(1, ((PyObject *)__pyx_kp_u_15)); if (unlikely(!__pyx_k_tuple_16)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 823; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_16); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_16)); + + /* "astropy/convolution/boundary_wrap.pyx":16 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_wrap(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * 
np.ndarray[DTYPE_t, ndim=1] g): + * + */ + __pyx_k_tuple_17 = PyTuple_Pack(16, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__iii), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_17)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_17); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_17)); + __pyx_k_codeobj_18 = (PyObject*)__Pyx_PyCode_New(2, 0, 16, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_19, 16, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_18)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + + /* "astropy/convolution/boundary_wrap.pyx":84 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_wrap(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g): + * + */ + __pyx_k_tuple_22 = PyTuple_Pack(24, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__ny), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__nky), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__wky), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__j), ((PyObject *)__pyx_n_s__iii), ((PyObject *)__pyx_n_s__jjj), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__jj), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__jjmin), 
((PyObject *)__pyx_n_s__jjmax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if (unlikely(!__pyx_k_tuple_22)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_22); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_22)); + __pyx_k_codeobj_23 = (PyObject*)__Pyx_PyCode_New(2, 0, 24, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_22, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_24, 84, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_23)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + + /* "astropy/convolution/boundary_wrap.pyx":167 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve3d_boundary_wrap(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g): + * + */ + __pyx_k_tuple_25 = PyTuple_Pack(32, ((PyObject *)__pyx_n_s__f), ((PyObject *)__pyx_n_s__g), ((PyObject *)__pyx_n_s__nx), ((PyObject *)__pyx_n_s__ny), ((PyObject *)__pyx_n_s__nz), ((PyObject *)__pyx_n_s__nkx), ((PyObject *)__pyx_n_s__nky), ((PyObject *)__pyx_n_s__nkz), ((PyObject *)__pyx_n_s__wkx), ((PyObject *)__pyx_n_s__wky), ((PyObject *)__pyx_n_s__wkz), ((PyObject *)__pyx_n_s__fixed), ((PyObject *)__pyx_n_s__conv), ((PyObject *)__pyx_n_s__i), ((PyObject *)__pyx_n_s__j), ((PyObject *)__pyx_n_s__k), ((PyObject *)__pyx_n_s__iii), ((PyObject *)__pyx_n_s__jjj), ((PyObject *)__pyx_n_s__kkk), ((PyObject *)__pyx_n_s__ii), ((PyObject *)__pyx_n_s__jj), ((PyObject *)__pyx_n_s__kk), ((PyObject *)__pyx_n_s__iimin), ((PyObject *)__pyx_n_s__iimax), ((PyObject *)__pyx_n_s__jjmin), ((PyObject *)__pyx_n_s__jjmax), ((PyObject *)__pyx_n_s__kkmin), ((PyObject *)__pyx_n_s__kkmax), ((PyObject *)__pyx_n_s__top), ((PyObject *)__pyx_n_s__bot), ((PyObject *)__pyx_n_s__ker), ((PyObject *)__pyx_n_s__val)); if 
(unlikely(!__pyx_k_tuple_25)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_k_tuple_25); + __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_25)); + __pyx_k_codeobj_26 = (PyObject*)__Pyx_PyCode_New(2, 0, 32, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_k_tuple_25, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_20, __pyx_n_s_27, 167, __pyx_empty_bytes); if (unlikely(!__pyx_k_codeobj_26)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + __pyx_int_15 = PyInt_FromLong(15); if (unlikely(!__pyx_int_15)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + return 0; + __pyx_L1_error:; + return -1; +} + +#if PY_MAJOR_VERSION < 3 +PyMODINIT_FUNC initboundary_wrap(void); /*proto*/ +PyMODINIT_FUNC initboundary_wrap(void) +#else +PyMODINIT_FUNC PyInit_boundary_wrap(void); /*proto*/ +PyMODINIT_FUNC PyInit_boundary_wrap(void) +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_REFNANNY + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); + if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); + } + #endif + __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit_boundary_wrap(void)", 0); + if ( __Pyx_check_binary_version() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) {__pyx_filename = 
__pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #ifdef __Pyx_CyFunction_USED + if (__Pyx_CyFunction_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4(__Pyx_NAMESTR("boundary_wrap"), __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (!PyDict_GetItemString(modules, "astropy.convolution.boundary_wrap")) { + if (unlikely(PyDict_SetItemString(modules, "astropy.convolution.boundary_wrap", __pyx_m) < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + } + } + #endif + __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME)); if (unlikely(!__pyx_b)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + /*--- Initialize various global constants etc. ---*/ + if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (__pyx_module_is_main_astropy__convolution__boundary_wrap) { + if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s____main__) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + } + /*--- Builtin init code ---*/ + if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Constants init code ---*/ + if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Global init code ---*/ + /*--- Variable export code ---*/ + /*--- Function export code ---*/ + /*--- Type init code ---*/ + /*--- Type import code ---*/ + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", + #if CYTHON_COMPILING_IN_PYPY + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 9; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_dtype = __Pyx_ImportType("numpy", "dtype", sizeof(PyArray_Descr), 0); if (unlikely(!__pyx_ptype_5numpy_dtype)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 155; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_flatiter = __Pyx_ImportType("numpy", "flatiter", sizeof(PyArrayIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_flatiter)) {__pyx_filename = 
__pyx_f[1]; __pyx_lineno = 165; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_broadcast = __Pyx_ImportType("numpy", "broadcast", sizeof(PyArrayMultiIterObject), 0); if (unlikely(!__pyx_ptype_5numpy_broadcast)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 169; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_ndarray = __Pyx_ImportType("numpy", "ndarray", sizeof(PyArrayObject), 0); if (unlikely(!__pyx_ptype_5numpy_ndarray)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 178; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_ptype_5numpy_ufunc = __Pyx_ImportType("numpy", "ufunc", sizeof(PyUFuncObject), 0); if (unlikely(!__pyx_ptype_5numpy_ufunc)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 861; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /*--- Variable import code ---*/ + /*--- Function import code ---*/ + /*--- Execution code ---*/ + + /* "astropy/convolution/boundary_wrap.pyx":3 + * # Licensed under a 3-clause BSD style license - see LICENSE.rst + * from __future__ import division + * import numpy as np # <<<<<<<<<<<<<< + * cimport numpy as np + * + */ + __pyx_t_1 = __Pyx_Import(((PyObject *)__pyx_n_s__numpy), 0, -1); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s__np, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":6 + * cimport numpy as np + * + * DTYPE = np.float # <<<<<<<<<<<<<< + * ctypedef np.float_t DTYPE_t + * + */ + __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__np); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__float); if (unlikely(!__pyx_t_2)) {__pyx_filename 
= __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyObject_SetAttr(__pyx_m, __pyx_n_s__DTYPE, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 6; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":16 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve1d_boundary_wrap(np.ndarray[DTYPE_t, ndim=1] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=1] g): + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_13boundary_wrap_1convolve1d_boundary_wrap, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_19, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":84 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def convolve2d_boundary_wrap(np.ndarray[DTYPE_t, ndim=2] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=2] g): + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_13boundary_wrap_3convolve2d_boundary_wrap, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_24, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 84; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":167 + * + * @cython.boundscheck(False) # turn off bounds-checking for entire function + * def 
convolve3d_boundary_wrap(np.ndarray[DTYPE_t, ndim=3] f, # <<<<<<<<<<<<<< + * np.ndarray[DTYPE_t, ndim=3] g): + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7astropy_11convolution_13boundary_wrap_5convolve3d_boundary_wrap, NULL, __pyx_n_s_21); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(__pyx_t_2); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_27, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "astropy/convolution/boundary_wrap.pyx":1 + * # Licensed under a 3-clause BSD style license - see LICENSE.rst # <<<<<<<<<<<<<< + * from __future__ import division + * import numpy as np + */ + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_GOTREF(((PyObject *)__pyx_t_2)); + if (PyObject_SetAttr(__pyx_m, __pyx_n_s____test__, ((PyObject *)__pyx_t_2)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; + + /* "numpy.pxd":975 + * arr.base = baseptr + * + * cdef inline object get_array_base(ndarray arr): # <<<<<<<<<<<<<< + * if arr.base is NULL: + * return None + */ + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + if (__pyx_m) { + __Pyx_AddTraceback("init astropy.convolution.boundary_wrap", __pyx_clineno, __pyx_lineno, __pyx_filename); + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init astropy.convolution.boundary_wrap"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if PY_MAJOR_VERSION < 3 + return; + #else + return __pyx_m; + #endif +} + +/* Runtime support code */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const 
char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif /* CYTHON_REFNANNY */ + +static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name) { + PyObject *result; + result = PyObject_GetAttr(dict, name); + if (!result) { + if (dict != __pyx_b) { + PyErr_Clear(); + result = PyObject_GetAttr(__pyx_b, name); + } + if (!result) { + PyErr_SetObject(PyExc_NameError, name); + } + } + return result; +} + +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%s() takes %s %" CYTHON_FORMAT_SSIZE_T "d positional argument%s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%s() got an unexpected keyword argument '%s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact) +{ + if (!type) { + PyErr_Format(PyExc_SystemError, "Missing type object"); + return 0; + } + if (none_allowed && obj == Py_None) return 1; + else if (exact) { + if (Py_TYPE(obj) == type) return 1; + } + else { + if (PyObject_TypeCheck(obj, type)) return 1; + } + PyErr_Format(PyExc_TypeError, + "Argument '%s' has incorrect type (expected %s, got %s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); + return 0; +} + +static CYTHON_INLINE int __Pyx_IsLittleEndian(void) { + unsigned int n = 1; + return *(unsigned char*)(&n) != 0; +} +static void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx, + __Pyx_BufFmt_StackElem* stack, 
+ __Pyx_TypeInfo* type) { + stack[0].field = &ctx->root; + stack[0].parent_offset = 0; + ctx->root.type = type; + ctx->root.name = "buffer dtype"; + ctx->root.offset = 0; + ctx->head = stack; + ctx->head->field = &ctx->root; + ctx->fmt_offset = 0; + ctx->head->parent_offset = 0; + ctx->new_packmode = '@'; + ctx->enc_packmode = '@'; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->is_complex = 0; + ctx->is_valid_array = 0; + ctx->struct_alignment = 0; + while (type->typegroup == 'S') { + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = 0; + type = type->fields->type; + } +} +static int __Pyx_BufFmt_ParseNumber(const char** ts) { + int count; + const char* t = *ts; + if (*t < '0' || *t > '9') { + return -1; + } else { + count = *t++ - '0'; + while (*t >= '0' && *t < '9') { + count *= 10; + count += *t++ - '0'; + } + } + *ts = t; + return count; +} +static int __Pyx_BufFmt_ExpectNumber(const char **ts) { + int number = __Pyx_BufFmt_ParseNumber(ts); + if (number == -1) /* First char was not a digit */ + PyErr_Format(PyExc_ValueError,\ + "Does not understand character buffer dtype format string ('%c')", **ts); + return number; +} +static void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) { + PyErr_Format(PyExc_ValueError, + "Unexpected format string character: '%c'", ch); +} +static const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) { + switch (ch) { + case 'c': return "'char'"; + case 'b': return "'signed char'"; + case 'B': return "'unsigned char'"; + case 'h': return "'short'"; + case 'H': return "'unsigned short'"; + case 'i': return "'int'"; + case 'I': return "'unsigned int'"; + case 'l': return "'long'"; + case 'L': return "'unsigned long'"; + case 'q': return "'long long'"; + case 'Q': return "'unsigned long long'"; + case 'f': return (is_complex ? "'complex float'" : "'float'"); + case 'd': return (is_complex ? "'complex double'" : "'double'"); + case 'g': return (is_complex ? 
"'complex long double'" : "'long double'"); + case 'T': return "a struct"; + case 'O': return "Python object"; + case 'P': return "a pointer"; + case 's': case 'p': return "a string"; + case 0: return "end"; + default: return "unparseable format string"; + } +} +static size_t __Pyx_BufFmt_TypeCharToStandardSize(char ch, int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return 2; + case 'i': case 'I': case 'l': case 'L': return 4; + case 'q': case 'Q': return 8; + case 'f': return (is_complex ? 8 : 4); + case 'd': return (is_complex ? 16 : 8); + case 'g': { + PyErr_SetString(PyExc_ValueError, "Python does not define a standard format string size for long double ('g').."); + return 0; + } + case 'O': case 'P': return sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +static size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) { + switch (ch) { + case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(short); + case 'i': case 'I': return sizeof(int); + case 'l': case 'L': return sizeof(long); + #ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(PY_LONG_LONG); + #endif + case 'f': return sizeof(float) * (is_complex ? 2 : 1); + case 'd': return sizeof(double) * (is_complex ? 2 : 1); + case 'g': return sizeof(long double) * (is_complex ? 
2 : 1); + case 'O': case 'P': return sizeof(void*); + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} +typedef struct { char c; short x; } __Pyx_st_short; +typedef struct { char c; int x; } __Pyx_st_int; +typedef struct { char c; long x; } __Pyx_st_long; +typedef struct { char c; float x; } __Pyx_st_float; +typedef struct { char c; double x; } __Pyx_st_double; +typedef struct { char c; long double x; } __Pyx_st_longdouble; +typedef struct { char c; void *x; } __Pyx_st_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong; +#endif +static size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_st_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_st_float) - sizeof(float); + case 'd': return sizeof(__Pyx_st_double) - sizeof(double); + case 'g': return sizeof(__Pyx_st_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_st_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +/* These are for computing the padding at the end of the struct to align + on the first member of the struct. This will probably the same as above, + but we don't have any guarantees. 
+ */ +typedef struct { short x; char c; } __Pyx_pad_short; +typedef struct { int x; char c; } __Pyx_pad_int; +typedef struct { long x; char c; } __Pyx_pad_long; +typedef struct { float x; char c; } __Pyx_pad_float; +typedef struct { double x; char c; } __Pyx_pad_double; +typedef struct { long double x; char c; } __Pyx_pad_longdouble; +typedef struct { void *x; char c; } __Pyx_pad_void_p; +#ifdef HAVE_LONG_LONG +typedef struct { PY_LONG_LONG x; char c; } __Pyx_pad_longlong; +#endif +static size_t __Pyx_BufFmt_TypeCharToPadding(char ch, CYTHON_UNUSED int is_complex) { + switch (ch) { + case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1; + case 'h': case 'H': return sizeof(__Pyx_pad_short) - sizeof(short); + case 'i': case 'I': return sizeof(__Pyx_pad_int) - sizeof(int); + case 'l': case 'L': return sizeof(__Pyx_pad_long) - sizeof(long); +#ifdef HAVE_LONG_LONG + case 'q': case 'Q': return sizeof(__Pyx_pad_longlong) - sizeof(PY_LONG_LONG); +#endif + case 'f': return sizeof(__Pyx_pad_float) - sizeof(float); + case 'd': return sizeof(__Pyx_pad_double) - sizeof(double); + case 'g': return sizeof(__Pyx_pad_longdouble) - sizeof(long double); + case 'P': case 'O': return sizeof(__Pyx_pad_void_p) - sizeof(void*); + default: + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } +} +static char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) { + switch (ch) { + case 'c': + return 'H'; + case 'b': case 'h': case 'i': + case 'l': case 'q': case 's': case 'p': + return 'I'; + case 'B': case 'H': case 'I': case 'L': case 'Q': + return 'U'; + case 'f': case 'd': case 'g': + return (is_complex ? 
'C' : 'R'); + case 'O': + return 'O'; + case 'P': + return 'P'; + default: { + __Pyx_BufFmt_RaiseUnexpectedChar(ch); + return 0; + } + } +} +static void __Pyx_BufFmt_RaiseExpected(__Pyx_BufFmt_Context* ctx) { + if (ctx->head == NULL || ctx->head->field == &ctx->root) { + const char* expected; + const char* quote; + if (ctx->head == NULL) { + expected = "end"; + quote = ""; + } else { + expected = ctx->head->field->type->name; + quote = "'"; + } + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected %s%s%s but got %s", + quote, expected, quote, + __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex)); + } else { + __Pyx_StructField* field = ctx->head->field; + __Pyx_StructField* parent = (ctx->head - 1)->field; + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch, expected '%s' but got %s in '%s.%s'", + field->type->name, __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex), + parent->type->name, field->name); + } +} +static int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) { + char group; + size_t size, offset, arraysize = 1; + if (ctx->enc_type == 0) return 0; + if (ctx->head->field->type->arraysize[0]) { + int i, ndim = 0; + if (ctx->enc_type == 's' || ctx->enc_type == 'p') { + ctx->is_valid_array = ctx->head->field->type->ndim == 1; + ndim = 1; + if (ctx->enc_count != ctx->head->field->type->arraysize[0]) { + PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %zu", + ctx->head->field->type->arraysize[0], ctx->enc_count); + return -1; + } + } + if (!ctx->is_valid_array) { + PyErr_Format(PyExc_ValueError, "Expected %d dimensions, got %d", + ctx->head->field->type->ndim, ndim); + return -1; + } + for (i = 0; i < ctx->head->field->type->ndim; i++) { + arraysize *= ctx->head->field->type->arraysize[i]; + } + ctx->is_valid_array = 0; + ctx->enc_count = 1; + } + group = __Pyx_BufFmt_TypeCharToGroup(ctx->enc_type, ctx->is_complex); + do { + __Pyx_StructField* field = ctx->head->field; + __Pyx_TypeInfo* 
type = field->type; + if (ctx->enc_packmode == '@' || ctx->enc_packmode == '^') { + size = __Pyx_BufFmt_TypeCharToNativeSize(ctx->enc_type, ctx->is_complex); + } else { + size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex); + } + if (ctx->enc_packmode == '@') { + size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex); + size_t align_mod_offset; + if (align_at == 0) return -1; + align_mod_offset = ctx->fmt_offset % align_at; + if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset; + if (ctx->struct_alignment == 0) + ctx->struct_alignment = __Pyx_BufFmt_TypeCharToPadding(ctx->enc_type, + ctx->is_complex); + } + if (type->size != size || type->typegroup != group) { + if (type->typegroup == 'C' && type->fields != NULL) { + size_t parent_offset = ctx->head->parent_offset + field->offset; + ++ctx->head; + ctx->head->field = type->fields; + ctx->head->parent_offset = parent_offset; + continue; + } + if ((type->typegroup == 'H' || group == 'H') && type->size == size) { + } else { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + } + offset = ctx->head->parent_offset + field->offset; + if (ctx->fmt_offset != offset) { + PyErr_Format(PyExc_ValueError, + "Buffer dtype mismatch; next field is at offset %" CYTHON_FORMAT_SSIZE_T "d but %" CYTHON_FORMAT_SSIZE_T "d expected", + (Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset); + return -1; + } + ctx->fmt_offset += size; + if (arraysize) + ctx->fmt_offset += (arraysize - 1) * size; + --ctx->enc_count; /* Consume from buffer string */ + while (1) { + if (field == &ctx->root) { + ctx->head = NULL; + if (ctx->enc_count != 0) { + __Pyx_BufFmt_RaiseExpected(ctx); + return -1; + } + break; /* breaks both loops as ctx->enc_count == 0 */ + } + ctx->head->field = ++field; + if (field->type == NULL) { + --ctx->head; + field = ctx->head->field; + continue; + } else if (field->type->typegroup == 'S') { + size_t parent_offset = ctx->head->parent_offset + field->offset; + 
if (field->type->fields->type == NULL) continue; /* empty struct */ + field = field->type->fields; + ++ctx->head; + ctx->head->field = field; + ctx->head->parent_offset = parent_offset; + break; + } else { + break; + } + } + } while (ctx->enc_count); + ctx->enc_type = 0; + ctx->is_complex = 0; + return 0; +} +static CYTHON_INLINE PyObject * +__pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp) +{ + const char *ts = *tsp; + int i = 0, number; + int ndim = ctx->head->field->type->ndim; +; + ++ts; + if (ctx->new_count != 1) { + PyErr_SetString(PyExc_ValueError, + "Cannot handle repeated arrays in format string"); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + while (*ts && *ts != ')') { + if (isspace(*ts)) + continue; + number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + if (i < ndim && (size_t) number != ctx->head->field->type->arraysize[i]) + return PyErr_Format(PyExc_ValueError, + "Expected a dimension of size %zu, got %d", + ctx->head->field->type->arraysize[i], number); + if (*ts != ',' && *ts != ')') + return PyErr_Format(PyExc_ValueError, + "Expected a comma in format string, got '%c'", *ts); + if (*ts == ',') ts++; + i++; + } + if (i != ndim) + return PyErr_Format(PyExc_ValueError, "Expected %d dimension(s), got %d", + ctx->head->field->type->ndim, i); + if (!*ts) { + PyErr_SetString(PyExc_ValueError, + "Unexpected end of format string, expected ')'"); + return NULL; + } + ctx->is_valid_array = 1; + ctx->new_count = 1; + *tsp = ++ts; + return Py_None; +} +static const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts) { + int got_Z = 0; + while (1) { + switch(*ts) { + case 0: + if (ctx->enc_type != 0 && ctx->head == NULL) { + __Pyx_BufFmt_RaiseExpected(ctx); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + if (ctx->head != NULL) { + __Pyx_BufFmt_RaiseExpected(ctx); + return NULL; + } + return ts; + case ' ': + case 10: + case 13: 
+ ++ts; + break; + case '<': + if (!__Pyx_IsLittleEndian()) { + PyErr_SetString(PyExc_ValueError, "Little-endian buffer not supported on big-endian compiler"); + return NULL; + } + ctx->new_packmode = '='; + ++ts; + break; + case '>': + case '!': + if (__Pyx_IsLittleEndian()) { + PyErr_SetString(PyExc_ValueError, "Big-endian buffer not supported on little-endian compiler"); + return NULL; + } + ctx->new_packmode = '='; + ++ts; + break; + case '=': + case '@': + case '^': + ctx->new_packmode = *ts++; + break; + case 'T': /* substruct */ + { + const char* ts_after_sub; + size_t i, struct_count = ctx->new_count; + size_t struct_alignment = ctx->struct_alignment; + ctx->new_count = 1; + ++ts; + if (*ts != '{') { + PyErr_SetString(PyExc_ValueError, "Buffer acquisition: Expected '{' after 'T'"); + return NULL; + } + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; /* Erase processed last struct element */ + ctx->enc_count = 0; + ctx->struct_alignment = 0; + ++ts; + ts_after_sub = ts; + for (i = 0; i != struct_count; ++i) { + ts_after_sub = __Pyx_BufFmt_CheckString(ctx, ts); + if (!ts_after_sub) return NULL; + } + ts = ts_after_sub; + if (struct_alignment) ctx->struct_alignment = struct_alignment; + } + break; + case '}': /* end of substruct; either repeat or move on */ + { + size_t alignment = ctx->struct_alignment; + ++ts; + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_type = 0; /* Erase processed last struct element */ + if (alignment && ctx->fmt_offset % alignment) { + ctx->fmt_offset += alignment - (ctx->fmt_offset % alignment); + } + } + return ts; + case 'x': + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->fmt_offset += ctx->new_count; + ctx->new_count = 1; + ctx->enc_count = 0; + ctx->enc_type = 0; + ctx->enc_packmode = ctx->new_packmode; + ++ts; + break; + case 'Z': + got_Z = 1; + ++ts; + if (*ts != 'f' && *ts != 'd' && *ts != 'g') { + __Pyx_BufFmt_RaiseUnexpectedChar('Z'); + return NULL; 
+ } /* fall through */ + case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I': + case 'l': case 'L': case 'q': case 'Q': + case 'f': case 'd': case 'g': + case 'O': case 's': case 'p': + if (ctx->enc_type == *ts && got_Z == ctx->is_complex && + ctx->enc_packmode == ctx->new_packmode) { + ctx->enc_count += ctx->new_count; + } else { + if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL; + ctx->enc_count = ctx->new_count; + ctx->enc_packmode = ctx->new_packmode; + ctx->enc_type = *ts; + ctx->is_complex = got_Z; + } + ++ts; + ctx->new_count = 1; + got_Z = 0; + break; + case ':': + ++ts; + while(*ts != ':') ++ts; + ++ts; + break; + case '(': + if (!__pyx_buffmt_parse_array(ctx, &ts)) return NULL; + break; + default: + { + int number = __Pyx_BufFmt_ExpectNumber(&ts); + if (number == -1) return NULL; + ctx->new_count = (size_t)number; + } + } + } +} +static CYTHON_INLINE void __Pyx_ZeroBuffer(Py_buffer* buf) { + buf->buf = NULL; + buf->obj = NULL; + buf->strides = __Pyx_zeros; + buf->shape = __Pyx_zeros; + buf->suboffsets = __Pyx_minusones; +} +static CYTHON_INLINE int __Pyx_GetBufferAndValidate( + Py_buffer* buf, PyObject* obj, __Pyx_TypeInfo* dtype, int flags, + int nd, int cast, __Pyx_BufFmt_StackElem* stack) +{ + if (obj == Py_None || obj == NULL) { + __Pyx_ZeroBuffer(buf); + return 0; + } + buf->buf = NULL; + if (__Pyx_GetBuffer(obj, buf, flags) == -1) goto fail; + if (buf->ndim != nd) { + PyErr_Format(PyExc_ValueError, + "Buffer has wrong number of dimensions (expected %d, got %d)", + nd, buf->ndim); + goto fail; + } + if (!cast) { + __Pyx_BufFmt_Context ctx; + __Pyx_BufFmt_Init(&ctx, stack, dtype); + if (!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail; + } + if ((unsigned)buf->itemsize != dtype->size) { + PyErr_Format(PyExc_ValueError, + "Item size of buffer (%" CYTHON_FORMAT_SSIZE_T "d byte%s) does not match size of '%s' (%" CYTHON_FORMAT_SSIZE_T "d byte%s)", + buf->itemsize, (buf->itemsize > 1) ? 
"s" : "", + dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? "s" : ""); + goto fail; + } + if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones; + return 0; +fail:; + __Pyx_ZeroBuffer(buf); + return -1; +} +static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) { + if (info->buf == NULL) return; + if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL; + __Pyx_ReleaseBuffer(info); +} + +static CYTHON_INLINE long __Pyx_mod_long(long a, long b) { + long r = a % b; + r += ((r != 0) & ((r ^ b) < 0)) * b; + return r; +} + +static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb) { +#if CYTHON_COMPILING_IN_CPYTHON + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyThreadState *tstate = PyThreadState_GET(); + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_Restore(type, value, tb); +#endif +} +static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) { +#if CYTHON_COMPILING_IN_CPYTHON + PyThreadState *tstate = PyThreadState_GET(); + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(type, value, tb); +#endif +} + +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + #if PY_VERSION_HEX < 0x02050000 + if 
(PyClass_Check(type)) { + #else + if (PyType_Check(type)) { + #endif +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + #if PY_VERSION_HEX < 0x02050000 + if (PyInstance_Check(type)) { + type = (PyObject*) ((PyInstanceObject*)type)->in_class; + Py_INCREF(type); + } + else { + type = 0; + PyErr_SetString(PyExc_TypeError, + "raise: exception must be an old-style class or instance"); + goto raise_error; + } + #else + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + #endif + } + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else /* Python 3+ */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } + else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyEval_CallObject(type, args); + Py_DECREF(args); + if (!owned_instance) + goto 
bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause && cause != Py_None) { + PyObject *fixed_cause; + if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } + else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } + else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { + PyThreadState *tstate = PyThreadState_GET(); + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +static CYTHON_INLINE long __Pyx_div_long(long a, long b) { + long q = a / b; + long r = a - q*b; + q -= ((r != 0) & ((r ^ b) < 0)); + return q; +} + +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_Format(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(PyObject_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +static CYTHON_INLINE int __Pyx_mod_int(int a, int b) { + int r = a % b; + r += ((r != 0) & ((r ^ b) < 0)) * b; + return r; +} + +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +static CYTHON_INLINE void 
__Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%s to unpack", + index, (index == 1) ? "" : "s"); +} + +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_COMPILING_IN_CPYTHON + PyThreadState *tstate = PyThreadState_GET(); + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +#if PY_MAJOR_VERSION < 3 +static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) { + CYTHON_UNUSED PyObject *getbuffer_cobj; + #if PY_VERSION_HEX >= 0x02060000 + if (PyObject_CheckBuffer(obj)) return PyObject_GetBuffer(obj, view, flags); + #endif + if (PyObject_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) return __pyx_pw_5numpy_7ndarray_1__getbuffer__(obj, view, flags); + #if PY_VERSION_HEX < 0x02060000 + if (obj->ob_type->tp_dict && + (getbuffer_cobj = PyMapping_GetItemString(obj->ob_type->tp_dict, + "__pyx_getbuffer"))) { + getbufferproc func; + #if PY_VERSION_HEX >= 
0x02070000 && !(PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION == 0) + func = (getbufferproc) PyCapsule_GetPointer(getbuffer_cobj, "getbuffer(obj, view, flags)"); + #else + func = (getbufferproc) PyCObject_AsVoidPtr(getbuffer_cobj); + #endif + Py_DECREF(getbuffer_cobj); + if (!func) + goto fail; + return func(obj, view, flags); + } else { + PyErr_Clear(); + } + #endif + PyErr_Format(PyExc_TypeError, "'%100s' does not have the buffer interface", Py_TYPE(obj)->tp_name); +#if PY_VERSION_HEX < 0x02060000 +fail: +#endif + return -1; +} +static void __Pyx_ReleaseBuffer(Py_buffer *view) { + PyObject *obj = view->obj; + CYTHON_UNUSED PyObject *releasebuffer_cobj; + if (!obj) return; + #if PY_VERSION_HEX >= 0x02060000 + if (PyObject_CheckBuffer(obj)) { + PyBuffer_Release(view); + return; + } + #endif + if (PyObject_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) { __pyx_pw_5numpy_7ndarray_3__releasebuffer__(obj, view); return; } + #if PY_VERSION_HEX < 0x02060000 + if (obj->ob_type->tp_dict && + (releasebuffer_cobj = PyMapping_GetItemString(obj->ob_type->tp_dict, + "__pyx_releasebuffer"))) { + releasebufferproc func; + #if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION == 0) + func = (releasebufferproc) PyCapsule_GetPointer(releasebuffer_cobj, "releasebuffer(obj, view)"); + #else + func = (releasebufferproc) PyCObject_AsVoidPtr(releasebuffer_cobj); + #endif + Py_DECREF(releasebuffer_cobj); + if (!func) + goto fail; + func(obj, view); + return; + } else { + PyErr_Clear(); + } + #endif + goto nofail; +#if PY_VERSION_HEX < 0x02060000 +fail: +#endif + PyErr_WriteUnraisable(obj); +nofail: + Py_DECREF(obj); + view->obj = NULL; +} +#endif /* PY_MAJOR_VERSION < 3 */ + + + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_import = 0; + py_import = 
__Pyx_GetAttrString(__pyx_b, "__import__"); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + #if PY_VERSION_HEX >= 0x02050000 + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(1); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + #endif + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; /* try absolute import on failure */ + } + #endif + if (!module) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } + #else + if (level>0) { + PyErr_SetString(PyExc_RuntimeError, "Relative import is not supported for Python <=2.4."); + goto bad; + } + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, NULL); + #endif +bad: + #if PY_VERSION_HEX < 0x03030000 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + return ::std::complex< float >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_float_complex 
__pyx_t_float_complex_from_parts(float x, float y) { + return x + y*(__pyx_t_float_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) { + __pyx_t_float_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +#if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eqf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sumf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_difff(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prodf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quotf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float denom = b.real * b.real + b.imag * b.imag; + z.real = (a.real * b.real + a.imag * b.imag) / denom; + z.imag = (a.imag * b.real - a.real * b.imag) / denom; + return z; + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_negf(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zerof(__pyx_t_float_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conjf(__pyx_t_float_complex a) { + __pyx_t_float_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE float __Pyx_c_absf(__pyx_t_float_complex z) { + #if 
!defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrtf(z.real*z.real + z.imag*z.imag); + #else + return hypotf(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_powf(__pyx_t_float_complex a, __pyx_t_float_complex b) { + __pyx_t_float_complex z; + float r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + float denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(a, a); + case 3: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(z, a); + case 4: + z = __Pyx_c_prodf(a, a); + return __Pyx_c_prodf(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } + r = a.real; + theta = 0; + } else { + r = __Pyx_c_absf(a); + theta = atan2f(a.imag, a.real); + } + lnr = logf(r); + z_r = expf(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cosf(z_theta); + z.imag = z_r * sinf(z_theta); + return z; + } + #endif +#endif + +#if CYTHON_CCOMPLEX + #ifdef __cplusplus + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return ::std::complex< double >(x, y); + } + #else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + return x + y*(__pyx_t_double_complex)_Complex_I; + } + #endif +#else + static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) { + __pyx_t_double_complex z; + z.real = x; + z.imag = y; + return z; + } +#endif + +#if CYTHON_CCOMPLEX +#else + static CYTHON_INLINE int __Pyx_c_eq(__pyx_t_double_complex a, __pyx_t_double_complex b) { + return (a.real == b.real) && (a.imag == b.imag); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum(__pyx_t_double_complex a, 
__pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real + b.real; + z.imag = a.imag + b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real - b.real; + z.imag = a.imag - b.imag; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + z.real = a.real * b.real - a.imag * b.imag; + z.imag = a.real * b.imag + a.imag * b.real; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double denom = b.real * b.real + b.imag * b.imag; + z.real = (a.real * b.real + a.imag * b.imag) / denom; + z.imag = (a.imag * b.real - a.real * b.imag) / denom; + return z; + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = -a.real; + z.imag = -a.imag; + return z; + } + static CYTHON_INLINE int __Pyx_c_is_zero(__pyx_t_double_complex a) { + return (a.real == 0) && (a.imag == 0); + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj(__pyx_t_double_complex a) { + __pyx_t_double_complex z; + z.real = a.real; + z.imag = -a.imag; + return z; + } + #if 1 + static CYTHON_INLINE double __Pyx_c_abs(__pyx_t_double_complex z) { + #if !defined(HAVE_HYPOT) || defined(_MSC_VER) + return sqrt(z.real*z.real + z.imag*z.imag); + #else + return hypot(z.real, z.imag); + #endif + } + static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow(__pyx_t_double_complex a, __pyx_t_double_complex b) { + __pyx_t_double_complex z; + double r, lnr, theta, z_r, z_theta; + if (b.imag == 0 && b.real == (int)b.real) { + if (b.real < 0) { + double denom = a.real * a.real + a.imag * a.imag; + a.real = a.real / denom; + a.imag = -a.imag / denom; + b.real = -b.real; + } + switch ((int)b.real) { + case 
0: + z.real = 1; + z.imag = 0; + return z; + case 1: + return a; + case 2: + z = __Pyx_c_prod(a, a); + return __Pyx_c_prod(a, a); + case 3: + z = __Pyx_c_prod(a, a); + return __Pyx_c_prod(z, a); + case 4: + z = __Pyx_c_prod(a, a); + return __Pyx_c_prod(z, z); + } + } + if (a.imag == 0) { + if (a.real == 0) { + return a; + } + r = a.real; + theta = 0; + } else { + r = __Pyx_c_abs(a); + theta = atan2(a.imag, a.real); + } + lnr = log(r); + z_r = exp(lnr * b.real - theta * b.imag); + z_theta = theta * b.real + lnr * b.imag; + z.real = z_r * cos(z_theta); + z.imag = z_r * sin(z_theta); + return z; + } + #endif +#endif + +static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject* x) { + const unsigned char neg_one = (unsigned char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to unsigned char" : + "value too large to convert to unsigned char"); + } + return (unsigned char)-1; + } + return (unsigned char)val; + } + return (unsigned char)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject* x) { + const unsigned short neg_one = (unsigned short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to unsigned short" : + "value too large to convert to unsigned short"); + } + return (unsigned short)-1; + } + return (unsigned short)val; + } + return (unsigned short)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject* x) { + const unsigned int neg_one = (unsigned int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(unsigned int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(unsigned int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to unsigned int" : + "value too large to convert to unsigned int"); + } + return (unsigned int)-1; + } + return (unsigned int)val; + } + return (unsigned int)__Pyx_PyInt_AsUnsignedLong(x); +} + +static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject* x) { + const char neg_one = (char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to char" : + "value too large to convert to char"); + } + return (char)-1; + } + return (char)val; + } + return (char)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject* x) { + const short neg_one = (short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to short" : + "value too large to convert to short"); + } + return (short)-1; + } + return (short)val; + } + return (short)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject* x) { + const int neg_one = (int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to int" : + "value too large to convert to int"); + } + return (int)-1; + } + return (int)val; + } + return (int)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject* x) { + const signed char neg_one = (signed char)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed char) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed char)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to signed char" : + "value too large to convert to signed char"); + } + return (signed char)-1; + } + return (signed char)val; + } + return (signed char)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject* x) { + const signed short neg_one = (signed short)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed short) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed short)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to signed short" : + "value too large to convert to signed short"); + } + return (signed short)-1; + } + return (signed short)val; + } + return (signed short)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject* x) { + const signed int neg_one = (signed int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(signed int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(signed int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? + "can't convert negative value to signed int" : + "value too large to convert to signed int"); + } + return (signed int)-1; + } + return (signed int)val; + } + return (signed int)__Pyx_PyInt_AsSignedLong(x); +} + +static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject* x) { + const int neg_one = (int)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; + if (sizeof(int) < sizeof(long)) { + long val = __Pyx_PyInt_AsLong(x); + if (unlikely(val != (long)(int)val)) { + if (!unlikely(val == -1 && PyErr_Occurred())) { + PyErr_SetString(PyExc_OverflowError, + (is_unsigned && unlikely(val < 0)) ? 
+ "can't convert negative value to int" : + "value too large to convert to int"); + } + return (int)-1; + } + return (int)val; + } + return (int)__Pyx_PyInt_AsLong(x); +} + +static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject* x) { + const unsigned long neg_one = (unsigned long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned long"); + return (unsigned long)-1; + } + return (unsigned long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned long"); + return (unsigned long)-1; + } + return (unsigned long)PyLong_AsUnsignedLong(x); + } else { + return (unsigned long)PyLong_AsLong(x); + } + } else { + unsigned long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (unsigned long)-1; + val = __Pyx_PyInt_AsUnsignedLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject* x) { + const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned PY_LONG_LONG"); + return (unsigned PY_LONG_LONG)-1; + } + return (unsigned PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to unsigned PY_LONG_LONG"); + return (unsigned PY_LONG_LONG)-1; + } + return (unsigned 
PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return (unsigned PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + unsigned PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (unsigned PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsUnsignedLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject* x) { + const long neg_one = (long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long)-1; + } + return (long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long)-1; + } + return (long)PyLong_AsUnsignedLong(x); + } else { + return (long)PyLong_AsLong(x); + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (long)-1; + val = __Pyx_PyInt_AsLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject* x) { + const PY_LONG_LONG neg_one = (PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to PY_LONG_LONG"); + return (PY_LONG_LONG)-1; + } + return (PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to PY_LONG_LONG"); + return (PY_LONG_LONG)-1; + } + return (PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return 
(PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject* x) { + const signed long neg_one = (signed long)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed long"); + return (signed long)-1; + } + return (signed long)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed long"); + return (signed long)-1; + } + return (signed long)PyLong_AsUnsignedLong(x); + } else { + return (signed long)PyLong_AsLong(x); + } + } else { + signed long val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (signed long)-1; + val = __Pyx_PyInt_AsSignedLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject* x) { + const signed PY_LONG_LONG neg_one = (signed PY_LONG_LONG)-1, const_zero = 0; + const int is_unsigned = neg_one > const_zero; +#if PY_VERSION_HEX < 0x03000000 + if (likely(PyInt_Check(x))) { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed PY_LONG_LONG"); + return (signed PY_LONG_LONG)-1; + } + return (signed PY_LONG_LONG)val; + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { + if (unlikely(Py_SIZE(x) < 0)) { + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to signed PY_LONG_LONG"); + return (signed PY_LONG_LONG)-1; + } + return (signed 
PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); + } else { + return (signed PY_LONG_LONG)PyLong_AsLongLong(x); + } + } else { + signed PY_LONG_LONG val; + PyObject *tmp = __Pyx_PyNumber_Int(x); + if (!tmp) return (signed PY_LONG_LONG)-1; + val = __Pyx_PyInt_AsSignedLongLong(tmp); + Py_DECREF(tmp); + return val; + } +} + +static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + #if PY_VERSION_HEX < 0x02050000 + return PyErr_Warn(NULL, message); + #else + return PyErr_WarnEx(NULL, message, 1); + #endif + } + return 0; +} + +#ifndef __PYX_HAVE_RT_ImportModule +#define __PYX_HAVE_RT_ImportModule +static PyObject *__Pyx_ImportModule(const char *name) { + PyObject *py_name = 0; + PyObject *py_module = 0; + py_name = __Pyx_PyIdentifier_FromString(name); + if (!py_name) + goto bad; + py_module = PyImport_Import(py_name); + Py_DECREF(py_name); + return py_module; +bad: + Py_XDECREF(py_name); + return 0; +} +#endif + +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, + size_t size, int strict) +{ + PyObject *py_module = 0; + PyObject *result = 0; + PyObject *py_name = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + py_module = __Pyx_ImportModule(module_name); + if (!py_module) + goto bad; + py_name = __Pyx_PyIdentifier_FromString(class_name); + if (!py_name) + goto bad; + result = PyObject_GetAttr(py_module, py_name); + Py_DECREF(py_name); + py_name = 0; + Py_DECREF(py_module); + py_module = 0; + if 
(!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%s.%s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (!strict && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility", + module_name, class_name); + #if PY_VERSION_HEX < 0x02050000 + if (PyErr_Warn(NULL, warning) < 0) goto bad; + #else + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + #endif + } + else if ((size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%s.%s has the wrong size, try recompiling", + module_name, class_name); + goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(py_module); + Py_XDECREF(result); + return NULL; +} +#endif + +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = (start + end) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || 
unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + 
py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, /*int argcount,*/ + 0, /*int kwonlyargcount,*/ + 0, /*int nlocals,*/ + 0, /*int stacksize,*/ + 0, /*int flags,*/ + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, /*int firstlineno,*/ + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_globals = 0; + PyFrameObject *py_frame = 0; + py_code = __pyx_find_code_object(c_line ? c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
c_line : py_line, py_code); + } + py_globals = PyModule_GetDict(__pyx_m); + if (!py_globals) goto bad; + py_frame = PyFrame_New( + PyThreadState_GET(), /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + py_globals, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + py_frame->f_lineno = py_line; + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else /* Python 3+ has unicode identifiers */ + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) { + PyNumberMethods *m; + const char *name = NULL; + PyObject *res = NULL; +#if PY_VERSION_HEX < 0x03000000 + if (PyInt_Check(x) || PyLong_Check(x)) +#else + if (PyLong_Check(x)) +#endif + return Py_INCREF(x), x; + m = Py_TYPE(x)->tp_as_number; +#if PY_VERSION_HEX < 0x03000000 + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = PyNumber_Long(x); + } +#else + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Long(x); + } +#endif + if (res) { +#if PY_VERSION_HEX < 0x03000000 + 
if (!PyInt_Check(res) && !PyLong_Check(res)) { +#else + if (!PyLong_Check(res)) { +#endif + PyErr_Format(PyExc_TypeError, + "__%s__ returned non-%s (type %.200s)", + name, name, Py_TYPE(res)->tp_name); + Py_DECREF(res); + return NULL; + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject* x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { +#if PY_VERSION_HEX < 0x02050000 + if (ival <= LONG_MAX) + return PyInt_FromLong((long)ival); + else { + unsigned char *bytes = (unsigned char *) &ival; + int one = 1; int little = (int)*(unsigned char*)&one; + return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0); + } +#else + return PyInt_FromSize_t(ival); +#endif +} +static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject* x) { + unsigned PY_LONG_LONG val = __Pyx_PyInt_AsUnsignedLongLong(x); + if (unlikely(val == (unsigned PY_LONG_LONG)-1 && PyErr_Occurred())) { + return (size_t)-1; + } else if (unlikely(val != (unsigned PY_LONG_LONG)(size_t)val)) { + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to size_t"); + return (size_t)-1; + } + return (size_t)val; +} + + +#endif /* Py_PYTHON_H */ diff --git a/astropy/convolution/boundary_wrap.pyx b/astropy/convolution/boundary_wrap.pyx new file mode 100644 index 0000000..6ff5793 --- /dev/null +++ b/astropy/convolution/boundary_wrap.pyx @@ -0,0 +1,261 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import division +import numpy as np +cimport numpy as np + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + +cdef extern from "numpy/npy_math.h": + bint npy_isnan(double x) + +cimport cython + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def 
convolve1d_boundary_wrap(np.ndarray[DTYPE_t, ndim=1] f, + np.ndarray[DTYPE_t, ndim=1] g): + + if g.shape[0] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int nkx = g.shape[0] + cdef int wkx = nkx // 2 + cdef np.ndarray[DTYPE_t, ndim=1] fixed = np.empty([nx], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=1] conv = np.empty([nx], dtype=DTYPE) + cdef unsigned int i, iii + cdef int ii + + cdef int iimin, iimax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + if npy_isnan(f[i]): + top = 0. + bot = 0. + iimin = i - wkx + iimax = i + wkx + 1 + for ii in range(iimin, iimax): + iii = ii % nx + val = f[iii] + if not npy_isnan(val): + ker = g[(wkx + ii - i)] + top += val * ker + bot += ker + + if bot != 0.: + fixed[i] = top / bot + else: + fixed[i] = f[i] + else: + fixed[i] = f[i] + + # Now run the proper convolution + for i in range(nx): + if not npy_isnan(fixed[i]): + top = 0. + bot = 0. 
+ iimin = i - wkx + iimax = i + wkx + 1 + for ii in range(iimin, iimax): + iii = ii % nx + val = fixed[iii] + ker = g[(wkx + ii - i)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i] = top / bot + else: + conv[i] = fixed[i] + else: + conv[i] = fixed[i] + + return conv + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def convolve2d_boundary_wrap(np.ndarray[DTYPE_t, ndim=2] f, + np.ndarray[DTYPE_t, ndim=2] g): + + if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int ny = f.shape[1] + cdef int nkx = g.shape[0] + cdef int nky = g.shape[1] + cdef int wkx = nkx // 2 + cdef int wky = nky // 2 + cdef np.ndarray[DTYPE_t, ndim=2] fixed = np.empty([nx, ny], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=2] conv = np.empty([nx, ny], dtype=DTYPE) + cdef unsigned int i, j, iii, jjj + cdef int ii, jj + + cdef int iimin, iimax, jjmin, jjmax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + for j in range(ny): + if npy_isnan(f[i, j]): + top = 0. + bot = 0. + iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + iii = ii % nx + jjj = jj % ny + val = f[iii, jjj] + if not npy_isnan(val): + ker = g[(wkx + ii - i), + (wky + jj - j)] + top += val * ker + bot += ker + + if bot != 0.: + fixed[i, j] = top / bot + else: + fixed[i, j] = f[i, j] + else: + fixed[i, j] = f[i, j] + + # Now run the proper convolution + for i in range(nx): + for j in range(ny): + if not npy_isnan(fixed[i, j]): + top = 0. + bot = 0. 
+ iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + iii = ii % nx + jjj = jj % ny + val = fixed[iii, jjj] + ker = g[(wkx + ii - i), + (wky + jj - j)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i, j] = top / bot + else: + conv[i, j] = fixed[i, j] + else: + conv[i, j] = fixed[i, j] + + return conv + + +@cython.boundscheck(False) # turn off bounds-checking for entire function +def convolve3d_boundary_wrap(np.ndarray[DTYPE_t, ndim=3] f, + np.ndarray[DTYPE_t, ndim=3] g): + + if g.shape[0] % 2 != 1 or g.shape[1] % 2 != 1 or g.shape[2] % 2 != 1: + raise ValueError("Convolution kernel must have odd dimensions") + + assert f.dtype == DTYPE and g.dtype == DTYPE + + cdef int nx = f.shape[0] + cdef int ny = f.shape[1] + cdef int nz = f.shape[2] + cdef int nkx = g.shape[0] + cdef int nky = g.shape[1] + cdef int nkz = g.shape[2] + cdef int wkx = nkx // 2 + cdef int wky = nky // 2 + cdef int wkz = nkz // 2 + cdef np.ndarray[DTYPE_t, ndim=3] fixed = np.empty([nx, ny, nz], dtype=DTYPE) + cdef np.ndarray[DTYPE_t, ndim=3] conv = np.empty([nx, ny, nz], dtype=DTYPE) + cdef unsigned int i, j, k, iii, jjj, kkk + cdef int ii, jj, kk + + cdef int iimin, iimax, jjmin, jjmax, kkmin, kkmax + + cdef DTYPE_t top, bot, ker, val + + # Need a first pass to replace NaN values with value convolved from + # neighboring values + for i in range(nx): + for j in range(ny): + for k in range(nz): + if npy_isnan(f[i, j, k]): + top = 0. + bot = 0. 
+ iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + kkmin = k - wkz + kkmax = k + wkz + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + for kk in range(kkmin, kkmax): + iii = ii % nx + jjj = jj % ny + kkk = kk % nz + val = f[iii, jjj, kkk] + if not npy_isnan(val): + ker = g[(wkx + ii - i), + (wky + jj - j), + (wkz + kk - k)] + top += val * ker + bot += ker + + if bot != 0.: + fixed[i, j, k] = top / bot + else: + fixed[i, j, k] = f[i, j, k] + else: + fixed[i, j, k] = f[i, j, k] + + # Now run the proper convolution + for i in range(nx): + for j in range(ny): + for k in range(nz): + if not npy_isnan(fixed[i, j, k]): + top = 0. + bot = 0. + iimin = i - wkx + iimax = i + wkx + 1 + jjmin = j - wky + jjmax = j + wky + 1 + kkmin = k - wkz + kkmax = k + wkz + 1 + for ii in range(iimin, iimax): + for jj in range(jjmin, jjmax): + for kk in range(kkmin, kkmax): + iii = ii % nx + jjj = jj % ny + kkk = kk % nz + val = fixed[iii, jjj, kkk] + ker = g[(wkx + ii - i), + (wky + jj - j), + (wkz + kk - k)] + if not npy_isnan(val): + top += val * ker + bot += ker + if bot != 0: + conv[i, j, k] = top / bot + else: + conv[i, j, k] = fixed[i, j, k] + else: + conv[i, j, k] = fixed[i, j, k] + + return conv diff --git a/astropy/convolution/convolve.py b/astropy/convolution/convolve.py new file mode 100644 index 0000000..426200b --- /dev/null +++ b/astropy/convolution/convolve.py @@ -0,0 +1,565 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import warnings + +import numpy as np + +from .core import Kernel, Kernel1D, Kernel2D, MAX_NORMALIZATION +from ..utils.exceptions import AstropyUserWarning +from ..utils.console import human_file_size + + +# Disabling all doctests in this module until a better way of handling warnings +# in doctests can be determined +__doctest_skip__ = ['*'] + + +def convolve(array, kernel, boundary='fill', 
fill_value=0., + normalize_kernel=False): + ''' + Convolve an array with a kernel. + + This routine differs from `scipy.ndimage.filters.convolve` because + it includes a special treatment for ``NaN`` values. Rather than + including ``NaN``s in the convolution calculation, which causes large + ``NaN`` holes in the convolved image, ``NaN`` values are replaced with + interpolated values using the kernel as an interpolation function. + + Parameters + ---------- + array : `numpy.ndarray` + The array to convolve. This should be a 1, 2, or 3-dimensional array + or a list or a set of nested lists representing a 1, 2, or + 3-dimensional array. + kernel : `numpy.ndarray` or `~astropy.convolution.Kernel` + The convolution kernel. The number of dimensions should match those + for the array, and the dimensions should be odd in all directions. + boundary : str, optional + A flag indicating how to handle boundaries: + * `None` + Set the ``result`` values to zero where the kernel + extends beyond the edge of the array (default). + * 'fill' + Set values outside the array boundary to ``fill_value``. + * 'wrap' + Periodic boundary that wrap to the other side of ``array``. + * 'extend' + Set values outside the array to the nearest ``array`` + value. + fill_value : float, optional + The value to use outside the array when using boundary='fill' + normalize_kernel : bool, optional + Whether to normalize the kernel prior to convolving + + Returns + ------- + result : `numpy.ndarray` + An array with the same dimensions and as the input array, + convolved with kernel. The data type depends on the input + array type. If array is a floating point type, then the + return array keeps the same data type, otherwise the type + is ``numpy.float``. + + Notes + ----- + Masked arrays are not supported at this time. The convolution + is always done at ``numpy.float`` precision. 
+ ''' + from .boundary_none import (convolve1d_boundary_none, + convolve2d_boundary_none, + convolve3d_boundary_none) + + from .boundary_extend import (convolve1d_boundary_extend, + convolve2d_boundary_extend, + convolve3d_boundary_extend) + + from .boundary_fill import (convolve1d_boundary_fill, + convolve2d_boundary_fill, + convolve3d_boundary_fill) + + from .boundary_wrap import (convolve1d_boundary_wrap, + convolve2d_boundary_wrap, + convolve3d_boundary_wrap) + + # The cython routines all need float type inputs (so, a particular + # bit size, endianness, etc.). So we have to convert, which also + # has the effect of making copies so we don't modify the inputs. + # After this, the variables we work with will be array_internal, and + # kernel_internal. However -- we do want to keep track of what type + # the input array was so we can cast the result to that at the end + # if it's a floating point type. Don't bother with this for lists -- + # just always push those as np.float. + # It is always necessary to make a copy of kernel (since it is modified), + # but, if we just so happen to be lucky enough to have the input array + # have exactly the desired type, we just alias to array_internal + + # Check if kernel is kernel instance + if isinstance(kernel, Kernel): + # Check if array is also kernel instance, if so convolve and + # return new kernel instance + if isinstance(array, Kernel): + if isinstance(array, Kernel1D) and isinstance(kernel, Kernel1D): + new_array = convolve1d_boundary_fill(array.array, kernel.array, 0) + new_kernel = Kernel1D(array=new_array) + elif isinstance(array, Kernel2D) and isinstance(kernel, Kernel2D): + new_array = convolve2d_boundary_fill(array.array, kernel.array, 0) + new_kernel = Kernel2D(array=new_array) + else: + raise Exception("Can't convolve 1D and 2D kernel.") + new_kernel._separable = kernel._separable and array._separable + new_kernel._is_bool = False + return new_kernel + kernel = kernel.array + + # Check that the arguments 
are lists or Numpy arrays + + if isinstance(array, list): + array_internal = np.array(array, dtype=np.float) + array_dtype = array_internal.dtype + elif isinstance(array, np.ndarray): + # Note this won't copy if it doesn't have to -- which is okay + # because none of what follows modifies array_internal. However, + # only numpy > 1.7 has support for no-copy astype, so we use + # a try/except because astropy supports 1.5 and 1.6 + array_dtype = array.dtype + try: + array_internal = array.astype(float, copy=False) + except TypeError: + array_internal = array.astype(float) + else: + raise TypeError("array should be a list or a Numpy array") + + if isinstance(kernel, list): + kernel_internal = np.array(kernel, dtype=float) + elif isinstance(kernel, np.ndarray): + # Note this always makes a copy, since we will be modifying it + kernel_internal = kernel.astype(float) + else: + raise TypeError("kernel should be a list or a Numpy array") + + # Check that the number of dimensions is compatible + if array_internal.ndim != kernel_internal.ndim: + raise Exception('array and kernel have differing number of ' + 'dimensions.') + + # Because the Cython routines have to normalize the kernel on the fly, we + # explicitly normalize the kernel here, and then scale the image at the + # end if normalization was not requested. + kernel_sum = kernel_internal.sum() + + if kernel_sum < 1. / MAX_NORMALIZATION and normalize_kernel: + raise Exception("The kernel can't be normalized, because its sum is " + "close to zero. The sum of the given kernel is < {0}" + .format(1. 
/ MAX_NORMALIZATION)) + kernel_internal /= kernel_sum + + if array_internal.ndim == 0: + raise Exception("cannot convolve 0-dimensional arrays") + elif array_internal.ndim == 1: + if boundary == 'extend': + result = convolve1d_boundary_extend(array_internal, + kernel_internal) + elif boundary == 'fill': + result = convolve1d_boundary_fill(array_internal, + kernel_internal, + float(fill_value)) + elif boundary == 'wrap': + result = convolve1d_boundary_wrap(array_internal, + kernel_internal) + else: + result = convolve1d_boundary_none(array_internal, + kernel_internal) + elif array_internal.ndim == 2: + if boundary == 'extend': + result = convolve2d_boundary_extend(array_internal, + kernel_internal) + elif boundary == 'fill': + result = convolve2d_boundary_fill(array_internal, + kernel_internal, + float(fill_value)) + elif boundary == 'wrap': + result = convolve2d_boundary_wrap(array_internal, + kernel_internal) + else: + result = convolve2d_boundary_none(array_internal, + kernel_internal) + elif array_internal.ndim == 3: + if boundary == 'extend': + result = convolve3d_boundary_extend(array_internal, + kernel_internal) + elif boundary == 'fill': + result = convolve3d_boundary_fill(array_internal, + kernel_internal, + float(fill_value)) + elif boundary == 'wrap': + result = convolve3d_boundary_wrap(array_internal, + kernel_internal) + else: + result = convolve3d_boundary_none(array_internal, + kernel_internal) + else: + raise NotImplemented('convolve only supports 1, 2, and 3-dimensional ' + 'arrays at this time') + + # If normalization was not requested, we need to scale the array (since + # the kernel was normalized prior to convolution) + if not normalize_kernel: + result *= kernel_sum + + # Try to preserve the input type if it's a floating point type + if array_dtype.kind == 'f': + # Avoid making another copy if possible + try: + return result.astype(array_dtype, copy=False) + except TypeError: + return result.astype(array_dtype) + else: + return result + + +def 
convolve_fft(array, kernel, boundary='fill', fill_value=0, crop=True, + return_fft=False, fft_pad=True, psf_pad=False, + interpolate_nan=False, quiet=False, ignore_edge_zeros=False, + min_wt=0.0, normalize_kernel=False, allow_huge=False, + fftn=np.fft.fftn, ifftn=np.fft.ifftn, + complex_dtype=np.complex): + """ + Convolve an ndarray with an nd-kernel. Returns a convolved image with + shape = array.shape. Assumes kernel is centered. + + `convolve_fft` differs from `scipy.signal.fftconvolve` in a few ways: + + * It can treat ``NaN`` values as zeros or interpolate over them. + * ``inf`` values are treated as ``NaN`` + * (optionally) It pads to the nearest 2^n size to improve FFT speed. + * Its only valid ``mode`` is 'same' (i.e., the same shape array is returned) + * It lets you use your own fft, e.g., + `pyFFTW `_ or + `pyFFTW3 `_ , which can lead to + performance improvements, depending on your system configuration. pyFFTW3 + is threaded, and therefore may yield significant performance benefits on + multi-core machines at the cost of greater memory requirements. Specify + the ``fftn`` and ``ifftn`` keywords to override the default, which is + `numpy.fft.fft` and `numpy.fft.ifft`. + + Parameters + ---------- + array : `numpy.ndarray` + Array to be convolved with ``kernel`` + kernel : `numpy.ndarray` + Will be normalized if ``normalize_kernel`` is set. Assumed to be + centered (i.e., shifts may result if your kernel is asymmetric) + boundary : {'fill', 'wrap'}, optional + A flag indicating how to handle boundaries: + + * 'fill': set values outside the array boundary to fill_value + (default) + * 'wrap': periodic boundary + + interpolate_nan : bool, optional + The convolution will be re-weighted assuming ``NaN`` values are meant to be + ignored, not treated as zero. If this is off, all ``NaN`` values will be + treated as zero. + ignore_edge_zeros : bool, optional + Ignore the zero-pad-created zeros. 
This will effectively decrease + the kernel area on the edges but will not re-normalize the kernel. + This parameter may result in 'edge-brightening' effects if you're using + a normalized kernel + min_wt : float, optional + If ignoring ``NaN`` / zeros, force all grid points with a weight less than + this value to ``NaN`` (the weight of a grid point with *no* ignored + neighbors is 1.0). + If ``min_wt`` is zero, then all zero-weight points will be set to zero + instead of ``NaN`` (which they would be otherwise, because 1/0 = nan). + See the examples below + normalize_kernel : function or boolean, optional + If specified, this is the function to divide kernel by to normalize it. + e.g., ``normalize_kernel=np.sum`` means that kernel will be modified to be: + ``kernel = kernel / np.sum(kernel)``. If True, defaults to + ``normalize_kernel = np.sum``. + + Other Parameters + ---------------- + fft_pad : bool, optional + Default on. Zero-pad image to the nearest 2^n + psf_pad : bool, optional + Default off. Zero-pad image to be at least the sum of the image sizes + (in order to avoid edge-wrapping when smoothing) + crop : bool, optional + Default on. Return an image of the size of the largest input image. + If the images are asymmetric in opposite directions, will return the + largest image in both directions. + For example, if an input image has shape [100,3] but a kernel with shape + [6,6] is used, the output will be [100,6]. + return_fft : bool, optional + Return the fft(image)*fft(kernel) instead of the convolution (which is + ifft(fft(image)*fft(kernel))). Useful for making PSDs. + fftn, ifftn : functions, optional + The fft and inverse fft functions. Can be overridden to use your own + ffts, e.g. an fftw3 wrapper or scipy's fftn, e.g. + ``fftn=scipy.fftpack.fftn`` + complex_dtype : np.complex, optional + Which complex dtype to use. `numpy` has a range of options, from 64 to + 256. 
+ quiet : bool, optional + Silence warning message about NaN interpolation + allow_huge : bool, optional + Allow huge arrays in the FFT? If False, will raise an exception if the + array or kernel size is >1 GB + + Raises + ------ + ValueError: + If the array is bigger than 1 GB after padding, will raise this exception + unless allow_huge is True + + See Also + -------- + convolve : Convolve is a non-fft version of this code. It is more + memory efficient and for small kernels can be faster. + + Returns + ------- + default : ndarray + **array** convolved with ``kernel``. + If ``return_fft`` is set, returns fft(**array**) * fft(``kernel``). + If crop is not set, returns the image, but with the fft-padded size + instead of the input size + + Examples + -------- + >>> convolve_fft([1, 0, 3], [1, 1, 1]) + array([ 1., 4., 3.]) + + >>> convolve_fft([1, np.nan, 3], [1, 1, 1]) + array([ 1., 4., 3.]) + + >>> convolve_fft([1, 0, 3], [0, 1, 0]) + array([ 1., 0., 3.]) + + >>> convolve_fft([1, 2, 3], [1]) + array([ 1., 2., 3.]) + + >>> convolve_fft([1, np.nan, 3], [0, 1, 0], interpolate_nan=True) + ... + array([ 1., 0., 3.]) + + >>> convolve_fft([1, np.nan, 3], [0, 1, 0], interpolate_nan=True, + ... min_wt=1e-8) + array([ 1., nan, 3.]) + + >>> convolve_fft([1, np.nan, 3], [1, 1, 1], interpolate_nan=True) + array([ 1., 4., 3.]) + + >>> convolve_fft([1, np.nan, 3], [1, 1, 1], interpolate_nan=True, + ... normalize_kernel=True, ignore_edge_zeros=True) + array([ 1., 2., 3.]) + + >>> import scipy.fftpack # optional - requires scipy + >>> convolve_fft([1, np.nan, 3], [1, 1, 1], interpolate_nan=True, + ... normalize_kernel=True, ignore_edge_zeros=True, + ... fftn=scipy.fftpack.fft, ifftn=scipy.fftpack.ifft) + array([ 1., 2., 3.]) + + """ + + # Checking copied from convolve.py - however, since FFTs have real & + # complex components, we change the types. Only the real part will be + # returned! Note that this always makes a copy. 
+ # Check kernel is kernel instance + if isinstance(kernel, Kernel): + kernel = kernel.array + if isinstance(array, Kernel): + raise TypeError("Can't convolve two kernels. Use convolve() instead.") + + # Convert array dtype to complex + # and ensure that list inputs become arrays + array = np.asarray(array, dtype=np.complex) + kernel = np.asarray(kernel, dtype=np.complex) + + # Check that the number of dimensions is compatible + if array.ndim != kernel.ndim: + raise ValueError("Image and kernel must have same number of " + "dimensions") + + arrayshape = array.shape + kernshape = kernel.shape + + array_size_B = (np.product(arrayshape, dtype=np.int64) * + np.dtype(complex_dtype).itemsize) + if array_size_B > 1024**3 and not allow_huge: + raise ValueError("Size Error: Arrays will be %s. Use " + "allow_huge=True to override this exception." + % human_file_size(array_size_B)) + + # mask catching - masks must be turned into NaNs for use later + if np.ma.is_masked(array): + mask = array.mask + array = np.array(array) + array[mask] = np.nan + if np.ma.is_masked(kernel): + mask = kernel.mask + kernel = np.array(kernel) + kernel[mask] = np.nan + + # NaN and inf catching + nanmaskarray = np.isnan(array) + np.isinf(array) + array[nanmaskarray] = 0 + nanmaskkernel = np.isnan(kernel) + np.isinf(kernel) + kernel[nanmaskkernel] = 0 + if ((nanmaskarray.sum() > 0 or nanmaskkernel.sum() > 0) and + not interpolate_nan and not quiet): + warnings.warn("NOT ignoring NaN values even though they are present " + " (they are treated as 0)", AstropyUserWarning) + + if normalize_kernel is True: + if kernel.sum() < 1. / MAX_NORMALIZATION: + raise Exception("The kernel can't be normalized, because its sum is " + "close to zero. The sum of the given kernel is < {0}" + .format(1. / MAX_NORMALIZATION)) + kernel = kernel / kernel.sum() + kernel_is_normalized = True + elif normalize_kernel: + # try this. If a function is not passed, the code will just crash... 
I + # think type checking would be better but PEPs say otherwise... + kernel = kernel / normalize_kernel(kernel) + kernel_is_normalized = True + else: + if np.abs(kernel.sum() - 1) < 1e-8: + kernel_is_normalized = True + else: + kernel_is_normalized = False + if (interpolate_nan or ignore_edge_zeros): + warnings.warn("Kernel is not normalized, therefore " + "ignore_edge_zeros and interpolate_nan will be " + "ignored.", AstropyUserWarning) + + if boundary is None: + warnings.warn("The convolve_fft version of boundary=None is " + "equivalent to the convolve boundary='fill'. There is " + "no FFT equivalent to convolve's " + "zero-if-kernel-leaves-boundary", AstropyUserWarning) + psf_pad = True + elif boundary == 'fill': + # create a boundary region at least as large as the kernel + psf_pad = True + elif boundary == 'wrap': + psf_pad = False + fft_pad = False + fill_value = 0 # force zero; it should not be used + elif boundary == 'extend': + raise NotImplementedError("The 'extend' option is not implemented " + "for fft-based convolution") + + # find ideal size (power of 2) for fft. + # Can add shapes because they are tuples + if fft_pad: # default=True + if psf_pad: # default=False + # add the dimensions and then take the max (bigger) + fsize = 2 ** np.ceil(np.log2( + np.max(np.array(arrayshape) + np.array(kernshape)))) + else: + # add the shape lists (max of a list of length 4) (smaller) + # also makes the shapes square + fsize = 2 ** np.ceil(np.log2(np.max(arrayshape + kernshape))) + newshape = np.array([fsize for ii in range(array.ndim)], dtype=int) + else: + if psf_pad: + # just add the biggest dimensions + newshape = np.array(arrayshape) + np.array(kernshape) + else: + newshape = np.array([np.max([imsh, kernsh]) + for imsh, kernsh in zip(arrayshape, kernshape)]) + + # For future reference, this can be used to predict "almost exactly" + # how much *additional* memory will be used. 
+ # size * (array + kernel + kernelfft + arrayfft + + # (kernel*array)fft + + # optional(weight image + weight_fft + weight_ifft) + + # optional(returned_fft)) + #total_memory_used_GB = (np.product(newshape)*np.dtype(complex_dtype).itemsize + # * (5 + 3*((interpolate_nan or ignore_edge_zeros) and kernel_is_normalized)) + # + (1 + (not return_fft)) * + # np.product(arrayshape)*np.dtype(complex_dtype).itemsize + # + np.product(arrayshape)*np.dtype(bool).itemsize + # + np.product(kernshape)*np.dtype(bool).itemsize) + # ) / 1024.**3 + + # separate each dimension by the padding size... this is to determine the + # appropriate slice size to get back to the input dimensions + arrayslices = [] + kernslices = [] + for ii, (newdimsize, arraydimsize, kerndimsize) in enumerate(zip(newshape, arrayshape, kernshape)): + center = newdimsize - (newdimsize + 1) // 2 + arrayslices += [slice(center - arraydimsize // 2, + center + (arraydimsize + 1) // 2)] + kernslices += [slice(center - kerndimsize // 2, + center + (kerndimsize + 1) // 2)] + + if not np.all(newshape == arrayshape): + bigarray = np.ones(newshape, dtype=complex_dtype) * fill_value + bigarray[arrayslices] = array + else: + bigarray = array + + if not np.all(newshape == kernshape): + bigkernel = np.zeros(newshape, dtype=complex_dtype) + bigkernel[kernslices] = kernel + else: + bigkernel = kernel + + arrayfft = fftn(bigarray) + # need to shift the kernel so that, e.g., [0,0,1,0] -> [1,0,0,0] = unity + kernfft = fftn(np.fft.ifftshift(bigkernel)) + fftmult = arrayfft * kernfft + + if (interpolate_nan or ignore_edge_zeros) and kernel_is_normalized: + if ignore_edge_zeros: + bigimwt = np.zeros(newshape, dtype=complex_dtype) + else: + bigimwt = np.ones(newshape, dtype=complex_dtype) + bigimwt[arrayslices] = 1.0 - nanmaskarray * interpolate_nan + wtfft = fftn(bigimwt) + # I think this one HAS to be normalized (i.e., the weights can't be + # computed with a non-normalized kernel) + wtfftmult = wtfft * kernfft / kernel.sum() + 
wtsm = ifftn(wtfftmult) + # need to re-zero weights outside of the image (if it is padded, we + # still don't weight those regions) + bigimwt[arrayslices] = wtsm.real[arrayslices] + # curiously, at the floating-point limit, can get slightly negative numbers + # they break the min_wt=0 "flag" and must therefore be removed + bigimwt[bigimwt < 0] = 0 + else: + bigimwt = 1 + + if np.isnan(fftmult).any(): + # this check should be unnecessary; call it an insanity check + raise ValueError("Encountered NaNs in convolve. This is disallowed.") + + # restore NaNs in original image (they were modified inplace earlier) + # We don't have to worry about masked arrays - if input was masked, it was + # copied + array[nanmaskarray] = np.nan + kernel[nanmaskkernel] = np.nan + + if return_fft: + return fftmult + + if interpolate_nan or ignore_edge_zeros: + rifft = (ifftn(fftmult)) / bigimwt + if not np.isscalar(bigimwt): + rifft[bigimwt < min_wt] = np.nan + if min_wt == 0.0: + rifft[bigimwt == 0.0] = 0.0 + else: + rifft = (ifftn(fftmult)) + + if crop: + result = rifft[arrayslices].real + return result + else: + return rifft.real diff --git a/astropy/convolution/core.py b/astropy/convolution/core.py new file mode 100644 index 0000000..34fe846 --- /dev/null +++ b/astropy/convolution/core.py @@ -0,0 +1,381 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" +This module contains the convolution and filter functionalities of astropy. + +A few conceptual notes: +A filter kernel is mainly characterized by its response function. In the 1D +case we speak of "impulse response function", in the 2D case we call it "point +spread function". This response function is given for every kernel by an +astropy `FittableModel`, which is evaluated on a grid to obtain a filter array, +which can then be applied to binned data. + +The model is centered on the array and should have an amplitude such that the array +integrates to one per default. 
+ +Currently only symmetric 2D kernels are supported. +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import warnings +import copy + +import numpy as np +from ..utils.exceptions import AstropyUserWarning +from .utils import (discretize_model, add_kernel_arrays_1D, + add_kernel_arrays_2D) + +MAX_NORMALIZATION = 100 + +__all__ = ['Kernel', 'Kernel1D', 'Kernel2D', 'kernel_arithmetics'] + + +class Kernel(object): + """ + Convolution kernel base class. + + Parameters + ---------- + array : ndarray + Kernel array. + """ + _separable = False + _is_bool = True + _model = None + + def __init__(self, array): + self._array = array + if self._array.sum() == 0: + self._normalization = np.inf + else: + self._normalization = 1. / self._array.sum() + + @property + def truncation(self): + """ + Deviation from the normalization to one. + """ + return self._truncation + + @property + def is_bool(self): + """ + Indicates if kernel is bool. + + If the kernel is bool the multiplication in the convolution could + be omitted, to increase the performance. + """ + return self._is_bool + + @property + def model(self): + """ + Kernel response model. + """ + return self._model + + @property + def dimension(self): + """ + Kernel dimension. + """ + return self.array.ndim + + @property + def center(self): + """ + Index of the kernel center. + """ + return [axes_size // 2 for axes_size in self._array.shape] + + @property + def normalization(self): + """ + Kernel normalization factor + """ + return self._normalization + + def normalize(self, mode='integral'): + """ + Force normalization of filter kernel. + + Parameters + ---------- + mode : {'integral', 'peak'} + One of the following modes: + * 'integral' (default) + Kernel normalized such that its integral = 1. + * 'peak' + Kernel normalized such that its peak = 1. 
+ """ + # There are kernel that sum to zero and + # the user should be warned in this case + if np.isinf(self._normalization): + warnings.warn('Kernel cannot be normalized because the ' + 'normalization factor is infinite.', + AstropyUserWarning) + return + if np.abs(self._normalization) > MAX_NORMALIZATION: + warnings.warn("Normalization factor of kernel is " + "exceptionally large > {0}.".format(MAX_NORMALIZATION), + AstropyUserWarning) + if mode == 'integral': + self._array *= self._normalization + if mode == 'peak': + np.divide(self._array, self._array.max(), self.array) + self._normalization = 1. / self._array.sum() + + @property + def shape(self): + """ + Shape of the kernel array. + """ + return self._array.shape + + @property + def separable(self): + """ + Indicates if the filter kernel is separable. + + A 2D filter is separable, when its filter array can be written as the + outer product of two 1D arrays. + + If a filter kernel is separable, higher dimension convolutions will be + performed by applying the 1D filter array consecutively on every dimension. + This is significantly faster, than using a filter array with the same + dimension. + """ + return self._separable + + @property + def array(self): + """ + Filter kernel array. + """ + return self._array + + def __add__(self, kernel): + """ + Add two filter kernels. + """ + return kernel_arithmetics(self, kernel, 'add') + + def __sub__(self, kernel): + """ + Subtract two filter kernels. + """ + return kernel_arithmetics(self, kernel, 'sub') + + def __mul__(self, value): + """ + Multiply kernel with number or convolve two kernels. + """ + return kernel_arithmetics(self, value, "mul") + + def __rmul__(self, value): + """ + Multiply kernel with number or convolve two kernels. + """ + return kernel_arithmetics(self, value, "mul") + + def __array__(self): + """ + Array representation of the kernel. 
+ """ + return self._array + + def __array_wrap__(self, array, context=None): + """ + Wrapper for multiplication with numpy arrays. + """ + if type(context[0]) == np.ufunc: + return NotImplemented + else: + return array + + +class Kernel1D(Kernel): + """ + Base class for 1D filter kernels. + + Parameters + ---------- + model : `~astropy.modeling.FittableModel` + Model to be evaluated. + x_size : odd int, optional + Size of the kernel array. Default = 8 * width. + array : ndarray + Kernel array. + width : number + Width of the filter kernel. + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by linearly interpolating + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + """ + def __init__(self, model=None, x_size=None, array=None, **kwargs): + # Initialize from model + if array is None: + + if x_size is None: + x_size = self._default_size + elif x_size != int(x_size): + raise TypeError("x_size should be an integer") + + # Set ranges where to evaluate the model + + if x_size % 2 == 0: # even kernel + x_range = (-(int(x_size)) // 2 + 0.5, (int(x_size)) // 2 + 0.5) + else: # odd kernel + x_range = (-(int(x_size) - 1) // 2, (int(x_size) - 1) // 2 + 1) + + array = discretize_model(self._model, x_range, **kwargs) + + # Initialize from array + elif array is not None: + self._model = None + else: + raise TypeError("Must specify either array or model.") + super(Kernel1D, self).__init__(array) + + +class Kernel2D(Kernel): + """ + Base class for 2D filter kernels. + + Parameters + ---------- + model : `~astropy.modeling.FittableModel` + Model to be evaluated. 
+ x_size : odd int, optional + Size in x direction of the kernel array. Default = 8 * width. + y_size : odd int, optional + Size in y direction of the kernel array. Default = 8 * width. + array : ndarray + Kernel array. + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by performing a bilinear interpolation + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + width : number + Width of the filter kernel. + factor : number, optional + Factor of oversampling. Default factor = 10. + """ + def __init__(self, model=None, x_size=None, y_size=None, array=None, **kwargs): + + # Initialize from model + if array is None: + + if x_size is None: + x_size = self._default_size + elif x_size != int(x_size): + raise TypeError("x_size should be an integer") + + if y_size is None: + y_size = x_size + elif x_size != int(x_size): + raise TypeError("y_size should be an integer") + + # Set ranges where to evaluate the model + + if x_size % 2 == 0: # even kernel + x_range = (-(int(x_size)) // 2 + 0.5, (int(x_size)) // 2 + 0.5) + else: # odd kernel + x_range = (-(int(x_size) - 1) // 2, (int(x_size) - 1) // 2 + 1) + + if y_size % 2 == 0: # even kernel + y_range = (-(int(y_size)) // 2 + 0.5, (int(y_size)) // 2 + 0.5) + else: # odd kernel + y_range = (-(int(y_size) - 1) // 2, (int(y_size) - 1) // 2 + 1) + + array = discretize_model(self._model, x_range, y_range, **kwargs) + + # Initialize from array + elif array is not None: + self._model = None + else: + raise TypeError("Must specify either array or model.") + + super(Kernel2D, self).__init__(array) + + +def kernel_arithmetics(kernel, value, operation): + """ + Add, subtract or multiply two kernels. 
+ + Parameters + ---------- + kernel : `astropy.convolution.Kernel` + Kernel instance + value : kernel, float or int + Value to operate with + operation : {'add', 'sub', 'mul'} + One of the following operations: + * 'add' + Add two kernels + * 'sub' + Subtract two kernels + * 'mul' + Multiply kernel with number or convolve two kernels. + """ + # 1D kernels + if isinstance(kernel, Kernel1D) and isinstance(value, Kernel1D): + if operation == "add": + new_array = add_kernel_arrays_1D(kernel.array, value.array) + if operation == "sub": + new_array = add_kernel_arrays_1D(kernel.array, -value.array) + if operation == "mul": + raise Exception("Kernel operation not supported. Maybe you want to" + "use convolve(kernel1, kernel2) instead.") + new_kernel = Kernel1D(array=new_array) + new_kernel._separable = kernel._separable and value._separable + new_kernel._is_bool = kernel._is_bool or value._is_bool + + # 2D kernels + elif isinstance(kernel, Kernel2D) and isinstance(value, Kernel2D): + if operation == "add": + new_array = add_kernel_arrays_2D(kernel.array, value.array) + if operation == "sub": + new_array = add_kernel_arrays_2D(kernel.array, -value.array) + if operation == "mul": + raise Exception("Kernel operation not supported. 
Maybe you want to "
Default = 8 * stddev + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by linearly interpolating + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. Very slow. + factor : number, optional + Factor of oversampling. Default factor = 10. If the factor + is too large, evaluation can be very slow. + + + See Also + -------- + Box1DKernel, Trapezoid1DKernel, MexicanHat1DKernel + + + Examples + -------- + Kernel response: + + .. plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import Gaussian1DKernel + gauss_1D_kernel = Gaussian1DKernel(10) + plt.plot(gauss_1D_kernel, drawstyle='steps') + plt.xlabel('x [pixels]') + plt.ylabel('value') + plt.show() + """ + _separable = True + _is_bool = False + + def __init__(self, stddev, **kwargs): + self._model = models.Gaussian1D(1. / (np.sqrt(2 * np.pi) * stddev), + 0, stddev) + self._default_size = _round_up_to_odd_integer(8 * stddev) + super(Gaussian1DKernel, self).__init__(**kwargs) + self._truncation = np.abs(1. - 1 / self._normalization) + + +class Gaussian2DKernel(Kernel2D): + """ + 2D Gaussian filter kernel. + + The Gaussian filter is a filter with great smoothing properties. It is + isotropic and does not produce artifacts. + + Parameters + ---------- + stddev : number + Standard deviation of the Gaussian kernel. + x_size : odd int, optional + Size in x direction of the kernel array. Default = 8 * stddev. + y_size : odd int, optional + Size in y direction of the kernel array. Default = 8 * stddev. + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. 
+ * 'linear_interp' + Discretize model by performing a bilinear interpolation + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + + + See Also + -------- + Box2DKernel, Tophat2DKernel, MexicanHat2DKernel, Ring2DKernel, + TrapezoidDisk2DKernel, AiryDisk2DKernel + + Examples + -------- + Kernel response: + + .. plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import Gaussian2DKernel + gaussian_2D_kernel = Gaussian2DKernel(10) + plt.imshow(gaussian_2D_kernel, interpolation='none', origin='lower') + plt.xlabel('x [pixels]') + plt.ylabel('y [pixels]') + plt.colorbar() + plt.show() + + """ + _separable = True + _is_bool = False + + def __init__(self, stddev, **kwargs): + self._model = models.Gaussian2D(1. / (2 * np.pi * stddev ** 2), 0, + 0, stddev, stddev) + self._default_size = _round_up_to_odd_integer(8 * stddev) + super(Gaussian2DKernel, self).__init__(**kwargs) + self._truncation = np.abs(1. - 1 / self._normalization) + + +class Box1DKernel(Kernel1D): + """ + 1D Box filter kernel. + + The Box filter or running mean is a smoothing filter. It is not isotropic + and can produce artifacts, when applied repeatedly to the same data. + + By default the Box kernel uses the ``linear_interp`` discretization mode, + which allows non-shifting, even-sized kernels. This is achieved by + weighting the edge pixels with 1/2. E.g a Box kernel with an effective + smoothing of 4 pixel would have the following array: [0.5, 1, 1, 1, 0.5]. + + + Parameters + ---------- + width : number + Width of the filter kernel. + mode : str, optional + One of the following discretization modes: + * 'center' + Discretize model by taking the value + at the center of the bin. 
+ * 'linear_interp' (default) + Discretize model by linearly interpolating + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + + See Also + -------- + Gaussian1DKernel, Trapezoid1DKernel, MexicanHat1DKernel + + + Examples + -------- + Kernel response function: + + .. plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import Box1DKernel + box_1D_kernel = Box1DKernel(9) + plt.plot(box_1D_kernel, drawstyle='steps') + plt.xlim(-1, 9) + plt.xlabel('x [pixels]') + plt.ylabel('value') + plt.show() + + """ + _separable = True + _is_bool = True + + def __init__(self, width, **kwargs): + self._model = models.Box1D(1. / width, 0, width) + self._default_size = _round_up_to_odd_integer(width) + kwargs['mode'] = 'linear_interp' + super(Box1DKernel, self).__init__(**kwargs) + self._truncation = 0 + self.normalize() + + +class Box2DKernel(Kernel2D): + """ + 2D Box filter kernel. + + The Box filter or running mean is a smoothing filter. It is not isotropic + and can produce artifact, when applied repeatedly to the same data. + + By default the Box kernel uses the ``linear_interp`` discretization mode, + which allows non-shifting, even-sized kernels. This is achieved by + weighting the edge pixels with 1/2. + + + Parameters + ---------- + width : number + Width of the filter kernel. + mode : str, optional + One of the following discretization modes: + * 'center' + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' (default) + Discretize model by performing a bilinear interpolation + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. 
+ * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + + + See Also + -------- + Box2DKernel, Tophat2DKernel, MexicanHat2DKernel, Ring2DKernel, + TrapezoidDisk2DKernel, AiryDisk2DKernel + + Examples + -------- + Kernel response: + + .. plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import Box2DKernel + box_2D_kernel = Box2DKernel(9) + plt.imshow(box_2D_kernel, interpolation='none', origin='lower', + vmin=0.0, vmax=0.015) + plt.xlim(-1, 9) + plt.ylim(-1, 9) + plt.xlabel('x [pixels]') + plt.ylabel('y [pixels]') + plt.colorbar() + plt.show() + """ + _separable = True + _is_bool = True + + def __init__(self, width, **kwargs): + self._model = models.Box2D(1. / width ** 2, 0, 0, width, width) + self._default_size = _round_up_to_odd_integer(width) + kwargs['mode'] = 'linear_interp' + super(Box2DKernel, self).__init__(**kwargs) + self._truncation = 0 + self.normalize() + + +class Tophat2DKernel(Kernel2D): + """ + 2D Tophat filter kernel. + + The Tophat filter is an isotropic smoothing filter. It can produce + artifacts when applied repeatedly on the same data. + + Parameters + ---------- + radius : int + Radius of the filter kernel. + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by performing a bilinear interpolation + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + + + See Also + -------- + Box2DKernel, Tophat2DKernel, MexicanHat2DKernel, Ring2DKernel, + TrapezoidDisk2DKernel, AiryDisk2DKernel + + Examples + -------- + Kernel response: + + .. 
plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import Tophat2DKernel + tophat_2D_kernel = Tophat2DKernel(40) + plt.imshow(tophat_2D_kernel, interpolation='none', origin='lower') + plt.xlabel('x [pixels]') + plt.ylabel('y [pixels]') + plt.colorbar() + plt.show() + + """ + def __init__(self, radius, **kwargs): + self._model = models.Disk2D(1. / (np.pi * radius ** 2), 0, 0, radius) + self._default_size = _round_up_to_odd_integer(2 * radius) + super(Tophat2DKernel, self).__init__(**kwargs) + self._truncation = 0 + + +class Ring2DKernel(Kernel2D): + """ + 2D Ring filter kernel. + + The Ring filter kernel is the difference between two Tophat kernels of + different width. This kernel is useful for, e.g., background estimation. + + Parameters + ---------- + radius_in : number + Inner radius of the ring kernel. + width : number + Width of the ring kernel. + mode: str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by performing a bilinear interpolation + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + + See Also + -------- + Box2DKernel, Gaussian2DKernel, MexicanHat2DKernel, Tophat2DKernel + + Examples + -------- + Kernel response: + + .. plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import Ring2DKernel + ring_2D_kernel = Ring2DKernel(9, 8) + plt.imshow(ring_2D_kernel, interpolation='none', origin='lower') + plt.xlabel('x [pixels]') + plt.ylabel('y [pixels]') + plt.colorbar() + plt.show() + """ + def __init__(self, radius_in, width, **kwargs): + radius_out = radius_in + width + self._model = models.Ring2D(1. 
/ (np.pi * (radius_out ** 2 - radius_in ** 2)), + 0, 0, radius_in, width) + self._default_size = _round_up_to_odd_integer(2 * radius_out) + super(Ring2DKernel, self).__init__(**kwargs) + self._truncation = 0 + + +class Trapezoid1DKernel(Kernel1D): + """ + 1D trapezoid kernel. + + Parameters + ---------- + width : number + Width of the filter kernel, defined as the width of the constant part, + before it begins to slope down. + slope : number + Slope of the filter kernel's tails + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by linearly interpolating + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + + See Also + -------- + Box1DKernel, Gaussian1DKernel, MexicanHat1DKernel + + Examples + -------- + Kernel response: + + .. plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import Trapezoid1DKernel + trapezoid_1D_kernel = Trapezoid1DKernel(17, slope=0.2) + plt.plot(trapezoid_1D_kernel, drawstyle='steps') + plt.xlabel('x [pixels]') + plt.ylabel('amplitude') + plt.xlim(-1, 28) + plt.show() + """ + _is_bool = False + + def __init__(self, width, slope=1., **kwargs): + self._model = models.Trapezoid1D(1, 0, width, slope) + self._default_size = _round_up_to_odd_integer(width + 2. / slope) + super(Trapezoid1DKernel, self).__init__(**kwargs) + self._truncation = 0 + self.normalize() + + +class TrapezoidDisk2DKernel(Kernel2D): + """ + 2D trapezoid kernel. + + Parameters + ---------- + radius : number + Width of the filter kernel, defined as the width of the constant part, + before it begins to slope down. 
+ slope : number + Slope of the filter kernel's tails + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by performing a bilinear interpolation + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + + See Also + -------- + Box2DKernel, Tophat2DKernel, MexicanHat2DKernel, Ring2DKernel, + TrapezoidDisk2DKernel, AiryDisk2DKernel + + Examples + -------- + Kernel response: + + .. plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import TrapezoidDisk2DKernel + trapezoid_2D_kernel = TrapezoidDisk2DKernel(20, slope=0.2) + plt.imshow(trapezoid_2D_kernel, interpolation='none', origin='lower') + plt.xlabel('x [pixels]') + plt.ylabel('y [pixels]') + plt.colorbar() + plt.show() + + """ + _is_bool = False + + def __init__(self, radius, slope=1., **kwargs): + self._model = models.TrapezoidDisk2D(1, 0, 0, radius, slope) + self._default_size = _round_up_to_odd_integer(2 * radius + 2. / slope) + super(TrapezoidDisk2DKernel, self).__init__(**kwargs) + self._truncation = 0 + self.normalize() + + +class MexicanHat1DKernel(Kernel1D): + """ + 1D Mexican hat filter kernel. + + The Mexican Hat, or inverted Gaussian-Laplace filter, is a + bandpass filter. It smoothes the data and removes slowly varying + or constant structures (e.g. Background). It is useful for peak or + multi-scale detection. + + This kernel is derived from a normalized Gaussian function, by + computing the second derivative. This results in an amplitude + at the kernels center of 1. / (sqrt(2 * pi) * width ** 3). 
The + normalization is the same as for `scipy.ndimage.filters.gaussian_laplace`, + except for a minus sign. + + Parameters + ---------- + width : number + Width of the filter kernel, defined as the standard deviation + of the Gaussian function from which it is derived. + x_size : odd int, optional + Size in x direction of the kernel array. Default = 8 * width. + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by linearly interpolating + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + + + See Also + -------- + Box1DKernel, Gaussian1DKernel, Trapezoid1DKernel + + Examples + -------- + Kernel response: + + .. plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import MexicanHat1DKernel + mexicanhat_1D_kernel = MexicanHat1DKernel(10) + plt.plot(mexicanhat_1D_kernel, drawstyle='steps') + plt.xlabel('x [pixels]') + plt.ylabel('value') + plt.show() + + """ + _is_bool = True + + def __init__(self, width, **kwargs): + amplitude = 1.0 / (np.sqrt(2 * np.pi) * width ** 3) + self._model = models.MexicanHat1D(amplitude, 0, width) + self._default_size = _round_up_to_odd_integer(8 * width) + super(MexicanHat1DKernel, self).__init__(**kwargs) + self._truncation = np.abs(self._array.sum() / self._array.size) + + +class MexicanHat2DKernel(Kernel2D): + """ + 2D Mexican hat filter kernel. + + The Mexican Hat, or inverted Gaussian-Laplace filter, is a + bandpass filter. It smoothes the data and removes slowly varying + or constant structures (e.g. Background). It is useful for peak or + multi-scale detection. 
+ + This kernel is derived from a normalized Gaussian function, by + computing the second derivative. This results in an amplitude + at the kernels center of 1. / (pi * width ** 4). The normalization + is the same as for `scipy.ndimage.filters.gaussian_laplace`, except + for a minus sign. + + Parameters + ---------- + width : number + Width of the filter kernel, defined as the standard deviation + of the Gaussian function from which it is derived. + x_size : odd int, optional + Size in x direction of the kernel array. Default = 8 * width. + y_size : odd int, optional + Size in y direction of the kernel array. Default = 8 * width. + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by performing a bilinear interpolation + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + + + See Also + -------- + Box2DKernel, Tophat2DKernel, MexicanHat2DKernel, Ring2DKernel, + TrapezoidDisk2DKernel, AiryDisk2DKernel + + Examples + -------- + Kernel response: + + .. 
plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import MexicanHat2DKernel + mexicanhat_2D_kernel = MexicanHat2DKernel(10) + plt.imshow(mexicanhat_2D_kernel, interpolation='none', origin='lower') + plt.xlabel('x [pixels]') + plt.ylabel('y [pixels]') + plt.colorbar() + plt.show() + """ + _is_bool = False + + def __init__(self, width, **kwargs): + amplitude = 1.0 / (np.pi * width ** 4) + self._model = models.MexicanHat2D(amplitude, 0, 0, width) + self._default_size = _round_up_to_odd_integer(8 * width) + super(MexicanHat2DKernel, self).__init__(**kwargs) + self._truncation = np.abs(self._array.sum() / self._array.size) + + +class AiryDisk2DKernel(Kernel2D): + """ + 2D Airy disk kernel. + + This kernel models the diffraction pattern of a circular aperture. This + kernel is normalized to a peak value of 1. + + Parameters + ---------- + radius : float + The radius of the Airy disk kernel (radius of the first zero). + x_size : odd int, optional + Size in x direction of the kernel array. Default = 8 * radius. + y_size : odd int, optional + Size in y direction of the kernel array. Default = 8 * radius. + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by performing a bilinear interpolation + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. + + See Also + -------- + Box2DKernel, Tophat2DKernel, MexicanHat2DKernel, Ring2DKernel, + TrapezoidDisk2DKernel, AiryDisk2DKernel + + Examples + -------- + Kernel response: + + .. 
plot:: + :include-source: + + import matplotlib.pyplot as plt + from astropy.convolution import AiryDisk2DKernel + airydisk_2D_kernel = AiryDisk2DKernel(10) + plt.imshow(airydisk_2D_kernel, interpolation='none', origin='lower') + plt.xlabel('x [pixels]') + plt.ylabel('y [pixels]') + plt.colorbar() + plt.show() + """ + _is_bool = False + + def __init__(self, radius, **kwargs): + self._model = models.AiryDisk2D(1, 0, 0, radius) + self._default_size = _round_up_to_odd_integer(8 * radius) + super(AiryDisk2DKernel, self).__init__(**kwargs) + self.normalize() + self._truncation = None + + +class Model1DKernel(Kernel1D): + """ + Create kernel from 1D model. + + The model has to be centered on x = 0. + + Parameters + ---------- + model : `~astropy.modeling.Fittable1DModel` + Kernel response function model + x_size : odd int, optional + Size in x direction of the kernel array. Default = 8 * width. + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by linearly interpolating + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. 
+ + Raises + ------ + TypeError + If model is not an instance of `~astropy.modeling.Fittable1DModel` + + See also + -------- + Model2DKernel : Create kernel from `~astropy.modeling.Fittable2DModel` + CustomKernel : Create kernel from list or array + + Examples + -------- + Define a Gaussian1D model: + + >>> from astropy.modeling.models import Gaussian1D + >>> from astropy.convolution.kernels import Model1DKernel + >>> gauss = Gaussian1D(1, 0, 2) + + And create a custom one dimensional kernel from it: + + >>> gauss_kernel = Model1DKernel(gauss, x_size=9) + + This kernel can now be used like a usual Astropy kernel. + """ + _separable = False + _is_bool = False + + def __init__(self, model, **kwargs): + if isinstance(model, Fittable1DModel): + self._model = model + else: + raise TypeError("Must be Fittable1DModel") + super(Model1DKernel, self).__init__(**kwargs) + + +class Model2DKernel(Kernel2D): + """ + Create kernel from 2D model. + + The model has to be centered on x = 0 and y = 0. + + Parameters + ---------- + model : `~astropy.modeling.Fittable2DModel` + Kernel response function model + x_size : odd int, optional + Size in x direction of the kernel array. Default = 8 * width. + y_size : odd int, optional + Size in y direction of the kernel array. Default = 8 * width. + mode : str, optional + One of the following discretization modes: + * 'center' (default) + Discretize model by taking the value + at the center of the bin. + * 'linear_interp' + Discretize model by performing a bilinear interpolation + between the values at the corners of the bin. + * 'oversample' + Discretize model by taking the average + on an oversampled grid. + * 'integrate' + Discretize model by integrating the + model over the bin. + factor : number, optional + Factor of oversampling. Default factor = 10. 
+ + Raises + ------ + TypeError + If model is not an instance of `~astropy.modeling.Fittable2DModel` + + See also + -------- + Model1DKernel : Create kernel from `~astropy.modeling.Fittable1DModel` + CustomKernel : Create kernel from list or array + + Examples + -------- + Define a Gaussian2D model: + + >>> from astropy.modeling.models import Gaussian2D + >>> from astropy.convolution.kernels import Model2DKernel + >>> gauss = Gaussian2D(1, 0, 0, 2, 2) + + And create a custom two dimensional kernel from it: + + >>> gauss_kernel = Model2DKernel(gauss, x_size=9) + + This kernel can now be used like a usual astropy kernel. + + """ + _is_bool = False + _separable = False + + def __init__(self, model, **kwargs): + self._separable = False + if isinstance(model, Fittable2DModel): + self._model = model + else: + raise TypeError("Must be Fittable2DModel") + super(Model2DKernel, self).__init__(**kwargs) + + +class PSFKernel(Kernel2D): + """ + Initialize filter kernel from astropy PSF instance. + """ + _separable = False + + def __init__(self): + raise NotImplementedError('Not yet implemented') + + +class CustomKernel(Kernel): + """ + Create filter kernel from list or array. + + Parameters + ---------- + array : list or array + Filter kernel array. Size must be odd. + + Raises + ------ + TypeError + If array is not a list or array. + KernelSizeError + If array size is even. 
+ + See also + -------- + Model2DKernel, Model1DKernel + + Examples + -------- + Define one dimensional array: + + >>> from astropy.convolution.kernels import CustomKernel + >>> import numpy as np + >>> array = np.array([1, 2, 3, 2, 1]) + >>> kernel = CustomKernel(array) + >>> kernel.dimension + 1 + + Define two dimensional array: + + >>> array = np.array([[1, 1, 1], [1, 2, 1], [1, 1, 1]]) + >>> kernel = CustomKernel(array) + >>> kernel.dimension + 2 + """ + def __init__(self, array): + self.array = array + super(CustomKernel, self).__init__(self._array) + + @property + def array(self): + """ + Filter kernel array. + """ + return self._array + + @array.setter + def array(self, array): + """ + Filter kernel array setter + """ + if isinstance(array, np.ndarray): + self._array = array.astype(np.float64) + elif isinstance(array, list): + self._array = np.array(array, dtype=np.float64) + else: + raise TypeError("Must be list or array.") + + # Check if array is odd in all axes + odd = np.all([axes_size % 2 != 0 for axes_size in self.shape]) + if not odd: + raise KernelSizeError("Kernel size must be odd in all axes.") + + # Check if array is bool + ones = self._array == 1. 
+ zeros = self._array == 0 + self._is_bool = np.all(np.logical_or(ones, zeros)) + + self._truncation = 0.0 diff --git a/astropy/convolution/setup_package.py b/astropy/convolution/setup_package.py new file mode 100644 index 0000000..3cd9f7c --- /dev/null +++ b/astropy/convolution/setup_package.py @@ -0,0 +1,5 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + + +def requires_2to3(): + return False diff --git a/astropy/convolution/tests/__init__.py b/astropy/convolution/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/astropy/convolution/tests/test_convolve.py b/astropy/convolution/tests/test_convolve.py new file mode 100644 index 0000000..36af9c2 --- /dev/null +++ b/astropy/convolution/tests/test_convolve.py @@ -0,0 +1,487 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import numpy as np + +from ...tests.helper import pytest + +from ..convolve import convolve + +from numpy.testing import assert_array_almost_equal_nulp + +VALID_DTYPES = [] +for dtype_array in ['>f4', 'f8', 'f4', 'f8', 'f4', 'f8', 'f4', 'f8', 'x, z, x))*2 ) + + @pytest.mark.parametrize(option_names, options) + def test_uniform_3x3(self, boundary, interpolate_nan, normalize_kernel, + ignore_edge_zeros): + ''' + Test that the different modes are producing the correct results using + a 3x3 uniform kernel. 
+ ''' + + x = np.array([[0., 0., 3.], + [1., 0., 0.], + [0., 2., 0.]], dtype='float64') + + y = np.array([[1., 1., 1.], + [1., 1., 1.], + [1., 1., 1.]], dtype='float64') + + z = convolve_fft(x, y, boundary=boundary, + interpolate_nan=interpolate_nan, + normalize_kernel=normalize_kernel, + ignore_edge_zeros=ignore_edge_zeros) + + w = np.array([[4., 6., 4.], + [6., 9., 6.], + [4., 6., 4.]], dtype='float64') + answer_dict = { + 'sum': np.array([[1., 4., 3.], + [3., 6., 5.], + [3., 3., 2.]], dtype='float64'), + 'sum_wrap': np.array([[6., 6., 6.], + [6., 6., 6.], + [6., 6., 6.]], dtype='float64'), + } + answer_dict['average'] = answer_dict['sum'] / w + answer_dict['average_wrap'] = answer_dict['sum_wrap'] / 9. + answer_dict['average_withzeros'] = answer_dict['sum'] / 9. + answer_dict['sum_withzeros'] = answer_dict['sum'] + + if normalize_kernel: + answer_key = 'average' + else: + answer_key = 'sum' + + if boundary == 'wrap': + answer_key += '_wrap' + elif not ignore_edge_zeros: + answer_key += '_withzeros' + + a = answer_dict[answer_key] + # for reasons unknown, the Windows FFT returns an answer for the [0, 0] + # component that is EXACTLY 10*np.spacing + assert np.all(np.abs(z - a) <= np.spacing(np.where(z > a, z, a)) * 10) + + @pytest.mark.parametrize(option_names, options) + def test_unity_3x3_withnan(self, boundary, interpolate_nan, + normalize_kernel, ignore_edge_zeros): + ''' + Test that a 3x3 unit kernel returns the same array (except when + boundary is None). This version includes a NaN value in the original + array. 
+ ''' + + x = np.array([[1., 2., 3.], + [4., np.nan, 6.], + [7., 8., 9.]], dtype='float64') + + y = np.array([[0., 0., 0.], + [0., 1., 0.], + [0., 0., 0.]], dtype='float64') + + z = convolve_fft(x, y, boundary=boundary, + interpolate_nan=interpolate_nan, + normalize_kernel=normalize_kernel, + ignore_edge_zeros=ignore_edge_zeros, + ) + + a = x + a[1, 1] = 0 + + # for whatever reason, numpy's fft has very limited precision, and + # the comparison fails unless you cast the float64 to a float16 + if hasattr(np, 'float16'): + assert_array_almost_equal_nulp(np.asarray(z, dtype=np.float16), + np.asarray(a, dtype=np.float16), 10) + assert np.all(np.abs(z - a) < 1e-14) + + @pytest.mark.parametrize(option_names, options) + def test_uniform_3x3_withnan(self, boundary, interpolate_nan, + normalize_kernel, ignore_edge_zeros): + ''' + Test that the different modes are producing the correct results using + a 3x3 uniform kernel. This version includes a NaN value in the + original array. + ''' + + x = np.array([[0., 0., 3.], + [1., np.nan, 0.], + [0., 2., 0.]], dtype='float64') + + y = np.array([[1., 1., 1.], + [1., 1., 1.], + [1., 1., 1.]], dtype='float64') + + z = convolve_fft(x, y, boundary=boundary, + interpolate_nan=interpolate_nan, + normalize_kernel=normalize_kernel, + ignore_edge_zeros=ignore_edge_zeros, + ) + + w_n = np.array([[3., 5., 3.], + [5., 8., 5.], + [3., 5., 3.]], dtype='float64') + w_z = np.array([[4., 6., 4.], + [6., 9., 6.], + [4., 6., 4.]], dtype='float64') + answer_dict = { + 'sum': np.array([[1., 4., 3.], + [3., 6., 5.], + [3., 3., 2.]], dtype='float64'), + 'sum_wrap': np.array([[6., 6., 6.], + [6., 6., 6.], + [6., 6., 6.]], dtype='float64'), + } + answer_dict['average'] = answer_dict['sum'] / w_z + answer_dict['average_ignan'] = answer_dict['sum'] / w_n + answer_dict['average_wrap_ignan'] = answer_dict['sum_wrap'] / 8. + answer_dict['average_wrap'] = answer_dict['sum_wrap'] / 9. + answer_dict['average_withzeros'] = answer_dict['sum'] / 9. 
+ answer_dict['average_withzeros_ignan'] = answer_dict['sum'] / 8. + answer_dict['sum_withzeros'] = answer_dict['sum'] + answer_dict['sum_ignan'] = answer_dict['sum'] + answer_dict['sum_withzeros_ignan'] = answer_dict['sum'] + answer_dict['sum_wrap_ignan'] = answer_dict['sum_wrap'] + + if normalize_kernel: + answer_key = 'average' + else: + answer_key = 'sum' + + if boundary == 'wrap': + answer_key += '_wrap' + elif not ignore_edge_zeros: + answer_key += '_withzeros' + + if interpolate_nan: + answer_key += '_ignan' + + a = answer_dict[answer_key] + # for reasons unknown, the Windows FFT returns an answer for the [0, 0] + # component that is EXACTLY 10*np.spacing + assert np.all(np.abs(z - a) <= np.spacing(np.where(z > a, z, a)) * 10) + + def test_big_fail(self): + """ Test that convolve_fft raises an exception if a too-large array is passed in """ + + with pytest.raises(ValueError) as ex: + # while a good idea, this approach did not work; it actually writes to disk + #arr = np.memmap('file.np', mode='w+', shape=(512, 512, 512), dtype=np.complex) + # this just allocates the memory but never touches it; it's better: + arr = np.empty([512, 512, 512], dtype=np.complex) + # note 512**3 * 16 bytes = 2.0 GB + convolve_fft(arr, arr) diff --git a/astropy/convolution/tests/test_convolve_kernels.py b/astropy/convolution/tests/test_convolve_kernels.py new file mode 100644 index 0000000..2d6ea35 --- /dev/null +++ b/astropy/convolution/tests/test_convolve_kernels.py @@ -0,0 +1,124 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import itertools + +import numpy as np +from numpy.testing import assert_almost_equal + +from ..convolve import convolve, convolve_fft +from ..kernels import Gaussian2DKernel, Box2DKernel, Tophat2DKernel +from ...tests.helper import pytest + + +SHAPES_ODD = [[15, 15], [31, 31]] +SHAPES_EVEN = [[8, 8], [16, 16], [32, 32]] +WIDTHS = [2, 3, 4, 5] + 
+KERNELS = [] + +for shape in SHAPES_ODD: + for width in WIDTHS: + + KERNELS.append(Gaussian2DKernel(width, + x_size=shape[0], + y_size=shape[1], + mode='oversample', + factor=10)) + + KERNELS.append(Box2DKernel(width, + x_size=shape[0], + y_size=shape[1], + mode='oversample', + factor=10)) + + KERNELS.append(Tophat2DKernel(width, + x_size=shape[0], + y_size=shape[1], + mode='oversample', + factor=10)) + + +class Test2DConvolutions(object): + + @pytest.mark.parametrize('kernel', KERNELS) + def test_centered_makekernel(self, kernel): + """ + Test smoothing of an image with a single positive pixel + """ + + shape = kernel.array.shape + + x = np.zeros(shape) + xslice = [slice(sh // 2, sh // 2 + 1) for sh in shape] + x[xslice] = 1.0 + + c2 = convolve_fft(x, kernel, boundary='fill') + c1 = convolve(x, kernel, boundary='fill') + + assert_almost_equal(c1, c2, decimal=12) + + @pytest.mark.parametrize('kernel', KERNELS) + def test_random_makekernel(self, kernel): + """ + Test smoothing of an image made of random noise + """ + + shape = kernel.array.shape + + x = np.random.randn(*shape) + + c2 = convolve_fft(x, kernel, boundary='fill') + c1 = convolve(x, kernel, boundary='fill') + + # not clear why, but these differ by a couple ulps... 
+ assert_almost_equal(c1, c2, decimal=12) + + @pytest.mark.parametrize(('shape', 'width'), list(itertools.product(SHAPES_ODD, WIDTHS))) + def test_uniform_smallkernel(self, shape, width): + """ + Test smoothing of an image with a single positive pixel + + Uses a simple, small kernel + """ + + if width % 2 == 0: + # convolve does not accept odd-shape kernels + return + + kernel = np.ones([width, width]) + + x = np.zeros(shape) + xslice = [slice(sh // 2, sh // 2 + 1) for sh in shape] + x[xslice] = 1.0 + + c2 = convolve_fft(x, kernel, boundary='fill') + c1 = convolve(x, kernel, boundary='fill') + + assert_almost_equal(c1, c2, decimal=12) + + @pytest.mark.parametrize(('shape', 'width'), list(itertools.product(SHAPES_ODD, [1, 3, 5]))) + def test_smallkernel_Box2DKernel(self, shape, width): + """ + Test smoothing of an image with a single positive pixel + + Compares a small uniform kernel to the Box2DKernel + """ + + kernel1 = np.ones([width, width]) / np.float(width) ** 2 + kernel2 = Box2DKernel(width, mode='oversample', factor=10) + + x = np.zeros(shape) + xslice = [slice(sh // 2, sh // 2 + 1) for sh in shape] + x[xslice] = 1.0 + + c2 = convolve_fft(x, kernel2, boundary='fill') + c1 = convolve_fft(x, kernel1, boundary='fill') + + assert_almost_equal(c1, c2, decimal=12) + + c2 = convolve(x, kernel2, boundary='fill') + c1 = convolve(x, kernel1, boundary='fill') + + assert_almost_equal(c1, c2, decimal=12) diff --git a/astropy/convolution/tests/test_convolve_speeds.py b/astropy/convolution/tests/test_convolve_speeds.py new file mode 100644 index 0000000..7e8fb79 --- /dev/null +++ b/astropy/convolution/tests/test_convolve_speeds.py @@ -0,0 +1,184 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import timeit + +import numpy as np # pylint: disable=W0611 + +# largest image size to use for "linear" and fft convolutions +max_exponents_linear = {1: 15, 2: 7, 3: 5} 
+max_exponents_fft = {1: 15, 2: 10, 3: 7} + +if __name__ == "__main__": + for ndims in [1, 2, 3]: + print("\n%i-dimensional arrays ('n' is the size of the image AND the kernel)" % ndims) + print(" ".join(["%17s" % n for n in ("n", "convolve", "convolve_fft")])) + + for ii in range(3, max_exponents_fft[ndims]): + # array = np.random.random([2**ii]*ndims) + # test ODD sizes too + if ii < max_exponents_fft[ndims]: + setup = (""" +import numpy as np +from astropy.convolution.convolve import convolve +from astropy.convolution.convolve import convolve_fft +array = np.random.random([%i]*%i) +kernel = np.random.random([%i]*%i)""") % (2 ** ii - 1, ndims, 2 ** ii - 1, ndims) + + print("%16i:" % (int(2 ** ii - 1)), end=' ') + + if ii <= max_exponents_linear[ndims]: + for ffttype, extra in zip(("", "_fft"), + ("", "fft_pad=False")): + statement = "convolve%s(array, kernel, boundary='fill', %s)" % (ffttype, extra) + besttime = min(timeit.Timer(stmt=statement, setup=setup).repeat(3, 10)) + print("%17f" % (besttime), end=' ') + else: + print("%17s" % "skipped", end=' ') + statement = "convolve_fft(array, kernel, boundary='fill')" + besttime = min(timeit.Timer(stmt=statement, setup=setup).repeat(3, 10)) + print("%17f" % (besttime), end=' ') + + print() + + setup = (""" +import numpy as np +from astropy.convolution.convolve import convolve +from astropy.convolution.convolve import convolve_fft +array = np.random.random([%i]*%i) +kernel = np.random.random([%i]*%i)""") % (2 ** ii - 1, ndims, 2 ** ii - 1, ndims) + + print("%16i:" % (int(2 ** ii)), end=' ') + + if ii <= max_exponents_linear[ndims]: + for ffttype in ("", "_fft"): + statement = "convolve%s(array, kernel, boundary='fill')" % ffttype + besttime = min(timeit.Timer(stmt=statement, setup=setup).repeat(3, 10)) + print("%17f" % (besttime), end=' ') + else: + print("%17s" % "skipped", end=' ') + statement = "convolve_fft(array, kernel, boundary='fill')" + besttime = min(timeit.Timer(stmt=statement, setup=setup).repeat(3, 10)) + 
print("%17f" % (besttime), end=' ') + + print() + +""" +Unfortunately, these tests are pretty strongly inconclusive + +RESULTS on a 2011 Mac Air: +1-dimensional arrays ('n' is the size of the image AND the kernel) + n convolve convolve_fftnp convolve_fftw convolve_fftsp + 7: 0.000408 0.002334 0.005571 0.002677 + 8: 0.000399 0.002818 0.006505 0.003094 + 15: 0.000361 0.002491 0.005648 0.002678 + 16: 0.000371 0.002997 0.005983 0.003036 + 31: 0.000535 0.002450 0.005988 0.002880 + 32: 0.000452 0.002618 0.007102 0.004366 + 63: 0.000509 0.002876 0.008003 0.002981 + 64: 0.000453 0.002706 0.005520 0.003049 + 127: 0.000801 0.004080 0.008513 0.003932 + 128: 0.000749 0.003332 0.006236 0.003159 + 255: 0.002453 0.003111 0.007518 0.003564 + 256: 0.002478 0.003341 0.006325 0.004290 + 511: 0.008394 0.006224 0.010247 0.005991 + 512: 0.007934 0.003764 0.006840 0.004106 + 1023: 0.028741 0.007538 0.009591 0.007696 + 1024: 0.027900 0.004871 0.009628 0.005118 + 2047: 0.106323 0.021575 0.022041 0.020682 + 2048: 0.108916 0.008107 0.011049 0.007596 + 4095: 0.411936 0.021675 0.019761 0.020939 + 4096: 0.408992 0.018870 0.016663 0.012890 + 8191: 1.664517 8.278320 0.073001 7.803563 + 8192: 1.657573 0.037967 0.034227 0.028390 + 16383: 6.654678 0.251661 0.202271 0.222171 + 16384: 6.611977 0.073630 0.067616 0.055591 + +2-dimensional arrays ('n' is the size of the image AND the kernel) + n convolve convolve_fftnp convolve_fftw convolve_fftsp + 7: 0.000552 0.003524 0.006667 0.004318 + 8: 0.000646 0.004443 0.007354 0.003958 + 15: 0.002986 0.005093 0.012941 0.005951 + 16: 0.003549 0.005688 0.008818 0.006300 + 31: 0.074360 0.033973 0.031800 0.036937 + 32: 0.077338 0.017708 0.025637 0.011883 + 63: 0.848471 0.057407 0.052192 0.053213 + 64: 0.773061 0.029657 0.033409 0.028230 + 127: 14.656414 1.005329 0.402113 0.955279 + 128: 15.867796 0.266233 0.268551 0.237930 + 255: skipped 1.715546 1.566876 1.745338 + 256: skipped 1.515616 1.268220 1.036881 + 511: skipped 4.066155 4.303350 3.930661 + 512: skipped 
3.976139 4.337525 3.968935 + +3-dimensional arrays ('n' is the size of the image AND the kernel) + n convolve convolve_fftnp convolve_fftw convolve_fftsp + 7: 0.009239 0.012957 0.011957 0.015997 + 8: 0.012405 0.011328 0.011677 0.012283 + 15: 0.772434 0.075621 0.056711 0.079508 + 16: 0.964635 0.105846 0.072811 0.104611 + 31: 62.824051 2.295193 1.189505 2.351136 + 32: 79.507060 1.169182 0.821779 1.275770 + 63: skipped 11.250225 10.982726 10.585744 + 64: skipped 10.013558 11.507645 12.665557 + + + +On a 2009 Mac Pro: +1-dimensional arrays ('n' is the size of the image AND the kernel) + n convolve convolve_fftnp convolve_fftw convolve_fftsp + 7: 0.000360 0.002269 0.004986 0.002476 + 8: 0.000361 0.002468 0.005242 0.002696 + 15: 0.000364 0.002255 0.005244 0.002471 + 16: 0.000365 0.002506 0.005286 0.002727 + 31: 0.000385 0.002380 0.005422 0.002588 + 32: 0.000385 0.002531 0.005543 0.002737 + 63: 0.000474 0.002407 0.005392 0.002637 + 64: 0.000484 0.002602 0.005631 0.002823 + 127: 0.000752 0.004122 0.007827 0.003966 + 128: 0.000757 0.002763 0.005844 0.002958 + 255: 0.004316 0.003258 0.006566 0.003324 + 256: 0.004354 0.003180 0.006120 0.003245 + 511: 0.011517 0.007158 0.009898 0.006238 + 512: 0.011482 0.003873 0.006777 0.003820 + 1023: 0.034105 0.009211 0.009468 0.008260 + 1024: 0.034609 0.005504 0.008399 0.005080 + 2047: 0.113620 0.028097 0.020662 0.021603 + 2048: 0.112828 0.008403 0.010939 0.007331 + 4095: 0.403373 0.023211 0.018767 0.020065 + 4096: 0.403316 0.017550 0.017853 0.013651 + 8191: 1.519329 8.454573 0.211436 7.212381 + 8192: 1.519082 0.033148 0.030370 0.025905 + 16383: 5.887481 0.317428 0.153344 0.237119 + 16384: 5.888222 0.069379 0.065264 0.052847 + +2-dimensional arrays ('n' is the size of the image AND the kernel) + n convolve convolve_fftnp convolve_fftw convolve_fftsp + 7: 0.000474 0.003470 0.006131 0.003503 + 8: 0.000503 0.003565 0.006400 0.003586 + 15: 0.002011 0.004481 0.007825 0.004496 + 16: 0.002236 0.004744 0.007078 0.004680 + 31: 0.027291 0.019433 
0.014841 0.018034 + 32: 0.029283 0.009244 0.010161 0.008964 + 63: 0.445680 0.038171 0.026753 0.037404 + 64: 0.460616 0.028128 0.029487 0.029149 + 127: 7.003774 0.925921 0.282591 0.762671 + 128: 7.063657 0.110838 0.104402 0.133523 + 255: skipped 0.804682 0.708849 0.869368 + 256: skipped 0.797800 0.721042 0.880848 + 511: skipped 3.643626 3.687562 4.584770 + 512: skipped 3.715215 4.893539 5.538462 + +3-dimensional arrays ('n' is the size of the image AND the kernel) + n convolve convolve_fftnp convolve_fftw convolve_fftsp + 7: 0.004520 0.011519 0.009464 0.012335 + 8: 0.006422 0.010294 0.010220 0.011711 + 15: 0.329566 0.060978 0.045495 0.073692 + 16: 0.405275 0.069999 0.040659 0.086114 + 31: 24.935228 1.654920 0.710509 1.773879 + 32: 27.524226 0.724053 0.543507 1.027568 + 63: skipped 8.982771 12.407683 16.900078 + 64: skipped 8.956070 11.934627 17.296447 + +""" diff --git a/astropy/convolution/tests/test_discretize.py b/astropy/convolution/tests/test_discretize.py new file mode 100644 index 0000000..c798a28 --- /dev/null +++ b/astropy/convolution/tests/test_discretize.py @@ -0,0 +1,106 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import itertools + +import numpy as np +from numpy.testing import assert_allclose + +from ...tests.helper import pytest + +from ..utils import discretize_model +from ...modeling.functional_models import ( + Gaussian1D, Box1D, MexicanHat1D, Trapezoid1D, + Gaussian2D, Box2D, MexicanHat2D) +from ...modeling.tests.example_models import models_1D, models_2D +from ...modeling.tests.test_models import create_model + +try: + import scipy + HAS_SCIPY = True +except ImportError: + HAS_SCIPY = False + + +modes = ['center', 'linear_interp', 'oversample'] +test_models_1D = [Gaussian1D, Box1D, MexicanHat1D] +test_models_2D = [Gaussian2D, Box2D, MexicanHat2D] + + +@pytest.mark.parametrize(('model_class', 'mode'), 
list(itertools.product(test_models_1D, modes))) +def test_pixel_sum_1D(model_class, mode): + """ + Test if the sum of all pixels corresponds nearly to the integral. + """ + if model_class == Box1D and mode == "center": + pytest.skip("Non integrating mode. Skip integral test.") + parameters = models_1D[model_class] + model = create_model(model_class, parameters) + + values = discretize_model(model, models_1D[model_class]['x_lim'], mode=mode) + assert_allclose(values.sum(), models_1D[model_class]['integral'], atol=0.0001) + + +@pytest.mark.parametrize('mode', modes) +def test_gaussian_eval_1D(mode): + """ + Discretize Gaussian with different modes and check + if result is at least similar to Gaussian1D.eval(). + """ + model = Gaussian1D(1, 0, 20) + x = np.arange(-100, 101) + values = model(x) + disc_values = discretize_model(model, (-100, 101), mode=mode) + assert_allclose(values, disc_values, atol=0.001) + + +@pytest.mark.parametrize(('model_class', 'mode'), list(itertools.product(test_models_2D, modes))) +def test_pixel_sum_2D(model_class, mode): + """ + Test if the sum of all pixels corresponds nearly to the integral. + """ + if model_class == Box2D and mode == "center": + pytest.skip("Non integrating mode. 
Skip integral test.") + + parameters = models_2D[model_class] + model = create_model(model_class, parameters) + + values = discretize_model(model, models_2D[model_class]['x_lim'], + models_2D[model_class]['y_lim'], mode=mode) + assert_allclose(values.sum(), models_2D[model_class]['integral'], atol=0.0001) + + +@pytest.mark.parametrize('mode', modes) +def test_gaussian_eval_2D(mode): + """ + Discretize Gaussian with different modes and check + if result is at least similar to Gaussian1D.eval() + """ + model = Gaussian2D(1, 0, 0, 20, 20) + x = np.arange(-100, 101) + y = np.arange(-100, 101) + x, y = np.meshgrid(x, y) + values = model(x, y) + disc_values = discretize_model(model, (-100, 101), (-100, 101), mode=mode) + assert_allclose(values, disc_values, atol=0.001) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_subpixel_gauss_1D(): + """ + Test subpixel accuracy of the oversample mode with gaussian 1D model. + """ + gauss_1D = Gaussian1D(1, 0, 0.1) + values = discretize_model(gauss_1D, (-1, 2), mode='integrate', factor=100) + assert_allclose(values.sum(), np.sqrt(2 * np.pi) * 0.1, atol=0.00001) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_subpixel_gauss_2D(): + """ + Test subpixel accuracy of the oversample mode with gaussian 2D model. 
+ """ + gauss_2D = Gaussian2D(1, 0, 0, 0.1, 0.1) + values = discretize_model(gauss_2D, (-1, 2), (-1, 2), mode='integrate', factor=100) + assert_allclose(values.sum(), 2 * np.pi * 0.01, atol=0.00001) diff --git a/astropy/convolution/tests/test_kernel_class.py b/astropy/convolution/tests/test_kernel_class.py new file mode 100644 index 0000000..b33159b --- /dev/null +++ b/astropy/convolution/tests/test_kernel_class.py @@ -0,0 +1,424 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import itertools + +import numpy as np +from numpy.testing import assert_almost_equal + +from ...tests.helper import pytest +from ..convolve import convolve, convolve_fft +from ..kernels import ( + Gaussian1DKernel, Gaussian2DKernel, Box1DKernel, Box2DKernel, + Trapezoid1DKernel, TrapezoidDisk2DKernel, MexicanHat1DKernel, + Tophat2DKernel, MexicanHat2DKernel, AiryDisk2DKernel, Ring2DKernel, + CustomKernel, Model1DKernel, Model2DKernel) + +from ..utils import KernelSizeError +from ...modeling.models import Box2D, Gaussian1D, Gaussian2D + +try: + from scipy.ndimage import filters + HAS_SCIPY = True +except ImportError: + HAS_SCIPY = False + +WIDTHS_ODD = [3, 5, 7, 9] +WIDTHS_EVEN = [2, 4, 8, 16] +MODES = ['center', 'linear_interp', 'oversample', 'integrate'] +KERNEL_TYPES = [Gaussian1DKernel, Gaussian2DKernel, + Box1DKernel, Box2DKernel, + Trapezoid1DKernel, TrapezoidDisk2DKernel, + MexicanHat1DKernel, Tophat2DKernel, AiryDisk2DKernel, Ring2DKernel] + + +NUMS = [1, 1., np.float(1.), np.float32(1.), np.float64(1.)] + + +# Test data +delta_pulse_1D = np.zeros(81) +delta_pulse_1D[40] = 1 + +delta_pulse_2D = np.zeros((81, 81)) +delta_pulse_2D[40, 40] = 1 + +random_data_1D = np.random.rand(61) +random_data_2D = np.random.rand(61, 61) + + +class TestKernels(object): + """ + Test class for the built-in convolution kernels. 
+ """ + + @pytest.mark.skipif('not HAS_SCIPY') + @pytest.mark.parametrize(('width'), WIDTHS_ODD) + def test_scipy_filter_gaussian(self, width): + """ + Test GaussianKernel against SciPy ndimage gaussian filter. + """ + gauss_kernel_1D = Gaussian1DKernel(width) + gauss_kernel_1D.normalize() + gauss_kernel_2D = Gaussian2DKernel(width) + gauss_kernel_2D.normalize() + + astropy_1D = convolve(delta_pulse_1D, gauss_kernel_1D, boundary='fill') + astropy_2D = convolve(delta_pulse_2D, gauss_kernel_2D, boundary='fill') + + scipy_1D = filters.gaussian_filter(delta_pulse_1D, width) + scipy_2D = filters.gaussian_filter(delta_pulse_2D, width) + + assert_almost_equal(astropy_1D, scipy_1D, decimal=12) + assert_almost_equal(astropy_2D, scipy_2D, decimal=12) + + @pytest.mark.skipif('not HAS_SCIPY') + @pytest.mark.parametrize(('width'), WIDTHS_ODD) + def test_scipy_filter_gaussian_laplace(self, width): + """ + Test MexicanHat kernels against SciPy ndimage gaussian laplace filters. + """ + mexican_kernel_1D = MexicanHat1DKernel(width) + mexican_kernel_2D = MexicanHat2DKernel(width) + + astropy_1D = convolve(delta_pulse_1D, mexican_kernel_1D, boundary='fill') + astropy_2D = convolve(delta_pulse_2D, mexican_kernel_2D, boundary='fill') + + # The Laplace of Gaussian filter is an inverted Mexican Hat + # filter. + scipy_1D = -filters.gaussian_laplace(delta_pulse_1D, width) + scipy_2D = -filters.gaussian_laplace(delta_pulse_2D, width) + + # There is a slight deviation in the normalization. They differ by a + # factor of ~1.0000284132604045. The reason is not known. 
+ assert_almost_equal(astropy_1D, scipy_1D, decimal=5) + assert_almost_equal(astropy_2D, scipy_2D, decimal=5) + + @pytest.mark.parametrize(('kernel_type', 'width'), list(itertools.product(KERNEL_TYPES, WIDTHS_ODD))) + def test_delta_data(self, kernel_type, width): + """ + Test smoothing of an image with a single positive pixel + """ + if kernel_type == AiryDisk2DKernel and not HAS_SCIPY: + pytest.skip("Omitting AiryDisk2DKernel, which requires SciPy") + if not kernel_type == Ring2DKernel: + kernel = kernel_type(width) + else: + kernel = kernel_type(width, width * 0.2) + + if kernel.dimension == 1: + c1 = convolve_fft(delta_pulse_1D, kernel, boundary='fill') + c2 = convolve(delta_pulse_1D, kernel, boundary='fill') + assert_almost_equal(c1, c2, decimal=12) + else: + c1 = convolve_fft(delta_pulse_2D, kernel, boundary='fill') + c2 = convolve(delta_pulse_2D, kernel, boundary='fill') + assert_almost_equal(c1, c2, decimal=12) + + @pytest.mark.parametrize(('kernel_type', 'width'), list(itertools.product(KERNEL_TYPES, WIDTHS_ODD))) + def test_random_data(self, kernel_type, width): + """ + Test smoothing of an image made of random noise + """ + if kernel_type == AiryDisk2DKernel and not HAS_SCIPY: + pytest.skip("Omitting AiryDisk2DKernel, which requires SciPy") + if not kernel_type == Ring2DKernel: + kernel = kernel_type(width) + else: + kernel = kernel_type(width, width * 0.2) + + if kernel.dimension == 1: + c1 = convolve_fft(random_data_1D, kernel, boundary='fill') + c2 = convolve(random_data_1D, kernel, boundary='fill') + assert_almost_equal(c1, c2, decimal=12) + else: + c1 = convolve_fft(random_data_2D, kernel, boundary='fill') + c2 = convolve(random_data_2D, kernel, boundary='fill') + assert_almost_equal(c1, c2, decimal=12) + + @pytest.mark.parametrize(('width'), WIDTHS_ODD) + def test_uniform_smallkernel(self, width): + """ + Test smoothing of an image with a single positive pixel + + Instead of using kernel class, uses a simple, small kernel + """ + kernel = 
np.ones([width, width]) + + c2 = convolve_fft(delta_pulse_2D, kernel, boundary='fill') + c1 = convolve(delta_pulse_2D, kernel, boundary='fill') + assert_almost_equal(c1, c2, decimal=12) + + @pytest.mark.parametrize(('width'), WIDTHS_ODD) + def test_smallkernel_vs_Box2DKernel(self, width): + """ + Test smoothing of an image with a single positive pixel + """ + kernel1 = np.ones([width, width]) / width ** 2 + kernel2 = Box2DKernel(width) + + c2 = convolve_fft(delta_pulse_2D, kernel2, boundary='fill') + c1 = convolve_fft(delta_pulse_2D, kernel1, boundary='fill') + + assert_almost_equal(c1, c2, decimal=12) + + def test_convolve_1D_kernels(self): + """ + Check if convolving two kernels with each other works correctly. + """ + gauss_1 = Gaussian1DKernel(3) + gauss_2 = Gaussian1DKernel(4) + test_gauss_3 = Gaussian1DKernel(5) + + gauss_3 = convolve(gauss_1, gauss_2) + assert np.all(np.abs((gauss_3 - test_gauss_3).array) < 0.01) + + def test_convolve_2D_kernels(self): + """ + Check if convolving two kernels with each other works correctly. + """ + gauss_1 = Gaussian2DKernel(3) + gauss_2 = Gaussian2DKernel(4) + test_gauss_3 = Gaussian2DKernel(5) + + gauss_3 = convolve(gauss_1, gauss_2) + assert np.all(np.abs((gauss_3 - test_gauss_3).array) < 0.01) + + @pytest.mark.parametrize(('number'), NUMS) + def test_multiply_scalar(self, number): + """ + Check if multiplying a kernel with a scalar works correctly. + """ + gauss = Gaussian1DKernel(3) + gauss_new = number * gauss + assert_almost_equal(gauss_new.array, gauss.array * number, decimal=12) + + @pytest.mark.parametrize(('number'), NUMS) + def test_multiply_scalar_type(self, number): + """ + Check if multiplying a kernel with a scalar works correctly. + """ + gauss = Gaussian1DKernel(3) + gauss_new = number * gauss + assert type(gauss_new) == Gaussian1DKernel + + @pytest.mark.parametrize(('number'), NUMS) + def test_rmultiply_scalar_type(self, number): + """ + Check if multiplying a kernel with a scalar works correctly. 
+ """ + gauss = Gaussian1DKernel(3) + gauss_new = gauss * number + assert type(gauss_new) == Gaussian1DKernel + + def test_model_1D_kernel(self): + """ + Check Model1DKernel against Gaussian1Dkernel + """ + stddev = 5. + gauss = Gaussian1D(1. / np.sqrt(2 * np.pi * stddev**2), 0, stddev) + model_gauss_kernel = Model1DKernel(gauss, x_size=21) + gauss_kernel = Gaussian1DKernel(stddev, x_size=21) + assert_almost_equal(model_gauss_kernel.array, gauss_kernel.array, + decimal=12) + + def test_model_2D_kernel(self): + """ + Check Model2DKernel against Gaussian2Dkernel + """ + stddev = 5. + gauss = Gaussian2D(1. / (2 * np.pi * stddev**2), 0, 0, stddev, stddev) + model_gauss_kernel = Model2DKernel(gauss, x_size=21) + gauss_kernel = Gaussian2DKernel(stddev, x_size=21) + assert_almost_equal(model_gauss_kernel.array, gauss_kernel.array, + decimal=12) + + def test_custom_1D_kernel(self): + """ + Check CustomKernel against Box1DKernel. + """ + #Define one dimensional array: + array = np.ones(5) + custom = CustomKernel(array) + custom.normalize() + box = Box1DKernel(5) + + c2 = convolve(delta_pulse_1D, custom, boundary='fill') + c1 = convolve(delta_pulse_1D, box, boundary='fill') + assert_almost_equal(c1, c2, decimal=12) + + def test_custom_2D_kernel(self): + """ + Check CustomKernel against Box2DKernel. + """ + #Define one dimensional array: + array = np.ones((5, 5)) + custom = CustomKernel(array) + custom.normalize() + box = Box2DKernel(5) + + c2 = convolve(delta_pulse_2D, custom, boundary='fill') + c1 = convolve(delta_pulse_2D, box, boundary='fill') + assert_almost_equal(c1, c2, decimal=12) + + def test_custom_1D_kernel_list(self): + """ + Check if CustomKernel works with lists. + """ + custom = CustomKernel([1, 1, 1, 1, 1]) + assert custom.is_bool == True + + def test_custom_2D_kernel_list(self): + """ + Check if CustomKernel works with lists. 
+ """ + custom = CustomKernel([[1, 1, 1], + [1, 1, 1], + [1, 1, 1]]) + assert custom.is_bool == True + + def test_custom_1D_kernel_zerosum(self): + """ + Check if CustomKernel works when the input array/list + sums to zero. + """ + custom = CustomKernel([-2, -1, 0, 1, 2]) + assert custom.truncation == 0. + assert custom.normalization == np.inf + + def test_custom_2D_kernel_zerosum(self): + """ + Check if CustomKernel works when the input array/list + sums to zero. + """ + custom = CustomKernel([[0, -1, 0], + [-1, 4, -1], + [0, -1, 0]]) + assert custom.truncation == 0. + assert custom.normalization == np.inf + + def test_custom_kernel_odd_error(self): + """ + Check if CustomKernel raises if the array size is odd. + """ + with pytest.raises(KernelSizeError): + custom = CustomKernel([1, 1, 1, 1]) + + def test_add_1D_kernels(self): + """ + Check if adding of two 1D kernels works. + """ + box_1 = Box1DKernel(5) + box_2 = Box1DKernel(3) + box_3 = Box1DKernel(1) + box_sum_1 = box_1 + box_2 + box_3 + box_sum_2 = box_2 + box_3 + box_1 + box_sum_3 = box_3 + box_1 + box_2 + ref = [1/5., 1/5. + 1/3., 1 + 1/3. + 1/5., 1/5. + 1/3., 1/5.] + assert_almost_equal(box_sum_1.array, ref, decimal=12) + assert_almost_equal(box_sum_2.array, ref, decimal=12) + assert_almost_equal(box_sum_3.array, ref, decimal=12) + + # Assert that the kernels haven't changed + assert_almost_equal(box_1.array, [0.2, 0.2, 0.2, 0.2, 0.2], decimal=12) + assert_almost_equal(box_2.array, [1/3., 1/3., 1/3.], decimal=12) + assert_almost_equal(box_3.array, [1], decimal=12) + + def test_add_2D_kernels(self): + """ + Check if adding of two 1D kernels works. 
+ """ + box_1 = Box2DKernel(3) + box_2 = Box2DKernel(1) + box_sum_1 = box_1 + box_2 + box_sum_2 = box_2 + box_1 + ref = [[1 / 9., 1 / 9., 1 / 9.], + [1 / 9., 1 + 1 / 9., 1 / 9.], + [1 / 9., 1 / 9., 1 / 9.]] + ref_1 = [[1 / 9., 1 / 9., 1 / 9.], + [1 / 9., 1 / 9., 1 / 9.], + [1 / 9., 1 / 9., 1 / 9.]] + assert_almost_equal(box_2.array, [[1]], decimal=12) + assert_almost_equal(box_1.array, ref_1, decimal=12) + assert_almost_equal(box_sum_1.array, ref, decimal=12) + assert_almost_equal(box_sum_2.array, ref, decimal=12) + + def test_Gaussian1DKernel_even_size(self): + """ + Check if even size for GaussianKernel works. + """ + gauss = Gaussian1DKernel(3, x_size=10) + assert gauss.array.size == 10 + + def test_Gaussian2DKernel_even_size(self): + """ + Check if even size for GaussianKernel works. + """ + gauss = Gaussian2DKernel(3, x_size=10, y_size=10) + assert gauss.array.shape == (10, 10) + + def test_normalize_peak(self): + """ + Check if normalize works with peak mode. + """ + custom = CustomKernel([1, 2, 3, 2, 1]) + custom.normalize(mode='peak') + assert custom.array.max() == 1 + + def test_check_kernel_attributes(self): + """ + Check if kernel attributes are correct. + """ + box = Box2DKernel(5) + + # Check truncation + assert box.truncation == 0 + + # Check model + assert isinstance(box.model, Box2D) + + # Check center + assert box.center == [2, 2] + + # Check normalization + assert_almost_equal(box.normalization, 1., decimal=12) + + # Check seperability + assert box.separable + + @pytest.mark.parametrize(('kernel_type', 'mode'), list(itertools.product(KERNEL_TYPES, MODES))) + def test_dicretize_modes(self, kernel_type, mode): + """ + Check if the different modes result in kernels that work with convolve. + Use only small kernel width, to make the test pass quickly. 
+ """ + if kernel_type == AiryDisk2DKernel and not HAS_SCIPY: + pytest.skip("Omitting AiryDisk2DKernel, which requires SciPy") + if not kernel_type == Ring2DKernel: + kernel = kernel_type(3) + else: + kernel = kernel_type(3, 3 * 0.2) + + if kernel.dimension == 1: + c1 = convolve_fft(delta_pulse_1D, kernel, boundary='fill') + c2 = convolve(delta_pulse_1D, kernel, boundary='fill') + assert_almost_equal(c1, c2, decimal=12) + else: + c1 = convolve_fft(delta_pulse_2D, kernel, boundary='fill') + c2 = convolve(delta_pulse_2D, kernel, boundary='fill') + assert_almost_equal(c1, c2, decimal=12) + + @pytest.mark.parametrize(('width'), WIDTHS_EVEN) + def test_box_kernels_even_size(self, width): + """ + Check if BoxKernel work properly with even sizes. + """ + kernel_1D = Box1DKernel(width) + assert kernel_1D.shape[0] % 2 != 0 + assert kernel_1D.array.sum() == 1. + + kernel_2D = Box2DKernel(width) + assert np.all([_ % 2 != 0 for _ in kernel_2D.shape]) + assert kernel_2D.array.sum() == 1. + + diff --git a/astropy/convolution/utils.py b/astropy/convolution/utils.py new file mode 100644 index 0000000..8de4b9c --- /dev/null +++ b/astropy/convolution/utils.py @@ -0,0 +1,275 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import numpy as np + +from ..modeling.core import Fittable1DModel, Fittable2DModel + + +__all__ = ['discretize_model'] + + +class DiscretizationError(Exception): + """ + Called when discretization of models goes wrong. + """ + + +class KernelSizeError(Exception): + """ + Called when size of kernels is even. + """ + + +def add_kernel_arrays_1D(array_1, array_2): + """ + Add two 1D kernel arrays of different size. + + The arrays are added with the centers lying upon each other. 
+ """ + if array_1.size > array_2.size: + new_array = array_1.copy() + center = array_1.size // 2 + slice_ = slice(center - array_2.size // 2, + center + array_2.size // 2 + 1) + new_array[slice_] += array_2 + return new_array + elif array_2.size > array_1.size: + new_array = array_2.copy() + center = array_2.size // 2 + slice_ = slice(center - array_1.size // 2, + center + array_1.size // 2 + 1) + new_array[slice_] += array_1 + return new_array + return array_2 + array_1 + + +def add_kernel_arrays_2D(array_1, array_2): + """ + Add two 2D kernel arrays of different size. + + The arrays are added with the centers lying upon each other. + """ + if array_1.size > array_2.size: + new_array = array_1.copy() + center = [axes_size // 2 for axes_size in array_1.shape] + slice_x = slice(center[1] - array_2.shape[1] // 2, + center[1] + array_2.shape[1] // 2 + 1) + slice_y = slice(center[0] - array_2.shape[0] // 2, + center[0] + array_2.shape[0] // 2 + 1) + new_array[slice_y, slice_x] += array_2 + return new_array + elif array_2.size > array_1.size: + new_array = array_2.copy() + center = [axes_size // 2 for axes_size in array_2.shape] + slice_x = slice(center[1] - array_1.shape[1] // 2, + center[1] + array_1.shape[1] // 2 + 1) + slice_y = slice(center[0] - array_1.shape[0] // 2, + center[0] + array_1.shape[0] // 2 + 1) + new_array[slice_y, slice_x] += array_1 + return new_array + return array_2 + array_1 + + +def discretize_model(model, x_range, y_range=None, mode='center', factor=10): + """ + Function to evaluate analytical models on a grid. + + Parameters + ---------- + model : `~astropy.modeling.FittableModel` + Model to be evaluated. + x_range : tuple + x range in which the model is evaluated. + y_range : tuple, optional + y range in which the model is evaluated. + Necessary only for 2D models. + mode : str, optional + One of the following modes: + * ``'center'`` (default) + Discretize model by taking the value + at the center of the bin. 
+ * ``'linear_interp'`` + Discretize model by linearly interpolating + between the values at the corners of the bin. + For 2D models interpolation is bilinear. + * ``'oversample'`` + Discretize model by taking the average + on an oversampled grid. + * ``'integrate'`` + Discretize model by integrating the model + over the bin using `scipy.integrate.quad`. + Very slow. + factor : float or int + Factor of oversampling. Default = 10. + + Returns + ------- + array : `numpy.array` + Model value array + + Notes + ----- + The ``oversample`` mode allows to conserve the integral on a subpixel + scale. Here is the example of a normalized Gaussian1D: + + .. plot:: + :include-source: + + import matplotlib.pyplot as plt + import numpy as np + from astropy.modeling.models import Gaussian1D + from astropy.convolution.utils import discretize_model + gauss_1D = Gaussian1D(1 / (0.5 * np.sqrt(2 * np.pi)), 0, 0.5) + y_center = discretize_model(gauss_1D, (-2, 3), mode='center') + y_corner = discretize_model(gauss_1D, (-2, 3), mode='linear_interp') + y_oversample = discretize_model(gauss_1D, (-2, 3), mode='oversample') + plt.plot(y_center, label='center sum = {0:3f}'.format(y_center.sum())) + plt.plot(y_corner, label='linear_interp sum = {0:3f}'.format(y_corner.sum())) + plt.plot(y_oversample, label='oversample sum = {0:3f}'.format(y_oversample.sum())) + plt.xlabel('pixels') + plt.ylabel('value') + plt.legend() + plt.show() + + + """ + if isinstance(model, Fittable2DModel) and y_range is None: + raise Exception("Please specify y range.") + if mode == "center": + if isinstance(model, Fittable1DModel): + return discretize_center_1D(model, x_range) + if isinstance(model, Fittable2DModel): + return discretize_center_2D(model, x_range, y_range) + elif mode == "linear_interp": + if isinstance(model, Fittable1DModel): + return discretize_linear_1D(model, x_range) + if isinstance(model, Fittable2DModel): + return discretize_bilinear_2D(model, x_range, y_range) + elif mode == "oversample": + if 
isinstance(model, Fittable1DModel): + return discretize_oversample_1D(model, x_range, factor) + if isinstance(model, Fittable2DModel): + return discretize_oversample_2D(model, x_range, y_range, factor) + elif mode == "integrate": + if isinstance(model, Fittable1DModel): + return discretize_integrate_1D(model, x_range) + if isinstance(model, Fittable2DModel): + return discretize_integrate_2D(model, x_range, y_range) + else: + raise DiscretizationError('Invalid mode.') + + +def discretize_center_1D(model, x_range): + """ + Discretize model by taking the value at the center of the bin. + """ + x = np.arange(*x_range) + return model(x) + + +def discretize_center_2D(model, x_range, y_range): + """ + Discretize model by taking the value at the center of the pixel. + """ + x = np.arange(*x_range) + y = np.arange(*y_range) + x, y = np.meshgrid(x, y) + return model(x, y) + + +def discretize_linear_1D(model, x_range): + """ + Discretize model by performing a linear interpolation. + """ + # Evaluate model 0.5 pixel outside the boundaries + x = np.arange(x_range[0] - 0.5, x_range[1] + 0.5) + values_intermediate_grid = model(x) + return 0.5 * (values_intermediate_grid[1:] + values_intermediate_grid[:-1]) + + +def discretize_bilinear_2D(model, x_range, y_range): + """ + Discretize model by performing a bilinear interpolation. + """ + # Evaluate model 0.5 pixel outside the boundaries + x = np.arange(x_range[0] - 0.5, x_range[1] + 0.5) + y = np.arange(y_range[0] - 0.5, y_range[1] + 0.5) + x, y = np.meshgrid(x, y) + values_intermediate_grid = model(x, y) + + # Mean in y direction + values = 0.5 * (values_intermediate_grid[1:, :] + + values_intermediate_grid[:-1, :]) + # Mean in x direction + values = 0.5 * (values[:, 1:] + + values[:, :-1]) + return values + + +def discretize_oversample_1D(model, x_range, factor=10): + """ + Discretize model by taking the average on an oversampled grid. 
+ """ + # Evaluate model on oversampled grid + x = np.arange(x_range[0] - 0.5 * (1 - 1 / factor), + x_range[1] + 0.5 * (1 + 1 / factor), 1. / factor) + + values = model(x) + + # Reshape and compute mean + values = np.reshape(values, (x.size // factor, factor)) + return values.mean(axis=1)[:-1] + + +def discretize_oversample_2D(model, x_range, y_range, factor=10): + """ + Discretize model by taking the average on an oversampled grid. + """ + # Evaluate model on oversampled grid + x = np.arange(x_range[0] - 0.5 * (1 - 1 / factor), + x_range[1] + 0.5 * (1 + 1 / factor), 1. / factor) + + y = np.arange(y_range[0] - 0.5 * (1 - 1 / factor), + y_range[1] + 0.5 * (1 + 1 / factor), 1. / factor) + x_grid, y_grid = np.meshgrid(x, y) + values = model(x_grid, y_grid) + + # Reshape and compute mean + shape = (y.size // factor, factor, x.size // factor, factor) + values = np.reshape(values, shape) + return values.mean(axis=3).mean(axis=1)[:-1, :-1] + + +def discretize_integrate_1D(model, x_range): + """ + Discretize model by integrating numerically the model over the bin. + """ + from scipy.integrate import quad + # Set up grid + x = np.arange(x_range[0] - 0.5, x_range[1] + 0.5) + values = np.array([]) + + # Integrate over all bins + for i in range(x.size - 1): + values = np.append(values, quad(model, x[i], x[i + 1])[0]) + return values + + +def discretize_integrate_2D(model, x_range, y_range): + """ + Discretize model by integrating the model over the pixel. 
+ """ + from scipy.integrate import dblquad + # Set up grid + x = np.arange(x_range[0] - 0.5, x_range[1] + 0.5) + y = np.arange(y_range[0] - 0.5, y_range[1] + 0.5) + values = np.empty((y.size - 1, x.size - 1)) + + # Integrate over all pixels + for i in range(x.size - 1): + for j in range(y.size - 1): + values[j, i] = dblquad(model, x[i], x[i + 1], + lambda x: y[j], lambda x: y[j + 1])[0] + return values diff --git a/astropy/coordinates/__init__.py b/astropy/coordinates/__init__.py new file mode 100644 index 0000000..cfb1722 --- /dev/null +++ b/astropy/coordinates/__init__.py @@ -0,0 +1,23 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +This subpackage contains classes and functions for celestial coordinates +of astronomical objects. It also contains a framework for conversions +between coordinate systems. +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from .errors import * +from .angles import * +from .baseframe import * +from .distances import * +from .earth import * +from .transformations import * +from .builtin_frames import * +from .name_resolve import * +from .matching import * +from .representation import * +from .sky_coordinate import * + +__doc__ += builtin_frames._transform_graph_docs diff --git a/astropy/coordinates/angle_lextab.py b/astropy/coordinates/angle_lextab.py new file mode 100644 index 0000000..92fdc11 --- /dev/null +++ b/astropy/coordinates/angle_lextab.py @@ -0,0 +1,14 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +# angle_lextab.py. This file automatically created by PLY (version 3.4). Don't edit! 
+_tabversion = '3.4' +_lextokens = {'DEGREE': 1, 'HOUR': 1, 'SIMPLE_UNIT': 1, 'SIGN': 1, 'SECOND': 1, 'COLON': 1, 'UINT': 1, 'MINUTE': 1, 'UFLOAT': 1} +_lexreflags = 0 +_lexliterals = '' +_lexstateinfo = {'INITIAL': 'inclusive'} + +_lexstatere = {'INITIAL': [('(?P((\\d+\\.\\d*)|(\\.\\d+))([eE][+-\u2212]?\\d+)?)|(?P\\d+)|(?P[+\u2212-])|(?P(?:hectoradian)|(?:petaradian)|(?:hrad)|(?:zeptoradian)|(?:arcsec)|(?:aarcmin)|(?:deciarcminute)|(?:Parcsec)|(?:kiloarcsecond)|(?:exaarcsecond)|(?:deciradian)|(?:yoctoarcminute)|(?:prad)|(?:Parcmin)|(?:yottaradian)|(?:marcmin)|(?:aarcsec)|(?:milliarcsecond)|(?:milliradian)|(?:parcsec)|(?:arad)|(?:uarcmin)|(?:parcmin)|(?:decaarcminute)|(?:dekaarcsecond)|(?:nrad)|(?:Tarcsec)|(?:attoarcsecond)|(?:Trad)|(?:krad)|(?:Zarcmin)|(?:Earcmin)|(?:exaarcminute)|(?:farcmin)|(?:Prad)|(?:dekaarcminute)|(?:Earcsec)|(?:Yarcsec)|(?:Garcmin)|(?:daarcmin)|(?:kiloradian)|(?:nanoarcminute)|(?:kiloarcminute)|(?:megaradian)|(?:yarcmin)|(?:Zarcsec)|(?:teraarcsecond)|(?:femtoarcsecond)|(?:Marcsec)|(?:arcminute)|(?:zettaarcsecond)|(?:yoctoarcsecond)|(?:arcsecond)|(?:decaarcsecond)|(?:zeptoarcminute)|(?:attoradian)|(?:Grad)|(?:Garcsec)|(?:microarcminute)|(?:marcsec)|(?:picoarcminute)|(?:teraradian)|(?:narcsec)|(?:centiradian)|(?:zettaradian)|(?:darad)|(?:microradian)|(?:gigaarcsecond)|(?:decaradian)|(?:exaradian)|(?:centiarcminute)|(?:cy)|(?:femtoradian)|(?:mrad)|(?:femtoarcminute)|(?:yottaarcsecond)|(?:centiarcsecond)|(?:gigaradian)|(?:zettaarcminute)|(?:gigaarcminute)|(?:yoctoradian)|(?:crad)|(?:picoradian)|(?:zrad)|(?:dekaradian)|(?:narcmin)|(?:farcsec)|(?:Erad)|(?:radian)|(?:urad)|(?:nanoarcsecond)|(?:microarcsecond)|(?:hectoarcsecond)|(?:petaarcsecond)|(?:deciarcsecond)|(?:nanoradian)|(?:Mrad)|(?:carcmin)|(?:yottaarcminute)|(?:rad)|(?:uas)|(?:harcmin)|(?:megaarcsecond)|(?:yrad)|(?:Zrad)|(?:carcsec)|(?:Marcmin)|(?:Yarcmin)|(?:zeptoarcsecond)|(?:zarcmin)|(?:darcmin)|(?:karcmin)|(?:attoarcminute)|(?:daarcsec)|(?:Tarcmin)|(?:yarcsec)|(?:mas)|(?:frad)|(?:petaar
cminute)|(?:teraarcminute)|(?:harcsec)|(?:uarcsec)|(?:drad)|(?:arcmin)|(?:milliarcminute)|(?:darcsec)|(?:hectoarcminute)|(?:zarcsec)|(?:cycle)|(?:karcsec)|(?:megaarcminute)|(?:picoarcsecond)|(?:Yrad))|(?Pm(in(ute(s)?)?)?|\u2032|\\\'|\u1d50)|(?Ps(ec(ond(s)?)?)?|\u2033|\\"|\u02e2)|(?Pd(eg(ree(s)?)?)?|\xb0)|(?Phour(s)?|h(r)?|\u02b0)|(?P:)', [None, ('t_UFLOAT', 'UFLOAT'), None, None, None, None, ('t_UINT', 'UINT'), ('t_SIGN', 'SIGN'), ('t_SIMPLE_UNIT', 'SIMPLE_UNIT'), (None, 'MINUTE'), None, None, None, (None, 'SECOND'), None, None, None, (None, 'DEGREE'), None, None, None, (None, 'HOUR'), None, None, (None, 'COLON')])]} +_lexstateignore = {'INITIAL': ' '} +_lexstateerrorf = {'INITIAL': 't_error'} diff --git a/astropy/coordinates/angle_parsetab.py b/astropy/coordinates/angle_parsetab.py new file mode 100644 index 0000000..fd86ad5 --- /dev/null +++ b/astropy/coordinates/angle_parsetab.py @@ -0,0 +1,65 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +# astropy/coordinates/angle_parsetab.py +# This file is automatically generated. Do not edit. 
+_tabversion = '3.2' + +_lr_method = 'LALR' + +_lr_signature = b"'\xecW6\xc1\x00\x86\x9f\x84\x0er\xa4\xfb\x91\xa6;" + +_lr_action_items = {'HOUR':([2,8,10,17,18,21,25,26,27,28,34,],[-15,12,-14,20,-16,-12,-13,-9,-8,-10,-11,]),'DEGREE':([2,8,10,17,18,21,25,26,27,28,34,],[-15,13,-14,19,-16,-12,-13,-9,-8,-10,-11,]),'SIGN':([0,],[5,]),'SECOND':([2,8,10,17,18,21,25,26,27,28,32,33,34,],[-15,14,-14,-17,-16,-12,-13,-9,-8,-10,35,36,-11,]),'COLON':([17,28,],[22,31,]),'UINT':([0,5,11,17,19,20,21,22,29,30,31,],[-7,-6,17,21,23,24,26,28,26,26,26,]),'SIMPLE_UNIT':([2,8,10,17,18,21,25,26,27,28,34,],[-15,15,-14,-17,-16,-12,-13,-9,-8,-10,-11,]),'UFLOAT':([0,5,11,21,29,30,31,],[-7,-6,18,27,27,27,27,]),'MINUTE':([2,8,10,17,18,21,23,24,25,26,27,28,34,],[-15,16,-14,-17,-16,-12,29,30,-13,-9,-8,-10,-11,]),'$end':([1,2,3,4,6,7,8,9,10,12,13,14,15,16,17,18,19,20,21,23,24,25,26,27,28,29,30,32,33,34,35,36,],[-4,-15,0,-5,-3,-1,-30,-2,-14,-23,-29,-32,-31,-33,-17,-16,-24,-18,-12,-25,-19,-13,-9,-8,-10,-26,-20,-27,-21,-11,-28,-22,]),} + +_lr_action = { } +for _k, _v in _lr_action_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_action: _lr_action[_x] = { } + _lr_action[_x][_k] = _y +del _lr_action_items + +_lr_goto_items = {'arcminute':([0,],[1,]),'angle':([0,],[3,]),'simple':([0,],[4,]),'arcsecond':([0,],[6,]),'hms':([0,],[7,]),'generic':([0,],[8,]),'dms':([0,],[9,]),'colon':([0,],[10,]),'spaced':([0,],[2,]),'sign':([0,],[11,]),'ufloat':([21,29,30,31,],[25,32,33,34,]),} + +_lr_goto = { } +for _k, _v in _lr_goto_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_goto: _lr_goto[_x] = { } + _lr_goto[_x][_k] = _y +del _lr_goto_items +_lr_productions = [ + ("S' -> angle","S'",1,None,None,None), + ('angle -> hms','angle',1,'p_angle','astropy/coordinates/angle_utilities.py',115), + ('angle -> dms','angle',1,'p_angle','astropy/coordinates/angle_utilities.py',116), + ('angle -> arcsecond','angle',1,'p_angle','astropy/coordinates/angle_utilities.py',117), + ('angle -> 
arcminute','angle',1,'p_angle','astropy/coordinates/angle_utilities.py',118), + ('angle -> simple','angle',1,'p_angle','astropy/coordinates/angle_utilities.py',119), + ('sign -> SIGN','sign',1,'p_sign','astropy/coordinates/angle_utilities.py',125), + ('sign -> ','sign',0,'p_sign','astropy/coordinates/angle_utilities.py',126), + ('ufloat -> UFLOAT','ufloat',1,'p_ufloat','astropy/coordinates/angle_utilities.py',135), + ('ufloat -> UINT','ufloat',1,'p_ufloat','astropy/coordinates/angle_utilities.py',136), + ('colon -> sign UINT COLON UINT','colon',4,'p_colon','astropy/coordinates/angle_utilities.py',142), + ('colon -> sign UINT COLON UINT COLON ufloat','colon',6,'p_colon','astropy/coordinates/angle_utilities.py',143), + ('spaced -> sign UINT UINT','spaced',3,'p_spaced','astropy/coordinates/angle_utilities.py',152), + ('spaced -> sign UINT UINT ufloat','spaced',4,'p_spaced','astropy/coordinates/angle_utilities.py',153), + ('generic -> colon','generic',1,'p_generic','astropy/coordinates/angle_utilities.py',162), + ('generic -> spaced','generic',1,'p_generic','astropy/coordinates/angle_utilities.py',163), + ('generic -> sign UFLOAT','generic',2,'p_generic','astropy/coordinates/angle_utilities.py',164), + ('generic -> sign UINT','generic',2,'p_generic','astropy/coordinates/angle_utilities.py',165), + ('hms -> sign UINT HOUR','hms',3,'p_hms','astropy/coordinates/angle_utilities.py',174), + ('hms -> sign UINT HOUR UINT','hms',4,'p_hms','astropy/coordinates/angle_utilities.py',175), + ('hms -> sign UINT HOUR UINT MINUTE','hms',5,'p_hms','astropy/coordinates/angle_utilities.py',176), + ('hms -> sign UINT HOUR UINT MINUTE ufloat','hms',6,'p_hms','astropy/coordinates/angle_utilities.py',177), + ('hms -> sign UINT HOUR UINT MINUTE ufloat SECOND','hms',7,'p_hms','astropy/coordinates/angle_utilities.py',178), + ('hms -> generic HOUR','hms',2,'p_hms','astropy/coordinates/angle_utilities.py',179), + ('dms -> sign UINT 
DEGREE','dms',3,'p_dms','astropy/coordinates/angle_utilities.py',192), + ('dms -> sign UINT DEGREE UINT','dms',4,'p_dms','astropy/coordinates/angle_utilities.py',193), + ('dms -> sign UINT DEGREE UINT MINUTE','dms',5,'p_dms','astropy/coordinates/angle_utilities.py',194), + ('dms -> sign UINT DEGREE UINT MINUTE ufloat','dms',6,'p_dms','astropy/coordinates/angle_utilities.py',195), + ('dms -> sign UINT DEGREE UINT MINUTE ufloat SECOND','dms',7,'p_dms','astropy/coordinates/angle_utilities.py',196), + ('dms -> generic DEGREE','dms',2,'p_dms','astropy/coordinates/angle_utilities.py',197), + ('simple -> generic','simple',1,'p_simple','astropy/coordinates/angle_utilities.py',210), + ('simple -> generic SIMPLE_UNIT','simple',2,'p_simple','astropy/coordinates/angle_utilities.py',211), + ('arcsecond -> generic SECOND','arcsecond',2,'p_arcsecond','astropy/coordinates/angle_utilities.py',220), + ('arcminute -> generic MINUTE','arcminute',2,'p_arcminute','astropy/coordinates/angle_utilities.py',226), +] diff --git a/astropy/coordinates/angle_utilities.py b/astropy/coordinates/angle_utilities.py new file mode 100644 index 0000000..ce3e478 --- /dev/null +++ b/astropy/coordinates/angle_utilities.py @@ -0,0 +1,684 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +This module contains utility functions that are for internal use in +astropy.coordinates.angles. Mainly they are conversions from one format +of data to another. +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import os +from warnings import warn + +import numpy as np + +from .errors import (IllegalHourWarning, IllegalHourError, + IllegalMinuteWarning, IllegalMinuteError, + IllegalSecondWarning, IllegalSecondError) +from ..utils import format_exception +from .. 
import units as u


class _AngleParser(object):
    """
    Parses the various angle formats including:

       * 01:02:30.43 degrees
       * 1 2 0 hours
       * 1°2′3″
       * 1d2m3s
       * -1h2m3s

    This class should not be used directly.  Use `parse_angle`
    instead.
    """
    def __init__(self):
        # Build the PLY lexer/parser once and cache them on the class so
        # every subsequent _AngleParser() shares the same machinery.
        if '_parser' not in _AngleParser.__dict__:
            _AngleParser._parser, _AngleParser._lexer = self._make_parser()

    @classmethod
    def _get_simple_unit_names(cls):
        # All angular unit names equivalent to radian (including
        # prefixed forms), except deg and hourangle, which get their own
        # dedicated tokens below.
        simple_units = set(
            u.radian.find_equivalent_units(include_prefix_units=True))
        simple_units.remove(u.deg)
        simple_units.remove(u.hourangle)
        simple_unit_names = set()
        for unit in simple_units:
            simple_unit_names.update(unit.names)
        return list(simple_unit_names)

    @classmethod
    def _make_parser(cls):
        from ..extern.ply import lex, yacc

        # List of token names.
        tokens = (
            'SIGN',
            'UINT',
            'UFLOAT',
            'COLON',
            'DEGREE',
            'HOUR',
            'MINUTE',
            'SECOND',
            'SIMPLE_UNIT'
        )

        # NOTE THE ORDERING OF THESE RULES IS IMPORTANT!!
        # Regular expression rules for simple tokens
        # (In PLY, the docstring of each t_* function IS the token's
        # regular expression -- those docstrings are runtime data and
        # must not be edited.)
        def t_UFLOAT(t):
            r'((\d+\.\d*)|(\.\d+))([eE][+-−]?\d+)?'
            # The above includes Unicode "MINUS SIGN" \u2212.  It is
            # important to include the hyphen last, or the regex will
            # treat this as a range.
            t.value = float(t.value.replace('−', '-'))
            return t

        def t_UINT(t):
            r'\d+'
            t.value = int(t.value)
            return t

        def t_SIGN(t):
            r'[+−-]'
            # The above include Unicode "MINUS SIGN" \u2212.  It is
            # important to include the hyphen last, or the regex will
            # treat this as a range.
            if t.value == '+':
                t.value = 1.0
            else:
                t.value = -1.0
            return t

        def t_SIMPLE_UNIT(t):
            t.value = u.Unit(t.value)
            return t
        # The SIMPLE_UNIT regex is generated from the known unit names
        # and attached as the docstring that PLY reads.
        t_SIMPLE_UNIT.__doc__ = '|'.join(
            '(?:{0})'.format(x) for x in cls._get_simple_unit_names())

        t_COLON = ':'
        t_DEGREE = r'd(eg(ree(s)?)?)?|°'
        t_HOUR = r'hour(s)?|h(r)?|ʰ'
        t_MINUTE = r'm(in(ute(s)?)?)?|′|\'|ᵐ'
        t_SECOND = r's(ec(ond(s)?)?)?|″|\"|ˢ'

        # A string containing ignored characters (spaces)
        t_ignore = ' '

        # Error handling rule
        def t_error(t):
            raise ValueError(
                "Invalid character at col {0}".format(t.lexpos))

        # Build the lexer: prefer the pre-generated table module; fall
        # back to regenerating the tables next to this file.
        try:
            from . import angle_lextab
            lexer = lex.lex(optimize=True, lextab=angle_lextab)
        except ImportError:
            lexer = lex.lex(optimize=True, lextab='angle_lextab',
                            outputdir=os.path.dirname(__file__))

        # (Likewise, the docstring of each p_* function is the grammar
        # rule PLY reads -- do not edit them.)
        def p_angle(p):
            '''
            angle : hms
                  | dms
                  | arcsecond
                  | arcminute
                  | simple
            '''
            p[0] = p[1]

        def p_sign(p):
            '''
            sign : SIGN
                 |
            '''
            if len(p) == 2:
                p[0] = p[1]
            else:
                p[0] = 1.0

        def p_ufloat(p):
            '''
            ufloat : UFLOAT
                   | UINT
            '''
            p[0] = float(p[1])

        def p_colon(p):
            '''
            colon : sign UINT COLON UINT
                  | sign UINT COLON UINT COLON ufloat
            '''
            if len(p) == 5:
                p[0] = (p[1] * p[2], p[4], 0.0)
            elif len(p) == 7:
                p[0] = (p[1] * p[2], p[4], p[6])

        def p_spaced(p):
            '''
            spaced : sign UINT UINT
                   | sign UINT UINT ufloat
            '''
            if len(p) == 4:
                p[0] = (p[1] * p[2], p[3], 0.0)
            elif len(p) == 5:
                p[0] = (p[1] * p[2], p[3], p[4])

        def p_generic(p):
            '''
            generic : colon
                    | spaced
                    | sign UFLOAT
                    | sign UINT
            '''
            if len(p) == 2:
                p[0] = p[1]
            else:
                p[0] = p[1] * p[2]

        def p_hms(p):
            '''
            hms : sign UINT HOUR
                | sign UINT HOUR UINT
                | sign UINT HOUR UINT MINUTE
                | sign UINT HOUR UINT MINUTE ufloat
                | sign UINT HOUR UINT MINUTE ufloat SECOND
                | generic HOUR
            '''
            if len(p) == 3:
                p[0] = (p[1], u.hourangle)
            elif len(p) == 4:
                p[0] = (p[1] * p[2], u.hourangle)
            elif len(p) in (5, 6):
                p[0] = ((p[1] * p[2], p[4], 0.0), u.hourangle)
            elif len(p) in (7, 8):
                p[0] = ((p[1] * p[2], p[4], p[6]), u.hourangle)

        def p_dms(p):
            '''
            dms : sign UINT DEGREE
                | sign UINT DEGREE UINT
                | sign UINT DEGREE UINT MINUTE
                | sign UINT DEGREE UINT MINUTE ufloat
                | sign UINT DEGREE UINT MINUTE ufloat SECOND
                | generic DEGREE
            '''
            if len(p) == 3:
                p[0] = (p[1], u.degree)
            elif len(p) == 4:
                p[0] = (p[1] * p[2], u.degree)
            elif len(p) in (5, 6):
                p[0] = ((p[1] * p[2], p[4], 0.0), u.degree)
            elif len(p) in (7, 8):
                p[0] = ((p[1] * p[2], p[4], p[6]), u.degree)

        def p_simple(p):
            '''
            simple : generic
                   | generic SIMPLE_UNIT
            '''
            if len(p) == 2:
                p[0] = (p[1], None)
            else:
                p[0] = (p[1], p[2])

        def p_arcsecond(p):
            '''
            arcsecond : generic SECOND
            '''
            p[0] = (p[1], u.arcsecond)

        def p_arcminute(p):
            '''
            arcminute : generic MINUTE
            '''
            p[0] = (p[1], u.arcminute)

        def p_error(p):
            raise ValueError

        # Build the parser, preferring the pre-generated parse tables.
        try:
            from . import angle_parsetab
            parser = yacc.yacc(debug=False, tabmodule=angle_parsetab,
                               write_tables=False)
        except ImportError:
            parser = yacc.yacc(debug=False, tabmodule='angle_parsetab',
                               outputdir=os.path.dirname(__file__))

        return parser, lexer

    def parse(self, angle, unit, debug=False):
        # Parse `angle`, turning lexer ValueErrors into messages that
        # include the offending input, and bare parser errors into a
        # generic syntax-error message.
        try:
            found_angle, found_unit = self._parser.parse(
                angle, lexer=self._lexer, debug=debug)
        except ValueError as e:
            if str(e):
                raise ValueError("{0} in angle {1!r}".format(
                    str(e), angle))
            else:
                raise ValueError(
                    "Syntax error parsing angle {0!r}".format(angle))

        if unit is None and found_unit is None:
            raise u.UnitsError("No unit specified")

        return found_angle, found_unit


def _check_hour_range(hrs):
    """
    Checks that the given value is in the range (-24, 24).

    A value of exactly +/-24 only warns; anything beyond raises
    `IllegalHourError`.
    """
    if np.any(np.abs(hrs) == 24.):
        warn(IllegalHourWarning(hrs, 'Treating as 24 hr'))
    elif np.any(hrs < -24.) or np.any(hrs > 24.):
        raise IllegalHourError(hrs)


def _check_minute_range(m):
    """
    Checks that the given value is in the range [-60, 60].  If the value
    is equal to 60, then a warning is raised.
    """
    if np.any(m == 60.):
        warn(IllegalMinuteWarning(m, 'Treating as 0 min, +1 hr/deg'))
    elif np.any(m < -60.) or np.any(m > 60.):
        # "Error: minutes not in range [-60,60) ({0}).".format(min))
        raise IllegalMinuteError(m)


def _check_second_range(sec):
    """
    Checks that the given value is in the range [-60, 60].  If the value
    is equal to 60, then a warning is raised.
    """
    if np.any(sec == 60.):
        warn(IllegalSecondWarning(sec, 'Treating as 0 sec, +1 min'))
    elif np.any(sec < -60.) or np.any(sec > 60.):
        # "Error: seconds not in range [-60,60) ({0}).".format(sec))
        raise IllegalSecondError(sec)


def check_hms_ranges(h, m, s):
    """
    Checks that the given hour, minute and second are all within
    reasonable range.
    """
    _check_hour_range(h)
    _check_minute_range(m)
    _check_second_range(s)
    return None


def parse_angle(angle, unit=None, debug=False):
    """
    Parses an input string value into an angle value.

    Parameters
    ----------
    angle : str
        A string representing the angle.  May be in one of the following forms:

            * 01:02:30.43 degrees
            * 1 2 0 hours
            * 1°2′3″
            * 1d2m3s
            * -1h2m3s

    unit : `~astropy.units.UnitBase` instance, optional
        The unit used to interpret the string.  If ``unit`` is not
        provided, the unit must be explicitly represented in the
        string, either at the end or as number separators.

    debug : bool, optional
        If `True`, print debugging information from the parser.

    Returns
    -------
    value, unit : tuple
        ``value`` is the value as a floating point number or three-part
        tuple, and ``unit`` is a `Unit` instance which is either the
        unit passed in or the one explicitly mentioned in the input
        string.
+ """ + return _AngleParser().parse(angle, unit, debug=debug) + + +def degrees_to_dms(d): + """ + Convert a floating-point degree value into a ``(degree, arcminute, + arcsecond)`` tuple. + """ + sign = np.copysign(1.0, d) + + (df, d) = np.modf(np.abs(d)) # (degree fraction, degree) + (mf, m) = np.modf(df * 60.) # (minute fraction, minute) + s = mf * 60. + + return np.floor(sign * d), sign * np.floor(m), sign * s + + +def dms_to_degrees(d, m, s): + """ + Convert degrees, arcminute, arcsecond to a float degrees value. + """ + + _check_minute_range(m) + _check_second_range(s) + + # determine sign + sign = np.copysign(1.0, d) + + # TODO: This will fail if d or m have values after the decimal + # place + + try: + d = np.floor(np.abs(np.asarray(d))) + m = np.floor(np.abs(np.asarray(m))) + s = np.abs(s) + except ValueError: + raise ValueError(format_exception( + "{func}: dms values ({1[0]},{2[1]},{3[2]}) could not be " + "converted to numbers.", d, m, s)) + + return sign * (d + m / 60. + s / 3600.) + + +def hms_to_hours(h, m, s): + """ + Convert hour, minute, second to a float hour value. + """ + + check_hms_ranges(h, m, s) + + # determine sign + sign = np.copysign(1.0, h) + + # TODO: This will fail if d or m have values after the decimal + # place + + try: + h = np.floor(np.abs(h)) + m = np.floor(np.abs(m)) + s = np.abs(s) + except ValueError: + raise ValueError(format_exception( + "{func}: HMS values ({1[0]},{2[1]},{3[2]}) could not be " + "converted to numbers.", h, m, s)) + + return sign * (h + m / 60. + s / 3600.) + + +def hms_to_degrees(h, m, s): + """ + Convert hour, minute, second to a float degrees value. + """ + + return hms_to_hours(h, m, s) * 15. + + +def hms_to_radians(h, m, s): + """ + Convert hour, minute, second to a float radians value. + """ + + return u.degree.to(u.radian, hms_to_degrees(h, m, s)) + + +def hms_to_dms(h, m, s): + """ + Convert degrees, arcminutes, arcseconds to an ``(hour, minute, second)`` + tuple. 
+ """ + + return degrees_to_dms(hms_to_degrees(h, m, s)) + + +def hours_to_decimal(h): + """ + Convert any parseable hour value into a float value. + """ + from . import angles + return angles.Angle(h, unit=u.hourangle).hour + + +def hours_to_radians(h): + """ + Convert an angle in Hours to Radians. + """ + + return u.hourangle.to(u.radian, h) + + +def hours_to_hms(h): + """ + Convert an floating-point hour value into an ``(hour, minute, + second)`` tuple. + """ + + sign = np.copysign(1.0, h) + + (hf, h) = np.modf(np.abs(h)) # (degree fraction, degree) + (mf, m) = np.modf(hf * 60.0) # (minute fraction, minute) + s = mf * 60.0 + + return (np.floor(sign * h), sign * np.floor(m), sign * s) + + +def radians_to_degrees(r): + """ + Convert an angle in Radians to Degrees. + """ + return u.radian.to(u.degree, r) + + +def radians_to_hours(r): + """ + Convert an angle in Radians to Hours. + """ + return u.radian.to(u.hourangle, r) + + +def radians_to_hms(r): + """ + Convert an angle in Radians to an ``(hour, minute, second)`` tuple. + """ + + hours = radians_to_hours(r) + return hours_to_hms(hours) + + +def radians_to_dms(r): + """ + Convert an angle in Radians to an ``(degree, arcminute, + arcsecond)`` tuple. + """ + + degrees = u.radian.to(u.degree, r) + return degrees_to_dms(degrees) + + +def sexagesimal_to_string(values, precision=None, pad=False, sep=(':',), + fields=3): + """ + Given an already separated tuple of sexagesimal values, returns + a string. + + See `hours_to_string` and `degrees_to_string` for a higher-level + interface to this functionality. + """ + + # If the coordinates are negative, we need to take the absolute value of + # the (arc)minutes and (arc)seconds. We need to use np.abs because abs(-0) + # is -0. 
    values = (values[0], np.abs(values[1]), np.abs(values[2]))

    if pad:
        # Check to see if values[0] is negative, using np.copysign to handle -0
        if np.copysign(1.0, values[0]) == -1:
            pad = 3
        else:
            pad = 2
    else:
        pad = 0

    if not isinstance(sep, tuple):
        sep = tuple(sep)

    if fields < 1 or fields > 3:
        raise ValueError(
            "fields must be 1, 2, or 3")

    # Normalize `sep` to a 3-tuple, padding with empty strings for the
    # fields that will not be shown.
    if not sep:  # empty string, False, or None, etc.
        sep = ('', '', '')
    elif len(sep) == 1:
        if fields == 3:
            sep = sep + (sep[0], '')
        elif fields == 2:
            sep = sep + ('', '')
        else:
            sep = ('', '', '')
    elif len(sep) == 2:
        sep = sep + ('',)
    elif len(sep) != 3:
        raise ValueError(
            "Invalid separator specification for converting angle to string.")

    # Simplify the expression based on the requested precision.  For
    # example, if the seconds will round up to 60, we should convert
    # it to 0 and carry upwards.  If the field is hidden (by the
    # fields kwarg) we round up around the middle, 30.0.
    if precision is None:
        rounding_thresh = 60.0 - (10.0 ** -4)
    else:
        rounding_thresh = 60.0 - (10.0 ** -precision)

    values = list(values)
    if fields == 3 and values[2] >= rounding_thresh:
        values[2] = 0.0
        values[1] += 1.0
    elif fields < 3 and values[2] >= 30.0:
        values[1] += 1.0

    if fields >= 2 and int(values[1]) >= 60.0:
        values[1] = 0.0
        values[0] += 1.0
    elif fields < 2 and int(values[1]) >= 30.0:
        values[0] += 1.0

    # Assemble the format string piecewise so hidden fields contribute
    # nothing, then format once at the end.
    literal = []
    last_value = ''
    literal.append('{0:0{pad}.0f}{sep[0]}')
    if fields >= 2:
        literal.append('{1:02d}{sep[1]}')
    if fields == 3:
        if precision is None:
            # Default: up to 4 decimal places, trailing zeros trimmed.
            last_value = '{0:.4f}'.format(abs(values[2]))
            last_value = last_value.rstrip('0').rstrip('.')
        else:
            last_value = '{0:.{precision}f}'.format(
                abs(values[2]), precision=precision)
        # Zero-pad a single-digit seconds field ("7.5" -> "07.5").
        if len(last_value) == 1 or last_value[1] == '.':
            last_value = '0' + last_value
        literal.append('{last_value}{sep[2]}')
    literal = ''.join(literal)
    return literal.format(values[0], int(abs(values[1])), abs(values[2]),
                          sep=sep, pad=pad,
                          last_value=last_value)


def hours_to_string(h, precision=5, pad=False, sep=('h', 'm', 's'),
                    fields=3):
    """
    Takes a decimal hour value and returns a string formatted as hms with
    separator specified by the 'sep' parameter.

    ``h`` must be a scalar.
    """
    h, m, s = hours_to_hms(h)
    return sexagesimal_to_string((h, m, s), precision=precision, pad=pad,
                                 sep=sep, fields=fields)


def degrees_to_string(d, precision=5, pad=False, sep=':', fields=3):
    """
    Takes a decimal degree value and returns a string formatted as dms with
    separator specified by the 'sep' parameter.

    ``d`` must be a scalar.
    """
    d, m, s = degrees_to_dms(d)
    return sexagesimal_to_string((d, m, s), precision=precision, pad=pad,
                                 sep=sep, fields=fields)


def angular_separation(lon1, lat1, lon2, lat2):
    """
    Angular separation between two points on a sphere.

    Parameters
    ----------
    lon1, lat1, lon2, lat2 : `Angle`, `~astropy.units.Quantity` or float
        Longitude and latitude of the two points.  Quantities should be in
        angular units; floats in radians.

    Returns
    -------
    angular separation : `~astropy.units.Quantity` or float
        Type depends on input; `Quantity` in angular units, or float in
        radians.

    Notes
    -----
    The angular separation is calculated using the Vincenty formula [1]_,
    which is slightly more complex and computationally expensive than
    some alternatives, but is stable at all distances, including the
    poles and antipodes.

    ..
[1] http://en.wikipedia.org/wiki/Great-circle_distance
    """

    sdlon = np.sin(lon2 - lon1)
    cdlon = np.cos(lon2 - lon1)
    slat1 = np.sin(lat1)
    slat2 = np.sin(lat2)
    clat1 = np.cos(lat1)
    clat2 = np.cos(lat2)

    # Vincenty formula: arctan2 of the vector / scalar parts keeps the
    # result numerically stable for both tiny and near-180-degree
    # separations.
    num1 = clat2 * sdlon
    num2 = clat1 * slat2 - slat1 * clat2 * cdlon
    denominator = slat1 * slat2 + clat1 * clat2 * cdlon

    return np.arctan2(np.sqrt(num1 ** 2 + num2 ** 2), denominator)


def position_angle(lon1, lat1, lon2, lat2):
    """
    Position Angle (East of North) between two points on a sphere.

    Parameters
    ----------
    lon1, lat1, lon2, lat2 : `Angle`, `~astropy.units.Quantity` or float
        Longitude and latitude of the two points.  Quantities should be in
        angular units; floats in radians.

    Returns
    -------
    pa : `~astropy.coordinates.Angle`
        The (positive) position angle of the vector pointing from position 1 to
        position 2.  If any of the angles are arrays, this will contain an array
        following the appropriate `numpy` broadcasting rules.

    """
    # Imported here to avoid a circular import with .angles.
    from .angles import Angle

    deltalon = lon2 - lon1
    colat = np.cos(lat2)

    x = np.sin(lat2) * np.cos(lat1) - colat * np.sin(lat1) * np.cos(deltalon)
    y = np.sin(deltalon) * colat

    # Wrap into [0, 360) deg so the returned position angle is positive.
    return Angle(np.arctan2(y, x)).wrap_at(360*u.deg)
diff --git a/astropy/coordinates/angles.py b/astropy/coordinates/angles.py
new file mode 100644
index 0000000..72353c3
--- /dev/null
+++ b/astropy/coordinates/angles.py
@@ -0,0 +1,768 @@
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst

"""
This module contains the fundamental classes used for representing
coordinates in astropy.
"""
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import math
from collections import namedtuple

import numpy as np

from ..extern import six
from . import angle_utilities as util
from .. import units as u
from ..utils import deprecated, isiterable


__all__ = ['Angle', 'Latitude', 'Longitude']


TWOPI = math.pi * 2.0  # no need to calculate this all the time

# these are used by the `hms` and `dms` attributes
hms_tuple = namedtuple('hms_tuple', ('h', 'm', 's'))
dms_tuple = namedtuple('dms_tuple', ('d', 'm', 's'))
signed_dms_tuple = namedtuple('signed_dms_tuple', ('sign', 'd', 'm', 's'))


class Angle(u.Quantity):
    """
    One or more angular value(s) with units equivalent to radians or degrees.

    An angle can be specified either as an array, scalar, tuple (see
    below), string, `~astropy.units.Quantity` or another
    :class:`~astropy.coordinates.Angle`.

    The input parser is flexible and supports a variety of formats::

      Angle('10.2345d')
      Angle(['10.2345d', '-20d'])
      Angle('1:2:30.43 degrees')
      Angle('1 2 0 hours')
      Angle(np.arange(1, 8), unit=u.deg)
      Angle(u'1°2′3″')
      Angle('1d2m3.4s')
      Angle('-1h2m3s')
      Angle((-1, 2, 3), unit=u.deg)  # (d, m, s)
      Angle(10.2345 * u.deg)
      Angle(Angle(10.2345 * u.deg))

    Parameters
    ----------
    angle : `~numpy.array`, scalar, `~astropy.units.Quantity`, :class:`~astropy.coordinates.Angle`
        The angle value. If a tuple, will be interpreted as ``(h, m,
        s)`` or ``(d, m, s)`` depending on ``unit``. If a string, it
        will be interpreted following the rules described above.

        If ``angle`` is a sequence or array of strings, the resulting
        values will be in the given ``unit``, or if `None` is provided,
        the unit will be taken from the first given value.

    unit : `~astropy.units.UnitBase`, str, optional
        The unit of the value specified for the angle.  This may be
        any string that `~astropy.units.Unit` understands, but it is
        better to give an actual unit object.  Must be an angular
        unit.

    dtype : `~numpy.dtype`, optional
        See `~astropy.units.Quantity`.

    copy : bool, optional
        See `~astropy.units.Quantity`.

    Raises
    ------
    `~astropy.units.UnitsError`
        If a unit is not provided or it is not an angular unit.
    """
    # Enables the degree/hourangle/etc. easy-conversion attributes
    # provided by the Quantity machinery.
    _include_easy_conversion_members = True

    def __new__(cls, angle, unit=None, dtype=None, copy=True):
        # Normalize the requested unit up front and reject non-angular
        # units before doing any work on the value.
        unit = cls._convert_unit_to_angle_unit(unit)
        if (unit is not None and not unit.is_equivalent(u.radian)):
            raise u.UnitsError("Requested unit {0} is not convertible to an "
                               "angle".format(unit))

        if isinstance(angle, u.Quantity):
            # This includes Angle subclasses as well
            if unit is not None:
                angle = angle.to(unit).value
            else:
                unit = angle.unit
                unit = cls._convert_unit_to_angle_unit(unit)
                if not unit.is_equivalent(u.radian):
                    raise u.UnitsError(
                        "Given quantity {0} is not convertible to an "
                        "angle".format(angle))

                angle = angle.value
        else:
            # this does nothing if it's not a tuple
            angle = cls._tuple_to_float(angle, unit)

            if isinstance(angle, six.string_types):
                angle, new_unit = util.parse_angle(angle, unit)
                if new_unit is not None and unit is None:
                    unit = new_unit
                angle = cls._tuple_to_float(angle, unit)
                # Convert if the parsed string carried its own unit that
                # differs from the requested one.
                if new_unit is not None and unit is not None and new_unit != unit:
                    angle = new_unit.to(unit, angle)
            elif (isiterable(angle) and
                  not (isinstance(angle, np.ndarray) and
                       angle.dtype.kind not in 'SUVO')):
                # A sequence (or object/string array): parse each element
                # recursively; the first element fixes the unit when none
                # was given.
                angle = [Angle(x, unit) for x in angle]
                if unit is None:
                    unit = angle[0].unit
                angle = [x.to(unit) for x in angle]

        self = super(Angle, cls).__new__(cls, angle, unit, dtype=dtype,
                                         copy=copy)

        if self.unit is u.dimensionless_unscaled:
            raise u.UnitsError("No unit was given - must be some kind of angle")
        elif not self.unit.is_equivalent(u.radian):
            raise u.UnitsError("Unit {0} is not an angle".format(self.unit))

        if self.dtype.kind not in 'iuf':
            raise TypeError("Unsupported dtype for "
                            "Angle:'{0}'".format(angle.dtype))

        return self

    @staticmethod
    def _tuple_to_float(angle, unit):
        """
        Converts an angle represented as a 3-tuple into a floating
        point number in the given unit.
        """
        if isinstance(angle, tuple):
            # TODO: Numpy array of tuples?
            if unit is u.hourangle:
                util.check_hms_ranges(*angle)
                angle = util.hms_to_hours(*angle)
            elif unit is u.degree:
                angle = util.dms_to_degrees(*angle)
            else:
                raise u.UnitsError(
                    "Can not parse '{0}' as unit '{1}'".format(
                        angle, unit))
        return angle

    @staticmethod
    def _convert_unit_to_angle_unit(unit):
        # `u.hour` (a time unit) is silently mapped to `u.hourangle` so
        # hour-based angle input works.
        if unit is not None:
            unit = u.Unit(unit)

            if unit is u.hour:
                unit = u.hourangle
        return unit

    def __quantity_subclass__(self, unit):
        # Stay an Angle under unit conversions that remain angular;
        # otherwise fall back to the plain Quantity behavior.
        unit = self._convert_unit_to_angle_unit(unit)
        if unit.is_equivalent(u.radian):
            return Angle, True
        else:
            return super(Angle, self).__quantity_subclass__(unit)[0], False

    @property
    def hour(self):
        """
        The angle's value in hours (read-only property).
        """
        return self.hourangle

    @property
    def hms(self):
        """
        The angle's value in hours, as a named tuple with ``(h, m, s)``
        members.  (This is a read-only property.)
        """
        return hms_tuple(*util.hours_to_hms(self.hourangle))

    @property
    def dms(self):
        """
        The angle's value in degrees, as a named tuple with ``(d, m, s)``
        members.  (This is a read-only property.)
        """
        return dms_tuple(*util.degrees_to_dms(self.degree))

    @property
    def signed_dms(self):
        """
        The angle's value in degrees, as a named tuple with ``(sign, d, m, s)``
        members.  The ``d``, ``m``, ``s`` are thus always positive, and the sign of
        the angle is given by ``sign``. (This is a read-only property.)

        This is primarily intented for use with `dms` to generate string
        representations of coordinates that are correct for negative angles.
        """
        return signed_dms_tuple(np.sign(self.degree),
                                *util.degrees_to_dms(np.abs(self.degree)))

    def to_string(self, unit=None, decimal=False, sep='fromunit',
                  precision=None, alwayssign=False, pad=False,
                  fields=3, format=None):
        """ A string representation of the angle.

        Parameters
        ----------
        unit : `~astropy.units.UnitBase`, optional
            Specifies the unit.  Must be an angular unit.  If not
            provided, the unit used to initialize the angle will be
            used.

        decimal : bool, optional
            If `True`, a decimal respresentation will be used, otherwise
            the returned string will be in sexagesimal form.

        sep : str, optional
            The separator between numbers in a sexagesimal
            representation.  E.g., if it is ':', the result is
            ``'12:41:11.1241'``. Also accepts 2 or 3 separators. E.g.,
            ``sep='hms'`` would give the result ``'12h41m11.1241s'``, or
            sep='-:' would yield ``'11-21:17.124'``.  Alternatively, the
            special string 'fromunit' means 'dms' if the unit is
            degrees, or 'hms' if the unit is hours.

        precision : int, optional
            The level of decimal precision.  If ``decimal`` is `True`,
            this is the raw precision, otherwise it gives the
            precision of the last place of the sexagesimal
            representation (seconds).  If `None`, or not provided, the
            number of decimal places is determined by the value, and
            will be between 0-8 decimal places as required.

        alwayssign : bool, optional
            If `True`, include the sign no matter what.  If `False`,
            only include the sign if it is negative.

        pad : bool, optional
            If `True`, include leading zeros when needed to ensure a
            fixed number of characters for sexagesimal representation.

        fields : int, optional
            Specifies the number of fields to display when outputting
            sexagesimal notation.  For example:

                - fields == 1: ``'5d'``
                - fields == 2: ``'5d45m'``
                - fields == 3: ``'5d45m32.5s'``

            By default, all fields are displayed.

        format : str, optional
            The format of the result.  If not provided, an unadorned
            string is returned.  Supported values are:

            - 'latex': Return a LaTeX-formatted string

            - 'unicode': Return a string containing non-ASCII unicode
              characters, such as the degree symbol

        Returns
        -------
        strrepr : str
            A string representation of the angle.

        """
        if unit is None:
            unit = self.unit
        unit = self._convert_unit_to_angle_unit(unit)

        # Per-format sexagesimal separators for the two units that have
        # a conventional sexagesimal notation.
        separators = {
            None: {
                u.degree: 'dms',
                u.hourangle: 'hms'},
            'latex': {
                u.degree: [r'^\circ', r'{}^\prime', r'{}^{\prime\prime}'],
                u.hourangle: [r'^\mathrm{h}', r'^\mathrm{m}', r'^\mathrm{s}']},
            'unicode': {
                u.degree: '°′″',
                u.hourangle: 'ʰᵐˢ'}
        }

        if sep == 'fromunit':
            if format not in separators:
                raise ValueError("Unknown format '{0}'".format(format))
            seps = separators[format]
            if unit in seps:
                sep = seps[unit]

        # Create an iterator so we can format each element of what
        # might be an array.
        if unit is u.degree:
            if decimal:
                values = self.degree
                if precision is not None:
                    func = ("{0:0." + str(precision) + "f}").format
                else:
                    func = '{0:g}'.format
            else:
                if sep == 'fromunit':
                    sep = 'dms'
                values = self.degree
                func = lambda x: util.degrees_to_string(
                    x, precision=precision, sep=sep, pad=pad,
                    fields=fields)

        elif unit is u.hourangle:
            if decimal:
                values = self.hour
                if precision is not None:
                    func = ("{0:0." + str(precision) + "f}").format
                else:
                    func = '{0:g}'.format
            else:
                if sep == 'fromunit':
                    sep = 'hms'
                values = self.hour
                func = lambda x: util.hours_to_string(
                    x, precision=precision, sep=sep, pad=pad,
                    fields=fields)

        elif unit.is_equivalent(u.radian):
            # Other angular units have no sexagesimal form: allow only
            # decimal output or the default value-plus-unit-suffix form.
            if decimal:
                values = self.to(unit).value
                if precision is not None:
                    func = ("{0:1." + str(precision) + "f}").format
                else:
                    func = "{0:g}".format
            elif sep == 'fromunit':
                values = self.to(unit).value
                unit_string = unit.to_string(format=format)
                if format == 'latex':
                    # strip the surrounding $...$; do_format re-adds it
                    unit_string = unit_string[1:-1]

                if precision is not None:
                    def plain_unit_format(val):
                        return ("{0:0." + str(precision) + "f}{1}").format(
                            val, unit_string)
                    func = plain_unit_format
                else:
                    def plain_unit_format(val):
                        return "{0:g}{1}".format(val, unit_string)
                    func = plain_unit_format
            else:
                raise ValueError(
                    "'{0}' can not be represented in sexagesimal "
                    "notation".format(
                        unit.name))

        else:
            raise u.UnitsError(
                "The unit value provided is not an angular unit.")

        def do_format(val):
            s = func(float(val))
            if alwayssign and not s.startswith('-'):
                s = '+' + s
            if format == 'latex':
                s = '${0}$'.format(s)
            return s

        # Vectorize so scalar and array angles share one code path.
        format_ufunc = np.vectorize(do_format, otypes=[np.object])
        result = format_ufunc(values)
        if result.ndim == 0:
            result = result[()]
        return result

    def wrap_at(self, wrap_angle, inplace=False):
        """
        Wrap the `Angle` object at the given ``wrap_angle``.

        This method forces all the angle values to be within a contiguous
        360 degree range so that ``wrap_angle - 360d <= angle <
        wrap_angle``. By default a new Angle object is returned, but if the
        ``inplace`` argument is `True` then the `Angle` object is wrapped in
        place and nothing is returned.

        For instance::

          >>> from astropy.coordinates import Angle
          >>> import astropy.units as u
          >>> a = Angle([-20.0, 150.0, 350.0] * u.deg)

          >>> a.wrap_at(360 * u.deg).degree  # Wrap into range 0 to 360 degrees
          array([ 340.,  150.,  350.])

          >>> a.wrap_at('180d', inplace=True)  # Wrap into range -180 to 180 degrees
          >>> a.degree
          array([ -20.,  150.,  -10.])

        Parameters
        ----------
        wrap_angle : str, `Angle`, angular `~astropy.units.Quantity`
            Specifies a single value for the wrap angle.  This can be any
            object that can initialize an `Angle` object, e.g. ``'180d'``,
            ``180 * u.deg``, or ``Angle(180, unit=u.deg)``.
+ + inplace : bool + If `True` then wrap the object in place instead of returning + a new `Angle` + + Returns + ------- + out : Angle or `None` + If ``inplace is False`` (default), return new `Angle` object + with angles wrapped accordingly. Otherwise wrap in place and + return `None`. + """ + wrap_angle = Angle(wrap_angle) # Convert to an Angle + wrapped = np.mod(self - wrap_angle, 360.0 * u.deg) - (360.0 * u.deg - wrap_angle) + + if inplace: + self[()] = wrapped + else: + return wrapped + + def is_within_bounds(self, lower=None, upper=None): + """ + Check if all angle(s) satisfy ``lower <= angle < upper`` + + If ``lower`` is not specified (or `None`) then no lower bounds check is + performed. Likewise ``upper`` can be left unspecified. For example:: + + >>> from astropy.coordinates import Angle + >>> import astropy.units as u + >>> a = Angle([-20, 150, 350] * u.deg) + >>> a.is_within_bounds('0d', '360d') + False + >>> a.is_within_bounds(None, '360d') + True + >>> a.is_within_bounds(-30 * u.deg, None) + True + + Parameters + ---------- + lower : str, `Angle`, angular `~astropy.units.Quantity`, `None` + Specifies lower bound for checking. This can be any object + that can initialize an `Angle` object, e.g. ``'180d'``, + ``180 * u.deg``, or ``Angle(180, unit=u.deg)``. + upper : str, `Angle`, angular `~astropy.units.Quantity`, `None` + Specifies upper bound for checking. This can be any object + that can initialize an `Angle` object, e.g. ``'180d'``, + ``180 * u.deg``, or ``Angle(180, unit=u.deg)``. 
+ + Returns + ------- + is_within_bounds : bool + `True` if all angles satisfy ``lower <= angle < upper`` + """ + ok = True + if lower is not None: + ok &= np.all(Angle(lower) <= self) + if ok and upper is not None: + ok &= np.all(self < Angle(upper)) + return bool(ok) + + @deprecated("0.3", name="format", alternative="to_string") + def format(self, unit=u.degree, decimal=False, sep='fromunit', precision=5, + alwayssign=False, pad=False): + return self.to_string( + unit=unit, decimal=decimal, sep=sep, precision=precision, + alwayssign=alwayssign, pad=pad) + + def __str__(self): + return str(self.to_string()) + + def _repr_latex_(self): + return str(self.to_string(format='latex')) + + +class Latitude(Angle): + """ + Latitude-like angle(s) which must be in the range -90 to +90 deg. + + A Latitude object is distinguished from a pure + :class:`~astropy.coordinates.Angle` by virtue of being constrained + so that:: + + -90.0 * u.deg <= angle(s) <= +90.0 * u.deg + + Any attempt to set a value outside that range will result in a + `~.exceptions.ValueError`. + + The input angle(s) can be specified either as an array, list, + scalar, tuple (see below), string, + :class:`~astropy.units.Quantity` or another + :class:`~astropy.coordinates.Angle`. + + The input parser is flexible and supports all of the input formats + supported by :class:`~astropy.coordinates.Angle`. + + Parameters + ---------- + angle : array, list, scalar, `~astropy.units.Quantity`, `Angle`. The + angle value(s). If a tuple, will be interpreted as ``(h, m, s)`` or + ``(d, m, s)`` depending on ``unit``. If a string, it will be + interpreted following the rules described for + :class:`~astropy.coordinates.Angle`. + + If ``angle`` is a sequence or array of strings, the resulting + values will be in the given ``unit``, or if `None` is provided, + the unit will be taken from the first given value. + + unit : :class:`~astropy.units.UnitBase`, str, optional + The unit of the value specified for the angle. 
This may be + any string that `~astropy.units.Unit` understands, but it is + better to give an actual unit object. Must be an angular + unit. + + Raises + ------ + `~astropy.units.UnitsError` + If a unit is not provided or it is not an angular unit. + `TypeError` + If the angle parameter is an instance of :class:`~astropy.coordinates.Longitude`. + """ + def __new__(cls, angle, unit=None, **kwargs): + # Forbid creating a Lat from a Long. + if isinstance(angle, Longitude): + raise TypeError("A Latitude angle cannot be created from a Longitude angle") + self = super(Latitude, cls).__new__(cls, angle, unit=unit, **kwargs) + self._validate_angles() + return self + + def _validate_angles(self, angles=None): + """Check that angles are between -90 and 90 degrees. + If not given, the check is done on the object itself""" + # Convert the lower and upper bounds to the "native" unit of + # this angle. This limits multiplication to two values, + # rather than the N values in `self.value`. Also, the + # comparison is performed on raw arrays, rather than Quantity + # objects, for speed. + if angles is None: + angles = self + lower = u.degree.to(angles.unit, -90.0) + upper = u.degree.to(angles.unit, 90.0) + if np.any(angles.value < lower) or np.any(angles.value > upper): + raise ValueError('Latitude angle(s) must be within -90 deg <= angle <= 90 deg, ' + 'got {0}'.format(angles.to(u.degree))) + + def __setitem__(self, item, value): + # Forbid assigning a Long to a Lat. 
+ if isinstance(value, Longitude): + raise TypeError("A Longitude angle cannot be assigned to a Latitude angle") + # first check bounds + self._validate_angles(value) + super(Latitude, self).__setitem__(item, value) + + # Any calculation should drop to Angle + def __array_wrap__(self, obj, context=None): + obj = super(Angle, self).__array_wrap__(obj, context=context) + + if isinstance(obj, Angle): + return obj.view(Angle) + + return obj + + +class Longitude(Angle): + """ + Longitude-like angle(s) which are wrapped within a contiguous 360 degree range. + + A ``Longitude`` object is distinguished from a pure + :class:`~astropy.coordinates.Angle` by virtue of a ``wrap_angle`` + property. The ``wrap_angle`` specifies that all angle values + represented by the object will be in the range:: + + wrap_angle - 360 * u.deg <= angle(s) < wrap_angle + + The default ``wrap_angle`` is 360 deg. Setting ``wrap_angle=180 * + u.deg`` would instead result in values between -180 and +180 deg. + Setting the ``wrap_angle`` attribute of an existing ``Longitude`` + object will result in re-wrapping the angle values in-place. + + The input angle(s) can be specified either as an array, list, + scalar, tuple, string, :class:`~astropy.units.Quantity` + or another :class:`~astropy.coordinates.Angle`. + + The input parser is flexible and supports all of the input formats + supported by :class:`~astropy.coordinates.Angle`. + + Parameters + ---------- + angle : array, list, scalar, `~astropy.units.Quantity`, + :class:`~astropy.coordinates.Angle` The angle value(s). If a tuple, + will be interpreted as ``(h, m s)`` or ``(d, m, s)`` depending + on ``unit``. If a string, it will be interpreted following the + rules described for :class:`~astropy.coordinates.Angle`. + + If ``angle`` is a sequence or array of strings, the resulting + values will be in the given ``unit``, or if `None` is provided, + the unit will be taken from the first given value. 
+ + unit : :class:`~astropy.units.UnitBase`, str, optional + The unit of the value specified for the angle. This may be + any string that `~astropy.units.Unit` understands, but it is + better to give an actual unit object. Must be an angular + unit. + + wrap_angle : :class:`~astropy.coordinates.Angle` or equivalent, or None + Angle at which to wrap back to ``wrap_angle - 360 deg``. + If ``None`` (default), it will be taken to be 360 deg unless ``angle`` + has a ``wrap_angle`` attribute already (i.e., is a ``Longitude``), + in which case it will be taken from there. + + Raises + ------ + `~astropy.units.UnitsError` + If a unit is not provided or it is not an angular unit. + `TypeError` + If the angle parameter is an instance of :class:`~astropy.coordinates.Latitude`. + """ + + _wrap_angle = None + + def __new__(cls, angle, unit=None, wrap_angle=None, **kwargs): + # Forbid creating a Long from a Lat. + if isinstance(angle, Latitude): + raise TypeError("A Longitude angle cannot be created from a Latitude angle") + self = super(Longitude, cls).__new__(cls, angle, unit=unit, **kwargs) + self.wrap_angle = (wrap_angle if wrap_angle is not None + else getattr(angle, 'wrap_angle', 360 * u.deg)) + return self + + def __setitem__(self, item, value): + # Forbid assigning a Lat to a Long. + if isinstance(value, Latitude): + raise TypeError("A Latitude angle cannot be assigned to a Longitude angle") + super(Longitude, self).__setitem__(item, value) + self._wrap_internal() + + def _wrap_internal(self): + """ + Wrap the internal values in the Longitude object. Using the + :meth:`~astropy.coordinates.Angle.wrap_at` method causes + recursion. + """ + # Convert the wrap angle and 360 degrees to the native unit of + # this Angle, then do all the math on raw Numpy arrays rather + # than Quantity objects for speed. 
+ a360 = u.degree.to(self.unit, 360.0) + wrap_angle = self.wrap_angle.to(self.unit).value + wrap_angle_floor = wrap_angle - a360 + self_angle = self.value + # Do the wrapping, but only if any angles need to be wrapped + if np.any(self_angle < wrap_angle_floor) or np.any(self_angle >= wrap_angle): + wrapped = np.mod(self_angle - wrap_angle, a360) + wrap_angle_floor + value = u.Quantity(wrapped, self.unit) + super(Longitude, self).__setitem__((), value) + + @property + def wrap_angle(self): + return self._wrap_angle + + @wrap_angle.setter + def wrap_angle(self, value): + self._wrap_angle = Angle(value) + self._wrap_internal() + + def __array_finalize__(self, obj): + super(Longitude, self).__array_finalize__(obj) + self._wrap_angle = getattr(obj, '_wrap_angle', None) + + # Any calculation should drop to Angle + def __array_wrap__(self, obj, context=None): + obj = super(Angle, self).__array_wrap__(obj, context=context) + + if isinstance(obj, Angle): + return obj.view(Angle) + + return obj + +#<----------------------------------Rotations---------------------------------> + + +def rotation_matrix(angle, axis='z', unit=None): + """ + Generate a 3x3 cartesian rotation matrix for rotation about + a particular axis. + + Parameters + ---------- + angle : convertible to `Angle` + The amount of rotation this matrix should represent. + + axis : str or 3-sequence + Either ``'x'``, ``'y'``, ``'z'``, or a (x,y,z) specifying an + axis to rotate about. If ``'x'``, ``'y'``, or ``'z'``, the + rotation sense is counterclockwise looking down the + axis + (e.g. positive rotations obey left-hand-rule). + + unit : UnitBase, optional + If ``angle`` does not have associated units, they are in this + unit. If neither are provided, it is assumed to be degrees. + + Returns + ------- + rmat: `numpy.matrix` + A unitary rotation matrix. 
+ """ + if unit is None: + unit = u.degree + + angle = Angle(angle, unit=unit) + + s = np.sin(angle) + c = np.cos(angle) + + # use optimized implementations for x/y/z + if axis == 'z': + return np.matrix(((c, s, 0), + (-s, c, 0), + (0, 0, 1))) + elif axis == 'y': + return np.matrix(((c, 0, -s), + (0, 1, 0), + (s, 0, c))) + elif axis == 'x': + return np.matrix(((1, 0, 0), + (0, c, s), + (0, -s, c))) + else: + axis = np.asarray(axis) + axis = axis / np.sqrt((axis * axis).sum()) + + R = np.diag((c, c, c)) + R += np.outer(axis, axis) * (1. - c) + axis *= s + R += np.array([[0., axis[2], -axis[1]], + [-axis[2], 0., axis[0]], + [axis[1], -axis[0], 0.]]) + return R.view(np.matrix) + + +def angle_axis(matrix): + """ + Computes the angle of rotation and the rotation axis for a given rotation + matrix. + + Parameters + ---------- + matrix : array-like + A 3 x 3 unitary rotation matrix. + + Returns + ------- + angle : `Angle` + The angle of rotation for this matrix. + + axis : array (length 3) + The (normalized) axis of rotation for this matrix. + """ + m = np.asmatrix(matrix) + if m.shape != (3, 3): + raise ValueError('matrix is not 3x3') + + axis = np.array((m[2, 1] - m[1, 2], m[0, 2] - m[2, 0], m[1, 0] - m[0, 1])) + r = np.sqrt((axis * axis).sum()) + angle = np.arctan2(r, np.trace(m) - 1) + + return Angle(angle, u.radian), -axis / r diff --git a/astropy/coordinates/baseframe.py b/astropy/coordinates/baseframe.py new file mode 100644 index 0000000..1ef7f04 --- /dev/null +++ b/astropy/coordinates/baseframe.py @@ -0,0 +1,1003 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" +Framework and base classes for coordinate frames/"low-level" coordinate classes. 
+""" +from __future__ import (absolute_import, unicode_literals, division, + print_function) + +# Standard library +import inspect +import warnings +from copy import deepcopy +from collections import namedtuple + +# Dependencies + +# Project +from ..utils.compat.misc import override__dir__ +from ..extern import six +from ..utils.compat.odict import OrderedDict +from ..utils.exceptions import AstropyDeprecationWarning +from .. import units as u +from ..utils import OrderedDict +from .transformations import TransformGraph +from .representation import (BaseRepresentation, CartesianRepresentation, + SphericalRepresentation, UnitSphericalRepresentation, + REPRESENTATION_CLASSES) + +__all__ = ['BaseCoordinateFrame', 'frame_transform_graph', 'GenericFrame', 'FrameAttribute', + 'TimeFrameAttribute', 'RepresentationMapping'] + +# the graph used for all transformations between frames +frame_transform_graph = TransformGraph() + + +def _get_repr_cls(value): + """Return a valid representation class from ``value`` or raise exception.""" + if value in REPRESENTATION_CLASSES: + value = REPRESENTATION_CLASSES[value] + try: + assert issubclass(value, BaseRepresentation) # value might not be a class, so use try + except: + raise ValueError('representation is {0!r} but must be a BaseRepresentation class ' + ' or one of the string aliases {1}' + .format(value, list(REPRESENTATION_CLASSES))) + return value + + +class FrameMeta(type): + def __new__(cls, name, parents, clsdct): + if 'default_representation' in clsdct: + def_repr = clsdct.pop('default_representation') + found_def_repr = True + else: + def_repr = None + found_def_repr = False + + if 'frame_specific_representation_info' in clsdct: + repr_info = clsdct.pop('frame_specific_representation_info') + found_repr_info = True + else: + repr_info = None + found_repr_info = False + + # somewhat hacky, but this is the best way to get the MRO according to + # https://mail.python.org/pipermail/python-list/2002-December/167861.html + mro = 
super(FrameMeta, cls).__new__(cls, name, parents, clsdct).__mro__ + parent_clsdcts = [c.__dict__ for c in mro] + + #now look through the whole MRO for the class attributes, raw + # for frame_attr_names, and leading underscore for others + for clsdcti in parent_clsdcts: + if not found_def_repr and '_default_representation' in clsdcti: + def_repr = clsdcti['_default_representation'] + found_def_repr = True + if not found_repr_info and '_frame_specific_representation_info' in clsdcti: + repr_info = clsdcti['_frame_specific_representation_info'] + found_repr_info = True + + if found_def_repr and found_repr_info: + break + else: + raise ValueError('Could not find all expected BaseCoordinateFrame ' + 'class attributes. Are you mis-using FrameMeta?') + + # Make read-only properties for the frame class attributes that should + # be read-only to make them immutable after creation. + # We copy attributes instead of linking to make sure there's no + # accidental cross-talk between classes + clsdct['_default_representation'] = def_repr + clsdct['default_representation'] = FrameMeta.readonly_prop_factory('default_representation') + + clsdct['_frame_specific_representation_info'] = deepcopy(repr_info) + clsdct['frame_specific_representation_info'] = FrameMeta.readonly_prop_factory('frame_specific_representation_info') + + # now set the frame name as lower-case class name, if it isn't explicit + if 'name' not in clsdct: + clsdct['name'] = name.lower() + + return super(FrameMeta, cls).__new__(cls, name, parents, clsdct) + + @staticmethod + def readonly_prop_factory(attrnm): + def getter(self): + return getattr(self, '_' + attrnm) + return property(getter) + + +class FrameAttribute(object): + """A non-mutable data descriptor to hold a frame attribute. + + This class must be used to define frame attributes (e.g. ``equinox`` or + ``obstime``) that are included in a frame class definition. 
+ + Examples + -------- + The `~astropy.coordinates.FK4` class uses the following class attributes:: + + class FK4(BaseCoordinateFrame): + equinox = TimeFrameAttribute(default=_EQUINOX_B1950) + obstime = TimeFrameAttribute(default=None, secondary_attribute='equinox') + + This means that ``equinox`` and ``obstime`` are available to be set as + keyword arguments when creating an ``FK4`` class instance and are then + accessible as instance attributes. The instance value for the attribute + must be stored in ``'_' + <attr_name>`` by the frame ``__init__`` + method. + + Note in this example that ``equinox`` and ``obstime`` are time attributes + and use the ``TimeFrameAttribute`` class. This subclass overrides the + ``convert_input`` method to validate and convert inputs into a ``Time`` + object. + + Parameters + ---------- + default : object + Default value for the attribute if not provided + secondary_attribute : str + Name of a secondary instance attribute which supplies the value + if ``default is None`` and no value was supplied during initialization. + + Returns + ------- + frame_attr : descriptor + A new data descriptor to hold a frame attribute + """ + + _nextid = 1 + """ + Used to ascribe some ordering to FrameAttribute instances so that the + order they were assigned in a class body can be determined. + """ + + def __init__(self, default=None, secondary_attribute=''): + self.default = default + self.secondary_attribute = secondary_attribute + + # Use FrameAttribute._nextid explicitly so that subclasses of + # FrameAttribute use the same counter + self._order = FrameAttribute._nextid + FrameAttribute._nextid += 1 + + def convert_input(self, value): + """ + Validate the input ``value`` and convert to expected attribute class. + + The base method here does nothing, but subclasses can implement this + as needed. The method should catch any internal exceptions and raise + ValueError with an informative message. 
+ + The method returns the validated input along with a boolean that indicates + whether the input value was actually converted. If the input value was + already the correct type then the ``converted`` return value should be + ``False``. + + Parameters + ---------- + value : object + Input value to be converted. + + Returns + ------- + output_value + The ``value`` converted to the correct type (or just ``value`` if + ``converted`` is False) + converted : bool + True if the conversion was actually performed, False otherwise. + + Raises + ------ + ValueError + If the input is not valid for this attribute. + """ + return value, False + + def __get__(self, instance, frame_cls=None): + if not hasattr(self, 'name'): + # Find attribute name of self by finding this object in the frame class + # which is requesting this attribute or any of its superclasses. + for mro_cls in frame_cls.__mro__: + for name, val in mro_cls.__dict__.items(): + if val is self: + self.name = name + break + if hasattr(self, 'name'): # Can't nicely break out of two loops + break + else: + # Cannot think of a way to actually raise this exception. This instance + # containing this code must be in the class dict in order to get excecuted + # by attribute access. But leave this here just in case... + raise AttributeError('Unexpected inability to locate descriptor') + + out = None + + if instance is not None: + out = getattr(instance, '_' + self.name, None) + if out is None and self.default is None: + out = getattr(instance, self.secondary_attribute, None) + + if out is None: + out = self.default + + out, converted = self.convert_input(out) + if instance is not None and converted: + setattr(instance, '_' + self.name, out) + + return out + + def __set__(self, instance, val): + raise AttributeError('Cannot set frame attribute') + + +class TimeFrameAttribute(FrameAttribute): + """ + Frame attribute descriptor for quantities that are Time objects. 
+ See the `~astropy.coordinates.FrameAttribute` API doc for further + information. + + Parameters + ---------- + default : object + Default value for the attribute if not provided + secondary_attribute : str + Name of a secondary instance attribute which supplies the value + if ``default is None`` and no value was supplied during initialization. + + Returns + ------- + frame_attr : descriptor + A new data descriptor to hold a frame attribute + """ + def convert_input(self, value): + """ + Convert input value to a Time object and validate by running through the + Time constructor. Also check that the input was a scalar. + + Parameters + ---------- + value : object + Input value to be converted. + + Returns + ------- + out, converted : correctly-typed object, boolean + Tuple consisting of the correctly-typed object and a boolean which + indicates if conversion was actually performed. + + Raises + ------ + ValueError + If the input is not valid for this attribute. + """ + from ..time import Time + + if value is None: + return None, False + + if isinstance(value, Time): + out = value + converted = False + else: + try: + out = Time(value) + except Exception as err: + raise ValueError('Invalid time input {0}={1!r}\n{2}' + .format(self.name, value, err)) + converted = True + + if not out.isscalar: + raise ValueError('Time input {0}={1!r} must be a single (scalar) value' + .format(self.name, value)) + + return out, converted + + +class RepresentationMapping(namedtuple('RepresentationMapping', + ['reprname', 'framename', 'defaultunit'])): + """ + This `~collections.namedtuple` is used with the + ``frame_specific_representation_info`` attribute to tell frames what + attribute names (and default units) to use for a particular representation. 
+ ``reprname`` and ``framename`` should be strings, while ``defaultunit`` can + be either an astropy unit, the string ``'recommended'`` (to use whatever the + representation's ``recommended_units`` is), or None (to indicate that no + unit mapping should be done). + """ + def __new__(cls, reprname, framename, defaultunit='recommended'): + # this trick just provides some defaults + return super(RepresentationMapping, cls).__new__(cls, reprname, + framename, defaultunit) + + +@six.add_metaclass(FrameMeta) +class BaseCoordinateFrame(object): + """ + The base class for coordinate frames. + + This class is intended to be subclassed to create instances of specific + systems. Subclasses can implement the following attributes: + + * `default_representation` + A subclass of `~astropy.coordinates.BaseRepresentation` that will be + treated as the default representation of this frame. This is the + representation assumed by default when the frame is created. + + * `~astropy.coordinates.FrameAttribute` class attributes + Frame attributes such as ``FK4.equinox`` or ``FK4.obstime`` are defined + using a descriptor class. See the narrative documentation or + built-in classes code for details. + + * `frame_specific_representation_info` + A dictionary mapping the name or class of a representation to a list + of `~astropy.coordinates.RepresentationMapping` objects that tell what + names and default units should be used on this frame for the components + of that representation. + + """ + + default_representation = None + frame_specific_representation_info = {} # specifies special names/units for representation attributes + + # This __new__ provides for backward-compatibility with pre-0.4 API. 
+ # TODO: remove in 1.0 + def __new__(cls, *args, **kwargs): + + # Only do backward-compatibility if frame is previously defined one + frame_name = cls.__name__.lower() + if frame_name not in ['altaz', 'fk4', 'fk4noeterms', 'fk5', 'galactic', 'icrs']: + return super(BaseCoordinateFrame, cls).__new__(cls) + + use_skycoord = False + + if len(args) > 1 or (len(args) == 1 and not isinstance(args[0], BaseRepresentation)): + for arg in args: + if (not isinstance(arg, u.Quantity) + and not isinstance(arg, BaseRepresentation)): + msg = ('Initializing frame classes like "{0}" using string ' + 'or other non-Quantity arguments is deprecated, and ' + 'will be removed in the next version of Astropy. ' + 'Instead, you probably want to use the SkyCoord ' + 'class with the "system={1}" keyword, or if you ' + 'really want to use the low-level frame classes, ' + 'create it with an Angle or Quantity.') + + warnings.warn(msg.format(cls.__name__, + cls.__name__.lower()), + AstropyDeprecationWarning) + use_skycoord = True + break + + if 'unit' in kwargs and not use_skycoord: + warnings.warn("Initializing frames using the ``unit`` argument is " + "now deprecated. Use SkyCoord or pass Quantity " + " instances to frames instead.", AstropyDeprecationWarning) + use_skycoord = True + + if not use_skycoord: + representation = _get_repr_cls(kwargs.get('representation', + cls._default_representation)) + for key in cls._get_representation_info()[representation]['names']: + if key in kwargs: + if not isinstance(kwargs[key], u.Quantity): + warnings.warn("Initializing frames using non-Quantity " + "arguments is now deprecated. 
Use " + "SkyCoord or pass Quantity instances " + "instead.", AstropyDeprecationWarning) + use_skycoord = True + break + + if use_skycoord: + kwargs['frame'] = frame_name + from .sky_coordinate import SkyCoord + return SkyCoord(*args, **kwargs) + else: + return super(BaseCoordinateFrame, cls).__new__(cls) + + def __init__(self, *args, **kwargs): + self._attr_names_with_defaults = [] + + if 'representation' in kwargs: + self.representation = kwargs.pop('representation') + + representation_data = None # if not set below, this is a frame with no data + + for fnm, fdefault in self.get_frame_attr_names().items(): + # Read-only frame attributes are defined as FrameAttribue + # descriptors which are not settable, so set 'real' attributes as + # the name prefaced with an underscore. + + if fnm in kwargs: + value = kwargs.pop(fnm) + setattr(self, '_' + fnm, value) + else: + setattr(self, '_' + fnm, fdefault) + self._attr_names_with_defaults.append(fnm) + + # Validate input by getting the attribute here. + getattr(self, fnm) + + pref_rep = self.representation + + args = list(args) # need to be able to pop them + if (len(args) > 0) and (isinstance(args[0], BaseRepresentation) or + args[0] is None): + representation_data = args.pop(0) + if len(args) > 0: + raise TypeError('Cannot create a frame with both a ' + 'representation and other positional arguments') + + elif self.representation: + repr_kwargs = {} + for nmkw, nmrep in self.representation_component_names.items(): + if len(args) > 0: + #first gather up positional args + repr_kwargs[nmrep] = args.pop(0) + elif nmkw in kwargs: + repr_kwargs[nmrep] = kwargs.pop(nmkw) + + #special-case the Spherical->UnitSpherical if no `distance` + #TODO: possibly generalize this somehow? 
+ + if repr_kwargs: + if repr_kwargs.get('distance', True) is None: + del repr_kwargs['distance'] + if (self.representation == SphericalRepresentation and + 'distance' not in repr_kwargs): + representation_data = UnitSphericalRepresentation(**repr_kwargs) + else: + representation_data = self.representation(**repr_kwargs) + + if len(args) > 0: + raise TypeError(self.__class__.__name__ + '.__init__ had {0} ' + 'remaining unprocessed arguments'.format(len(args))) + if kwargs: + raise TypeError('Coordinate frame got unexpected keywords: ' + + str(kwargs.keys())) + + self._data = representation_data + + # We do ``is not None`` because self._data might evaluate to false for + # empty arrays or data == 0 + if self._data is not None: + self._rep_cache = dict() + self._rep_cache[self._data.__class__.__name__, False] = self._data + + @property + def data(self): + """ + The coordinate data for this object. If this frame has no data, an + `~.exceptions.ValueError` will be raised. Use `has_data` to + check if data is present on this frame object. + """ + if self._data is None: + raise ValueError('The frame object "{0}" does not have associated ' + 'data'.format(repr(self))) + return self._data + + @property + def has_data(self): + """ + True if this frame has `data`, False otherwise. 
+ """ + return self._data is not None + + def __len__(self): + return len(self.data) + + def __nonzero__(self): # Py 2.x + return self.isscalar or len(self) != 0 + + def __bool__(self): # Py 3.x + return self.isscalar or len(self) != 0 + + @property + def shape(self): + return self.data.shape + + @property + def isscalar(self): + return self.data.isscalar + + @classmethod + def get_frame_attr_names(cls): + seen = set() + attributes = [] + for mro_cls in cls.__mro__: + for name, val in mro_cls.__dict__.items(): + if isinstance(val, FrameAttribute) and name not in seen: + seen.add(name) + # Add the sort order, name, and actual value of the frame + # attribute in question + attributes.append((val._order, name, + getattr(mro_cls, name))) + + # Sort by the frame attribute order + attributes.sort(key=lambda a: a[0]) + return OrderedDict((a[1], a[2]) for a in attributes) + + @property + def representation(self): + """ + The representation of the data in this frame, as a class that is + subclassed from `~astropy.coordinates.BaseRepresentation`. Can + also be *set* using the string name of the representation. + """ + if not hasattr(self, '_representation'): + self._representation = self.default_representation + return self._representation + + @representation.setter + def representation(self, value): + self._representation = _get_repr_cls(value) + + @classmethod + def _get_representation_info(cls): + # This exists as a class method only to support handling frame inputs + # without units, which are deprecated and will be removed. This can be + # moved into the representation_info property at that time. 
+ + repr_attrs = {} + for repr_cls in REPRESENTATION_CLASSES.values(): + repr_attrs[repr_cls] = {'names': [], 'units': []} + for c in repr_cls.attr_classes.keys(): + repr_attrs[repr_cls]['names'].append(c) + rec_unit = repr_cls.recommended_units.get(c, None) + repr_attrs[repr_cls]['units'].append(rec_unit) + + for repr_cls, mappings in cls._frame_specific_representation_info.items(): + # keys may be a class object or a name + repr_cls = _get_repr_cls(repr_cls) + + # take the 'names' and 'units' tuples from repr_attrs, + # and then use the RepresentationMapping objects + # to update as needed for this frame. + nms = repr_attrs[repr_cls]['names'] + uns = repr_attrs[repr_cls]['units'] + comptomap = dict([(m.reprname, m) for m in mappings]) + for i, c in enumerate(repr_cls.attr_classes.keys()): + if c in comptomap: + mapp = comptomap[c] + nms[i] = mapp.framename + # need the isinstance because otherwise if it's a unit it + # will try to compare to the unit string representation + if not (isinstance(mapp.defaultunit, six.string_types) and + mapp.defaultunit == 'recommended'): + uns[i] = mapp.defaultunit + # else we just leave it as recommended_units says above + # Convert to tuples so that this can't mess with frame internals + repr_attrs[repr_cls]['names'] = tuple(nms) + repr_attrs[repr_cls]['units'] = tuple(uns) + + return repr_attrs + + @property + def representation_info(self): + """ + A dictionary with the information of what attribute names for this frame + apply to particular representations. 
+ """ + return self._get_representation_info() + + @property + def representation_component_names(self): + out = OrderedDict() + if self.representation is None: + return out + data_names = self.representation.attr_classes.keys() + repr_names = self.representation_info[self.representation]['names'] + for repr_name, data_name in zip(repr_names, data_names): + out[repr_name] = data_name + return out + + @property + def representation_component_units(self): + out = OrderedDict() + if self.representation is None: + return out + repr_attrs = self.representation_info[self.representation] + repr_names = repr_attrs['names'] + repr_units = repr_attrs['units'] + for repr_name, repr_unit in zip(repr_names, repr_units): + if repr_unit: + out[repr_name] = repr_unit + return out + + + def realize_frame(self, representation): + """ + Generates a new frame *with new data* from another frame (which may or + may not have data). + + Parameters + ---------- + representation : BaseRepresentation + The representation to use as the data for the new frame. + + Returns + ------- + frameobj : same as this frame + A new object with the same frame attributes as this one, but + with the ``representation`` as the data. + """ + frattrs = dict([(nm, getattr(self, nm)) for nm in self.get_frame_attr_names() + if nm not in self._attr_names_with_defaults]) + return self.__class__(representation, **frattrs) + + def represent_as(self, new_representation, in_frame_units=False): + """ + Generate and return a new representation of this frame's `data`. + + Parameters + ---------- + new_representation : subclass of BaseRepresentation or string + The type of representation to generate. May be a *class* + (not an instance), or the string name of the representation + class. + + in_frame_units : bool + Force the representation units to match the specified units + particular to this frame + + Returns + ------- + newrep : whatever ``new_representation`` is + A new representation object of this frame's `data`. 
+ + Raises + ------ + AttributeError + If this object had no `data` + + Examples + -------- + >>> from astropy import units as u + >>> from astropy.coordinates import ICRS, CartesianRepresentation + >>> coord = ICRS(0*u.deg, 0*u.deg) + >>> coord.represent_as(CartesianRepresentation) + + """ + new_representation = _get_repr_cls(new_representation) + + cached_repr = self._rep_cache.get((new_representation.__name__, in_frame_units)) + if not cached_repr: + data = self.data.represent_as(new_representation) + + # If the new representation is known to this frame and has a defined + # set of names and units, then use that. + new_attrs = self.representation_info.get(new_representation) + if new_attrs and in_frame_units: + datakwargs = dict((comp, getattr(data, comp)) for comp in data.components) + for comp, new_attr_unit in zip(data.components, new_attrs['units']): + if new_attr_unit: + datakwargs[comp] = datakwargs[comp].to(new_attr_unit) + data = data.__class__(**datakwargs) + + self._rep_cache[new_representation.__name__, in_frame_units] = data + + return self._rep_cache[new_representation.__name__, in_frame_units] + + def transform_to(self, new_frame): + """ + Transform this object's coordinate data to a new frame. + + Parameters + ---------- + new_frame : class or frame object or SkyCoord object + The frame to transform this coordinate frame into. + + Returns + ------- + transframe + A new object with the coordinate data represented in the + ``newframe`` system. + + Raises + ------ + ValueError + If there is no possible transformation route. + """ + from .errors import ConvertError + + if self._data is None: + raise ValueError('Cannot transform a frame with no data') + + if inspect.isclass(new_frame): + #means use the defaults for this class + new_frame = new_frame() + + if hasattr(new_frame, '_sky_coord_frame'): + # Input new_frame is not a frame instance or class and is most + # likely a SkyCoord object. 
+ new_frame = new_frame._sky_coord_frame + + trans = frame_transform_graph.get_transform(self.__class__, + new_frame.__class__) + if trans is None: + if new_frame is self.__class__: + # no special transform needed, but should update frame info + return new_frame.realize_frame(self.data) + msg = 'Cannot transform from {0} to {1}' + raise ConvertError(msg.format(self.__class__, new_frame.__class__)) + return trans(self, new_frame) + + def is_transformable_to(self, new_frame): + """ + Determines if this coordinate frame can be transformed to another + given frame. + + Parameters + ---------- + new_frame : class or frame object + The proposed frame to transform into. + + Returns + ------- + transformable : bool or str + `True` if this can be transformed to ``new_frame``, `False` if + not, or the string 'same' if ``new_frame`` is the same system as + this object but no transformation is defined. + + Notes + ----- + A return value of 'same' means the transformation will work, but it will + just give back a copy of this object. The intended usage is:: + + if coord.is_transformable_to(some_unknown_frame): + coord2 = coord.transform_to(some_unknown_frame) + + This will work even if ``some_unknown_frame`` turns out to be the same + frame class as ``coord``. This is intended for cases where the frame is + the same regardless of the frame attributes (e.g. ICRS), but be aware + that it *might* also indicate that someone forgot to define the + transformation between two objects of the same frame class but with + different attributes. 
+ + """ + + new_frame_cls = new_frame if inspect.isclass(new_frame) else new_frame.__class__ + trans = frame_transform_graph.get_transform(self.__class__, new_frame_cls) + + if trans is None: + if new_frame_cls is self.__class__: + return 'same' + else: + return False + else: + return True + + def is_frame_attr_default(self, attrnm): + """ + Determine whether or not a frame attribute has its value because it's + the default value, or because this frame was created with that value + explicitly requested. + + Parameters + ---------- + attrnm : str + The name of the attribute to check. + + Returns + ------- + isdefault : bool + True if the attribute ``attrnm`` has its value by default, False if it + was specified at creation of this frame. + """ + return attrnm in self._attr_names_with_defaults + + def __repr__(self): + frameattrs = ', '.join([attrnm + '=' + str(getattr(self, attrnm)) + for attrnm in self.get_frame_attr_names()]) + + if self.has_data: + if self.representation: + if (self.representation == SphericalRepresentation and + isinstance(self.data, UnitSphericalRepresentation)): + data = self.represent_as(UnitSphericalRepresentation, in_frame_units=True) + else: + data = self.represent_as(self.representation, in_frame_units=True) + + data_repr = repr(data) + for nmpref, nmrepr in self.representation_component_names.items(): + data_repr = data_repr.replace(nmrepr, nmpref) + + else: + data = self.data + data_repr = repr(self.data) + + if data_repr.startswith('<' + data.__class__.__name__): + # standard form from BaseRepresentation + if frameattrs: + frameattrs = frameattrs + ', ' + + #remove both the leading "<" and the space after the name + data_repr = data_repr[(len(data.__class__.__name__) + 2):] + + return '<{0} Coordinate: {1}{2}'.format(self.__class__.__name__, + frameattrs, data_repr) + else: + # should only happen if a representation has a non-standard + # __repr__ method, and we just punt to that + if frameattrs: + frameattrs = ': ' + frameattrs + ', ' 
+ s = '<{0} Coordinate{1}Data:\n{2}>' + return s.format(self.__class__.__name__, frameattrs, data_repr) + else: + if frameattrs: + frameattrs = ': ' + frameattrs + return '<{0} Frame{1}>'.format(self.__class__.__name__, frameattrs) + + def __getitem__(self, view): + if self.has_data: + return self.realize_frame(self.data[view]) + else: + raise ValueError('Cannot index a frame with no data') + + @override__dir__ + def __dir__(self): + """ + Override the builtin `dir` behavior to include representation + names. + + TODO: dynamic representation transforms (i.e. include cylindrical et al.). + """ + dir_values = set(self.representation_component_names) + + return dir_values + + def __getattr__(self, attr): + """ + Allow access to attributes defined in self.representation_component_names. + + TODO: dynamic representation transforms (i.e. include cylindrical et al.). + """ + # attr == '_representation' is likely from the hasattr() test in the + # representation property which is used for self.representation_component_names. + # Prevent infinite recursion here. + if attr == '_representation' or attr not in self.representation_component_names: + raise AttributeError("'{0}' object has no attribute '{1}'" + .format(self.__class__.__name__, attr)) + + rep = self.represent_as(self.representation, in_frame_units=True) + val = getattr(rep, self.representation_component_names[attr]) + return val + + def __setattr__(self, attr, value): + repr_attr_names = [] + if hasattr(self, 'representation_info'): + for representation_attr in self.representation_info.values(): + repr_attr_names.extend(representation_attr['names']) + if attr in repr_attr_names: + raise AttributeError('Cannot set any frame attribute {0}'.format(attr)) + else: + super(BaseCoordinateFrame, self).__setattr__(attr, value) + + def separation(self, other): + """ + Computes on-sky separation between this coordinate and another. 
+ + Parameters + ---------- + other : `~astropy.coordinates.BaseCoordinateFrame` + The coordinate to get the separation to. + + Returns + ------- + sep : `~astropy.coordinates.Angle` + The on-sky separation between this and the ``other`` coordinate. + + Notes + ----- + The separation is calculated using the Vincenty formula, which + is stable at all locations, including poles and antipodes [1]_. + + .. [1] http://en.wikipedia.org/wiki/Great-circle_distance + + """ + from .angle_utilities import angular_separation + from .angles import Angle + + self_unit_sph = self.represent_as(UnitSphericalRepresentation) + other_unit_sph = other.transform_to(self.__class__).represent_as(UnitSphericalRepresentation) + + # Get the separation as a Quantity, convert to Angle in degrees + sep = angular_separation(self_unit_sph.lon, self_unit_sph.lat, + other_unit_sph.lon, other_unit_sph.lat) + return Angle(sep, unit=u.degree) + + def separation_3d(self, other): + """ + Computes three dimensional separation between this coordinate + and another. + + Parameters + ---------- + other : `~astropy.coordinates.BaseCoordinateFrame` + The coordinate system to get the distance to. + + Returns + ------- + sep : `~astropy.coordinates.Distance` + The real-space distance between these two coordinates. + + Raises + ------ + ValueError + If this or the other coordinate do not have distances. 
+ """ + from .distances import Distance + + if self.data.__class__ == UnitSphericalRepresentation: + raise ValueError('This object does not have a distance; cannot ' + 'compute 3d separation.') + + # do this first just in case the conversion somehow creates a distance + other_in_self_system = other.transform_to(self.__class__) + + if other_in_self_system.__class__ == UnitSphericalRepresentation: + raise ValueError('The other object does not have a distance; ' + 'cannot compute 3d separation.') + + dx = self.cartesian.x - other_in_self_system.cartesian.x + dy = self.cartesian.y - other_in_self_system.cartesian.y + dz = self.cartesian.z - other_in_self_system.cartesian.z + + distval = (dx.value ** 2 + dy.value ** 2 + dz.value ** 2) ** 0.5 + return Distance(distval, dx.unit) + + @property + def cartesian(self): + """ + Shorthand for a cartesian representation of the coordinates in this object. + """ + # TODO: if representations are updated to use a full transform graph, + # the representation aliases should not be hard-coded like this + return self.represent_as(CartesianRepresentation, in_frame_units=True) + + @property + def spherical(self): + """ + Shorthand for a spherical representation of the coordinates in this object. + """ + # TODO: if representations are updated to use a full transform graph, + # the representation aliases should not be hard-coded like this + return self.represent_as(SphericalRepresentation, in_frame_units=True) + + +class GenericFrame(BaseCoordinateFrame): + """ + A frame object that can't store data but can hold any arbitrary frame + attributes. Mostly useful as a utility for the high-level class to store + intermediate frame attributes. + + Parameters + ---------- + frame_attrs : dict + A dictionary of attributes to be used as the frame attributes for this + frame. 
+ """ + name = None # it's not a "real" frame so it doesn't have a name + + def __init__(self, frame_attrs): + super(GenericFrame, self).__setattr__('_frame_attr_names', frame_attrs) + super(GenericFrame, self).__init__(None) + + for attrnm, attrval in frame_attrs.items(): + setattr(self, '_' + attrnm, attrval) + + def get_frame_attr_names(self): + return self._frame_attr_names + + def __getattr__(self, name): + if '_' + name in self.__dict__: + return getattr(self, '_' + name) + else: + raise AttributeError('no {0}'.format(name)) + + def __setattr__(self, name, value): + if name in self._frame_attr_names: + raise AttributeError("can't set frame attribute '{0}'".format(name)) + else: + super(GenericFrame, self).__setattr__(name, value) + + diff --git a/astropy/coordinates/builtin_frames.py b/astropy/coordinates/builtin_frames.py new file mode 100644 index 0000000..8f44e12 --- /dev/null +++ b/astropy/coordinates/builtin_frames.py @@ -0,0 +1,619 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" +This module contains the coordinate frames actually implemented by astropy. +""" +from __future__ import (absolute_import, unicode_literals, division, + print_function) + +# Standard library +import inspect + +# Dependencies +import numpy as np + +# Project +from ..extern import six +from ..utils.compat.odict import OrderedDict +from .. import units as u +from ..time import Time +from .angles import Angle +from .representation import (SphericalRepresentation, CartesianRepresentation, + UnitSphericalRepresentation) +from .baseframe import (BaseCoordinateFrame, frame_transform_graph, GenericFrame, + FrameAttribute, TimeFrameAttribute, + RepresentationMapping) +from .transformations import FunctionTransform, DynamicMatrixTransform + + +__all__ = ['ICRS', 'FK5', 'FK4', 'FK4NoETerms', 'Galactic', 'AltAz'] + +# The UTC time scale is not properly defined prior to 1960, so Time('B1950', +# scale='utc') will emit a warning. 
Instead, we use Time('B1950', scale='tai') +# which is equivalent, but does not emit a warning. +_EQUINOX_J2000 = Time('J2000', scale='utc') +_EQUINOX_B1950 = Time('B1950', scale='tai') + + +class ICRS(BaseCoordinateFrame): + """ + A coordinate or frame in the ICRS system. + + If you're looking for "J2000" coordinates, and aren't sure if you want to + use this or `FK5`, you probably want to use ICRS. It's more well-defined as + a catalog coordinate and is an inertial system, and is very close (within + tens of arcseconds) to J2000 equatorial. + + Parameters + ---------- + representation : `BaseRepresentation` or None + A representation object or None to have no data (or use the other keywords) + ra : `Angle`, optional, must be keyword + The RA for this object (``dec`` must also be given and ``representation`` + must be None). + dec : `Angle`, optional, must be keyword + The Declination for this object (``ra`` must also be given and + ``representation`` must be None). + distance : `~astropy.units.Quantity`, optional, must be keyword + The Distance for this object along the line-of-sight. + (``representation`` must be None). + """ + + frame_specific_representation_info = { + 'spherical': [RepresentationMapping('lon', 'ra'), + RepresentationMapping('lat', 'dec')] + } + frame_specific_representation_info['unitspherical'] = \ + frame_specific_representation_info['spherical'] + + default_representation = SphericalRepresentation + + @staticmethod + def _icrs_to_fk5_matrix(): + """ + B-matrix from USNO circular 179. Used by the ICRS->FK5 transformation + functions. + """ + from .angles import rotation_matrix + + eta0 = -19.9 / 3600000. + xi0 = 9.1 / 3600000. + da0 = -22.9 / 3600000. 
+ + m1 = rotation_matrix(-eta0, 'x') + m2 = rotation_matrix(xi0, 'y') + m3 = rotation_matrix(da0, 'z') + + return m1 * m2 * m3 + +# define this because it only needs to be computed once +ICRS._ICRS_TO_FK5_J2000_MAT = ICRS._icrs_to_fk5_matrix() + + +class FK5(BaseCoordinateFrame): + """ + A coordinate or frame in the FK5 system. + + Parameters + ---------- + representation : `BaseRepresentation` or None + A representation object or None to have no data (or use the other keywords) + ra : `Angle`, optional, must be keyword + The RA for this object (``dec`` must also be given and ``representation`` + must be None). + dec : `Angle`, optional, must be keyword + The Declination for this object (``ra`` must also be given and + ``representation`` must be None). + distance : `~astropy.units.Quantity`, optional, must be keyword + The Distance for this object along the line-of-sight. + (``representation`` must be None). + equinox : `~astropy.time.Time`, optional, must be keyword + The equinox of this frame. + """ + frame_specific_representation_info = { + 'spherical': [RepresentationMapping('lon', 'ra'), + RepresentationMapping('lat', 'dec')] + } + frame_specific_representation_info['unitspherical'] = \ + frame_specific_representation_info['spherical'] + + default_representation = SphericalRepresentation + equinox = TimeFrameAttribute(default=_EQUINOX_J2000) + + @staticmethod + def _precession_matrix(oldequinox, newequinox): + """ + Compute and return the precession matrix for FK5 based on Capitaine et + al. 2003/IAU2006. Used inside some of the transformation functions. + + Parameters + ---------- + oldequinox : `~astropy.time.Time` + The equinox to precess from. + newequinox : `~astropy.time.Time` + The equinox to precess to. 
+ + Returns + ------- + newcoord : array + The precession matrix to transform to the new equinox + """ + from .earth_orientation import precession_matrix_Capitaine + + return precession_matrix_Capitaine(oldequinox, newequinox) + + +# Has to be defined at module level, because `transform` needs an FK5 reference +@frame_transform_graph.transform(DynamicMatrixTransform, FK5, FK5) +def fk5_to_fk5(fk5coord1, fk5frame2): + return fk5coord1._precession_matrix(fk5coord1.equinox, fk5frame2.equinox) + + +class FK4(BaseCoordinateFrame): + """ + A coordinate or frame in the FK4 system. + + Parameters + ---------- + representation : `BaseRepresentation` or None + A representation object or None to have no data (or use the other keywords) + ra : `Angle`, optional, must be keyword + The RA for this object (``dec`` must also be given and ``representation`` + must be None). + dec : `Angle`, optional, must be keyword + The Declination for this object (``ra`` must also be given and + ``representation`` must be None). + distance : :class:`~astropy.units.Quantity`, optional, must be keyword + The Distance for this object along the line-of-sight. + (``representation`` must be None). + equinox : astropy.time.Time, optional, must be keyword + The equinox of this frame. + obstime : astropy.time.Time, optional, must be keyword + The time this frame was observed. If None, will be the same as + ``equinox``. 
+ """ + frame_specific_representation_info = { + 'spherical': [RepresentationMapping('lon', 'ra'), + RepresentationMapping('lat', 'dec')] + } + frame_specific_representation_info['unitspherical'] = \ + frame_specific_representation_info['spherical'] + + default_representation = SphericalRepresentation + equinox = TimeFrameAttribute(default=_EQUINOX_B1950) + obstime = TimeFrameAttribute(default=None, secondary_attribute='equinox') + + +@frame_transform_graph.transform(FunctionTransform, FK4, FK4) +def fk4_to_fk4(fk4coord1, fk4frame2): + # deceptively complicated: need to transform to No E-terms FK4, precess, and + # then come back, because precession is non-trivial with E-terms + fnoe_w_eqx1 = fk4coord1.transform_to(FK4NoETerms(equinox=fk4coord1.equinox)) + fnoe_w_eqx2 = fnoe_w_eqx1.transform_to(FK4NoETerms(equinox=fk4frame2.equinox)) + return fnoe_w_eqx2.transform_to(fk4frame2) + + +class FK4NoETerms(BaseCoordinateFrame): + """ + A coordinate or frame in the FK4 system, but with the E-terms of aberration + removed. + + Parameters + ---------- + representation : `BaseRepresentation` or None + A representation object or None to have no data (or use the other keywords) + ra : `Angle`, optional, must be keyword + The RA for this object (``dec`` must also be given and ``representation`` + must be None). + dec : `Angle`, optional, must be keyword + The Declination for this object (``ra`` must also be given and + ``representation`` must be None). + distance : :class:`~astropy.units.Quantity`, optional, must be keyword + The Distance for this object along the line-of-sight. + (``representation`` must be None). + obstime : astropy.time.Time, optional, must be keyword + The time this frame was observed. If None, will be the same as + ``equinox``. 
+ """ + frame_specific_representation_info = { + 'spherical': [RepresentationMapping('lon', 'ra'), + RepresentationMapping('lat', 'dec')] + } + frame_specific_representation_info['unitspherical'] = \ + frame_specific_representation_info['spherical'] + + default_representation = SphericalRepresentation + equinox = TimeFrameAttribute(default=_EQUINOX_B1950) + obstime = TimeFrameAttribute(default=None, secondary_attribute='equinox') + + @staticmethod + def _precession_matrix(oldequinox, newequinox): + """ + Compute and return the precession matrix for FK4 using Newcomb's method. + Used inside some of the transformation functions. + + Parameters + ---------- + oldequinox : `~astropy.time.Time` + The equinox to precess from. + newequinox : `~astropy.time.Time` + The equinox to precess to. + + Returns + ------- + newcoord : array + The precession matrix to transform to the new equinox + """ + from .earth_orientation import _precession_matrix_besselian + + return _precession_matrix_besselian(oldequinox.byear, newequinox.byear) + + @staticmethod + def _fk4_B_matrix(obstime): + """ + This is a correction term in the FK4 transformations because FK4 is a + rotating system - see Murray 89 eqn 29 + """ + # Note this is *julian century*, not besselian + T = (obstime.jyear - 1950.) / 100. + return _B1950_TO_J2000_M + _FK4_CORR * T + + +@frame_transform_graph.transform(DynamicMatrixTransform, FK4NoETerms, FK4NoETerms) +def fk4noe_to_fk4noe(fk4necoord1, fk4neframe2): + return fk4necoord1._precession_matrix(fk4necoord1.equinox, fk4neframe2.equinox) + + +class Galactic(BaseCoordinateFrame): + """ + Galactic Coordinates. + + Parameters + ---------- + representation : `BaseRepresentation` or None + A representation object or None to have no data (or use the other keywords) + l : `Angle`, optional, must be keyword + The Galactic longitude for this object (``b`` must also be given and + ``representation`` must be None). 
+ b : `Angle`, optional, must be keyword + The Galactic latitude for this object (``l`` must also be given and + ``representation`` must be None). + distance : `~astropy.units.Quantity`, optional, must be keyword + The Distance for this object along the line-of-sight. + """ + + frame_specific_representation_info = { + 'spherical': [RepresentationMapping('lon', 'l'), + RepresentationMapping('lat', 'b')], + 'cartesian': [RepresentationMapping('x', 'w'), + RepresentationMapping('y', 'u'), + RepresentationMapping('z', 'v')] + } + frame_specific_representation_info['unitspherical'] = \ + frame_specific_representation_info['spherical'] + + default_representation = SphericalRepresentation + + # North galactic pole and zeropoint of l in FK4/FK5 coordinates. Needed for + # transformations to/from FK4/5 + # These are from Reid & Brunthaler 2004 + _ngp_J2000 = FK5(ra=192.859508*u.degree, dec=27.128336*u.degree) + _lon0_J2000 = Angle(122.932, u.degree) + # These are from the IAU's definition of galactic coordinates + _ngp_B1950 = FK4(ra=192.25*u.degree, dec=27.4*u.degree) + _lon0_B1950 = Angle(123, u.degree) + + +class AltAz(BaseCoordinateFrame): + """ + A coordinate or frame in the Altitude-Azimuth system (i.e., Horizontal + coordinates). + + .. warning:: + The AltAz class currently does not support any transformations. In a + future version, it will support the standard IAU2000 AltAz<->ICRS + transformations. It is provided right now as a placeholder for storing + as-observed horizontal coordinates. + + Parameters + ---------- + representation : `BaseRepresentation` or None + A representation object or None to have no data (or use the other keywords) + az : `Angle`, optional, must be keyword + The Azimuth for this object (``alt`` must also be given and + ``representation`` must be None). + alt : `Angle`, optional, must be keyword + The Altitude for this object (``az`` must also be given and + ``representation`` must be None). 
+ distance : :class:`~astropy.units.Quantity`, optional, must be keyword + The Distance for this object along the line-of-sight. + """ + + frame_specific_representation_info = { + 'spherical': [RepresentationMapping('lon', 'az'), + RepresentationMapping('lat', 'alt')], + } + frame_specific_representation_info['unitspherical'] = \ + frame_specific_representation_info['spherical'] + + default_representation = SphericalRepresentation + equinox = TimeFrameAttribute(default=_EQUINOX_B1950) + location = FrameAttribute(default=None) + obstime = TimeFrameAttribute(default=None) + + def __init__(self, *args, **kwargs): + from warnings import warn + from astropy.utils.exceptions import AstropyWarning + + warn(AstropyWarning('The AltAz class currently does not support any ' + 'transformations. In a future version, it will ' + 'support the standard IAU2000 AltAz<->ICRS ' + 'transformations.')) + super(AltAz, self).__init__(*args, **kwargs) + + +# <--------------------------------transformations------------------------------> +# Transformations are defined here instead of in the classes themselves, because +# we need references to the various objects to give to the decorators. 
+
+# ICRS to/from FK5 -------------------------------->
+@frame_transform_graph.transform(DynamicMatrixTransform, ICRS, FK5)
+def icrs_to_fk5(icrscoord, fk5frame):
+    # ICRS is by design very close to J2000 equinox
+    pmat = fk5frame._precession_matrix(_EQUINOX_J2000, fk5frame.equinox)
+    return pmat * icrscoord._ICRS_TO_FK5_J2000_MAT
+
+
+# can't be static because the equinox is needed
+@frame_transform_graph.transform(DynamicMatrixTransform, FK5, ICRS)
+def fk5_to_icrs(fk5coord, icrsframe):
+    # ICRS is by design very close to J2000 equinox
+    pmat = fk5coord._precession_matrix(fk5coord.equinox, _EQUINOX_J2000)
+    return icrsframe._ICRS_TO_FK5_J2000_MAT.T * pmat
+
+
+# FK4-NO-E to/from FK4 ----------------------------->
+
+# In the present framework, we include two coordinate classes for FK4
+# coordinates - one including the E-terms of aberration (FK4), and
+# one not including them (FK4NoETerms). The following functions
+# implement the transformation between these two.
+def fk4_e_terms(equinox):
+    """
+    Return the e-terms of aberration vector
+
+    Parameters
+    ----------
+    equinox : Time object
+        The equinox for which to compute the e-terms
+    """
+
+    from . 
import earth_orientation as earth + + # Constant of aberration at J2000 + k = 0.0056932 + + # Eccentricity of the Earth's orbit + e = earth.eccentricity(equinox.jd) + e = np.radians(e) + + # Mean longitude of perigee of the solar orbit + g = earth.mean_lon_of_perigee(equinox.jd) + g = np.radians(g) + + # Obliquity of the ecliptic + o = earth.obliquity(equinox.jd, algorithm=1980) + o = np.radians(o) + + return e * k * np.sin(g), \ + -e * k * np.cos(g) * np.cos(o), \ + -e * k * np.cos(g) * np.sin(o) + + +@frame_transform_graph.transform(FunctionTransform, FK4, FK4NoETerms) +def fk4_to_fk4_no_e(fk4coord, fk4noeframe): + from .representation import CartesianRepresentation, UnitSphericalRepresentation + + # Extract cartesian vector + c = fk4coord.cartesian.xyz + r = np.asarray(c.reshape((3, c.size // 3))) + + # Find distance (for re-normalization) + d_orig = np.sqrt(np.sum(r ** 2)) + + # Apply E-terms of aberration. Note that this depends on the equinox (not + # the observing time/epoch) of the coordinates. See issue #1496 for a + # discussion of this. 
+ eterms_a = np.asarray(fk4_e_terms(fk4coord.equinox)) + r = r - eterms_a.reshape(3, 1) + np.dot(eterms_a, r) * r + + # Find new distance (for re-normalization) + d_new = np.sqrt(np.sum(r ** 2)) + + # Renormalize + r = r * d_orig / d_new + + subshape = c.shape[1:] + x = r[0].reshape(subshape) + y = r[1].reshape(subshape) + z = r[2].reshape(subshape) + + #now re-cast into an appropriate Representation, and precess if need be + if isinstance(fk4coord.data, UnitSphericalRepresentation): + representation = CartesianRepresentation(x=x*u.one, y=y*u.one, z=z*u.one) + representation = representation.represent_as(UnitSphericalRepresentation) + else: + representation = CartesianRepresentation(x=x*c.unit, y=y*c.unit, z=z*c.unit) + + # if no obstime was given in the new frame, use the old one for consistency + newobstime = fk4coord._obstime if fk4noeframe._obstime is None else fk4noeframe._obstime + + fk4noe = FK4NoETerms(representation, equinox=fk4coord.equinox, + obstime=newobstime) + if fk4coord.equinox != fk4noeframe.equinox: + #precession + fk4noe = fk4noe.transform_to(fk4noeframe) + return fk4noe + + +@frame_transform_graph.transform(FunctionTransform, FK4NoETerms, FK4) +def fk4_no_e_to_fk4(fk4noecoord, fk4frame): + from .representation import CartesianRepresentation, UnitSphericalRepresentation + + #first precess, if necessary + if fk4noecoord.equinox != fk4frame.equinox: + fk4noe_w_fk4equinox = FK4NoETerms(equinox=fk4frame.equinox, + obstime=fk4noecoord.obstime) + fk4noecoord = fk4noecoord.transform_to(fk4noe_w_fk4equinox) + + # Extract cartesian vector + c = fk4noecoord.cartesian.xyz + r = np.asarray(c.reshape((3, c.size // 3))) + + # Find distance (for re-normalization) + d_orig = np.sqrt(np.sum(r ** 2)) + + # Apply E-terms of aberration. Note that this depends on the equinox (not + # the observing time/epoch) of the coordinates. See issue #1496 for a + # discussion of this. 
+ eterms_a = np.asarray(fk4_e_terms(fk4noecoord.equinox)) + r0 = r.copy() + for _ in range(10): + r = (eterms_a.reshape(3, 1) + r0) / (1. + np.dot(eterms_a, r)) + + # Find new distance (for re-normalization) + d_new = np.sqrt(np.sum(r ** 2)) + + # Renormalize + r = r * d_orig / d_new + + subshape = c.shape[1:] + x = r[0].reshape(subshape) + y = r[1].reshape(subshape) + z = r[2].reshape(subshape) + + #now re-cast into an appropriate Representation, and precess if need be + if isinstance(fk4noecoord.data, UnitSphericalRepresentation): + representation = CartesianRepresentation(x=x*u.one, y=y*u.one, z=z*u.one) + representation = representation.represent_as(UnitSphericalRepresentation) + else: + representation = CartesianRepresentation(x=x*c.unit, y=y*c.unit, z=z*c.unit) + + return fk4frame.realize_frame(representation) + +# FK5 to/from FK4 -------------------> + +# B1950->J2000 matrix from Murray 1989 A&A 218,325 eqn 28 +_B1950_TO_J2000_M = \ + np.mat([[0.9999256794956877, -0.0111814832204662, -0.0048590038153592], + [0.0111814832391717, 0.9999374848933135, -0.0000271625947142], + [0.0048590037723143, -0.0000271702937440, 0.9999881946023742]]) + +_FK4_CORR = \ + np.mat([[-0.0026455262, -1.1539918689, +2.1111346190], + [+1.1540628161, -0.0129042997, +0.0236021478], + [-2.1112979048, -0.0056024448, +0.0102587734]]) * 1.e-6 + + +# This transformation can't be static because the observation date is needed. 
@frame_transform_graph.transform(DynamicMatrixTransform, FK4NoETerms, FK5)
def fk4_no_e_to_fk5(fk4noecoord, fk5frame):
    """Dynamic rotation matrix from FK4 (no E-terms) to FK5.

    Not a static matrix: the correction for FK4 being a rotating system
    depends on the observation date.
    """
    # Correction terms for FK4 being a rotating system
    corr = FK4NoETerms._fk4_B_matrix(fk4noecoord.obstime)

    # Precess the source down to B1950 and the target up from J2000; both
    # matrices reduce to the identity when the equinoxes are already
    # B1950 and J2000.
    p_from = fk4noecoord._precession_matrix(fk4noecoord.equinox, _EQUINOX_B1950)
    p_to = fk5frame._precession_matrix(_EQUINOX_J2000, fk5frame.equinox)

    return p_to * corr * p_from


# This transformation can't be static because the observation date is needed.
@frame_transform_graph.transform(DynamicMatrixTransform, FK5, FK4NoETerms)
def fk5_to_fk4_no_e(fk5coord, fk4noeframe):
    """Dynamic rotation matrix from FK5 to FK4 (no E-terms).

    Uses the transposed rotating-system correction, which takes
    FK5/J2000 to FK4/B1950.
    """
    corr = FK4NoETerms._fk4_B_matrix(fk4noeframe.obstime).T

    # Precession matrices on either side of the correction; identities
    # when the equinoxes are already J2000 and B1950.
    p_from = fk5coord._precession_matrix(fk5coord.equinox, _EQUINOX_J2000)
    p_to = fk4noeframe._precession_matrix(_EQUINOX_B1950, fk4noeframe.equinox)

    return p_to * corr * p_from


# Galactic to/from FK4/FK5 ----------------------->
# can't be static because the equinox is needed
@frame_transform_graph.transform(DynamicMatrixTransform, FK5, Galactic)
def fk5_to_gal(fk5coord, galframe):
    """Dynamic rotation matrix from FK5 to Galactic coordinates."""
    from .angles import rotation_matrix

    # need to precess to J2000 first
    precess = fk5coord._precession_matrix(fk5coord.equinox, _EQUINOX_J2000)
    # rotate onto the galactic origin/pole as defined at J2000
    rot_lon0 = rotation_matrix(180 - Galactic._lon0_J2000.degree, 'z')
    rot_pole_dec = rotation_matrix(90 - Galactic._ngp_J2000.dec.degree, 'y')
    rot_pole_ra = rotation_matrix(Galactic._ngp_J2000.ra.degree, 'z')

    return rot_lon0 * rot_pole_dec * rot_pole_ra * precess


@frame_transform_graph.transform(DynamicMatrixTransform, Galactic, FK5)
def _gal_to_fk5(galcoord, fk5frame):
    """Inverse of `fk5_to_gal`: transpose of the (orthogonal) forward matrix.

    Note the swapped arguments: the FK5 *frame* supplies the equinox used
    by `fk5_to_gal`.
    """
    return fk5_to_gal(fk5frame, galcoord).T
+@frame_transform_graph.transform(DynamicMatrixTransform, FK4NoETerms, Galactic) +def fk4_to_gal(fk4coords, galframe): + from .angles import rotation_matrix + + mat1 = rotation_matrix(180 - Galactic._lon0_B1950.degree, 'z') + mat2 = rotation_matrix(90 - Galactic._ngp_B1950.dec.degree, 'y') + mat3 = rotation_matrix(Galactic._ngp_B1950.ra.degree, 'z') + matprec = fk4coords._precession_matrix(fk4coords.equinox, _EQUINOX_B1950) + + return mat1 * mat2 * mat3 * matprec + + +@frame_transform_graph.transform(DynamicMatrixTransform, Galactic, FK4NoETerms) +def gal_to_fk4(galcoords, fk4frame): + return fk4_to_gal(fk4frame, galcoords).T + + +def _make_transform_graph_docs(): + """ + Generates a string for use with the coordinate package's docstring + to show the available transforms and coordinate systems + """ + from textwrap import dedent + + isclass = inspect.isclass + coosys = [item for item in list(six.itervalues(globals())) + if isclass(item) and issubclass(item, BaseCoordinateFrame)] + coosys.remove(BaseCoordinateFrame) + coosys.remove(GenericFrame) + graphstr = frame_transform_graph.to_dot_graph(addnodes=coosys) + + docstr = """ + The diagram below shows all of the coordinate systems built into the + `~astropy.coordinates` package, their aliases (useful for converting + other coordinates to them using attribute-style access) and the + pre-defined transformations between them. The user is free to + override any of these transformations by defining new transformations + between these systems, but the pre-defined transformations should be + sufficient for typical usage. + + The graph also indicates the priority for each transformation as a + number next to the arrow. These priorities are used to decide the + preferred order when two transformation paths have the same number + of steps. These priorities are defined such that the path with a + *smaller* total priority is favored. + + + .. 
graphviz:: + + """ + + return dedent(docstr) + ' ' + graphstr.replace('\n', '\n ') +_transform_graph_docs = _make_transform_graph_docs() diff --git a/astropy/coordinates/distances.py b/astropy/coordinates/distances.py new file mode 100644 index 0000000..3b08264 --- /dev/null +++ b/astropy/coordinates/distances.py @@ -0,0 +1,473 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +This module contains the classes and utility functions for distance and +cartesian coordinates. +""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import math + +import numpy as np + +from .. import units as u +from ..utils import deprecated + +__all__ = ['Distance', 'CartesianPoints', 'cartesian_to_spherical', + 'spherical_to_cartesian'] + + +__doctest_requires__ = {'*': ['scipy.integrate']} + + +class Distance(u.Quantity): + """ + A one-dimensional distance. + + This can be initialized in one of four ways: + + * A distance ``value`` (array or float) and a ``unit`` + * A `~astropy.units.Quantity` object + * A redshift and (optionally) a cosmology. + * Providing a distance modulus + + Parameters + ---------- + value : scalar or `~astropy.units.Quantity`. + The value of this distance. + unit : `~astropy.units.UnitBase` + The units for this distance, *if* ``value`` is not a + `~astropy.units.Quantity`. Must have dimensions of distance. + z : float + A redshift for this distance. It will be converted to a distance + by computing the luminosity distance for this redshift given the + cosmology specified by ``cosmology``. Must be given as a keyword + argument. + cosmology : ``Cosmology`` or `None` + A cosmology that will be used to compute the distance from ``z``. + If `None`, the current cosmology will be used (see + `astropy.cosmology` for details). + distmod : float or `~astropy.units.Quantity` + The distance modulus for this distance. 
class Distance(u.Quantity):
    """
    A one-dimensional distance.

    This can be initialized in one of four ways:

    * A distance ``value`` (array or float) and a ``unit``
    * A `~astropy.units.Quantity` object
    * A redshift and (optionally) a cosmology.
    * Providing a distance modulus

    Parameters
    ----------
    value : scalar or `~astropy.units.Quantity`.
        The value of this distance.
    unit : `~astropy.units.UnitBase`
        The units for this distance, *if* ``value`` is not a
        `~astropy.units.Quantity`. Must have dimensions of distance.
    z : float
        A redshift for this distance. It will be converted to a distance
        by computing the luminosity distance for this redshift given the
        cosmology specified by ``cosmology``. Must be given as a keyword
        argument.
    cosmology : ``Cosmology`` or `None`
        A cosmology that will be used to compute the distance from ``z``.
        If `None`, the current cosmology will be used (see
        `astropy.cosmology` for details).
    distmod : float or `~astropy.units.Quantity`
        The distance modulus for this distance. Note that if ``unit`` is not
        provided, a guess will be made at the unit between AU, pc, kpc, and Mpc.
    dtype : `~numpy.dtype`, optional
        See `~astropy.units.Quantity`.
    copy : bool, optional
        See `~astropy.units.Quantity`.
    order : {'C', 'F', 'A'}, optional
        See `~astropy.units.Quantity`.
    subok : bool, optional
        See `~astropy.units.Quantity`.
    ndmin : int, optional
        See `~astropy.units.Quantity`.
    allow_negative : bool, optional
        Whether to allow negative distances (which are possible in some
        cosmologies). Default: ``False``.

    Raises
    ------
    `~astropy.units.UnitsError`
        If the ``unit`` is not a distance.
    ValueError
        If value specified is less than 0 and ``allow_negative=False``.

        If ``z`` is provided with a ``unit`` or ``cosmology`` is provided
        when ``z`` is *not* given, or ``value`` is given as well as ``z``.


    Examples
    --------
    >>> from astropy import units as u
    >>> from astropy import cosmology
    >>> from astropy.cosmology import WMAP5, WMAP7
    >>> cosmology.set_current(WMAP7)
    >>> d1 = Distance(10, u.Mpc)
    >>> d2 = Distance(40, unit=u.au)
    >>> d3 = Distance(value=5, unit=u.kpc)
    >>> d4 = Distance(z=0.23)
    >>> d5 = Distance(z=0.23, cosmology=WMAP5)
    >>> d6 = Distance(distmod=24.47)
    >>> d7 = Distance(Distance(10 * u.Mpc))
    """

    _include_easy_conversion_members = True

    def __new__(cls, value=None, unit=None, z=None, cosmology=None,
                distmod=None, dtype=None, copy=True, order=None,
                subok=False, ndmin=0, allow_negative=False):

        if z is not None:
            # `z` is exclusive with both `value` and `distmod`.
            # (error-message grammar fixed: "Should given" -> "Should give")
            if value is not None or distmod is not None:
                raise ValueError('Should give only one of `value`, `z` '
                                 'or `distmod` in Distance constructor.')

            if cosmology is None:
                from ..cosmology import default_cosmology
                cosmology = default_cosmology.get()

            value = cosmology.luminosity_distance(z)
            # Continue on to take account of unit and other arguments
            # but a copy is already made, so no longer necessary
            copy = False

        else:
            if cosmology is not None:
                raise ValueError('A `cosmology` was given but `z` was not '
                                 'provided in Distance constructor')

            if distmod is not None:
                if value is not None:
                    raise ValueError('Should give only one of `value`, `z` '
                                     'or `distmod` in Distance constructor.')

                value = cls._distmod_to_pc(distmod)
                if unit is None:
                    # if the unit is not specified, guess based on the mean of
                    # the log of the distance
                    meanlogval = np.log10(value.value).mean()
                    if meanlogval > 6:
                        unit = u.Mpc
                    elif meanlogval > 3:
                        unit = u.kpc
                    elif meanlogval < -3:  # ~200 AU
                        unit = u.AU
                    else:
                        unit = u.pc

                # Continue on to take account of unit and other arguments
                # but a copy is already made, so no longer necessary
                copy = False

            elif value is None:
                raise ValueError('None of `value`, `z`, or `distmod` were '
                                 'given to Distance constructor')

        # now we have arguments like for a Quantity, so let it do the work
        distance = super(Distance, cls).__new__(
            cls, value, unit, dtype=dtype, copy=copy, order=order,
            subok=subok, ndmin=ndmin)

        if not distance.unit.is_equivalent(u.m):
            raise u.UnitsError('Unit "{0}" is not a length type'.format(unit))

        if not allow_negative and np.any(distance.value < 0):
            raise ValueError("Distance must be >= 0. Use the argument "
                             "'allow_negative=True' to allow negative values.")

        return distance

    def __quantity_subclass__(self, unit):
        # stay a Distance only while the unit remains a length
        if unit.is_equivalent(u.m):
            return Distance, True
        else:
            return super(Distance, self).__quantity_subclass__(unit)[0], False

    @property
    def z(self):
        """Short for ``self.compute_z()``"""
        return self.compute_z()

    def compute_z(self, cosmology=None):
        """
        The redshift for this distance assuming its physical distance is
        a luminosity distance.

        Parameters
        ----------
        cosmology : ``Cosmology`` or `None`
            The cosmology to assume for this calculation, or `None` to use the
            current cosmology (see `astropy.cosmology` for details).

        Returns
        -------
        z : float
            The redshift of this distance given the provided ``cosmology``.
        """
        from ..cosmology import luminosity_distance
        from scipy import optimize

        # FIXME: array: need to make this calculation more vector-friendly

        # minimize the squared residual between the trial luminosity
        # distance and this distance (in Mpc)
        f = lambda z, d, cos: (luminosity_distance(z, cos).value - d) ** 2
        return optimize.brent(f, (self.Mpc, cosmology))

    @property
    def distmod(self):
        """The distance modulus as a `~astropy.units.Quantity`"""
        val = 5. * np.log10(self.to(u.pc).value) - 5.
        return u.Quantity(val, u.mag)

    @classmethod
    def _distmod_to_pc(cls, dm):
        # invert distmod = 5 log10(d/pc) - 5
        dm = u.Quantity(dm, u.mag)
        return cls(10 ** ((dm.value + 5) / 5.), u.pc, copy=False)
@deprecated('v0.4', alternative='astropy.coordinates.CartesianRepresentation')
class CartesianPoints(u.Quantity):
    """
    A cartesian representation of a point in three-dimensional space.

    Parameters
    ----------
    x : `~astropy.units.Quantity` or array-like
        The first cartesian coordinate or a single array or
        `~astropy.units.Quantity` where the first dimension is length-3.
    y : `~astropy.units.Quantity` or array-like, optional
        The second cartesian coordinate.
    z : `~astropy.units.Quantity` or array-like, optional
        The third cartesian coordinate.
    unit : `~astropy.units.UnitBase` object or `None`
        The physical unit of the coordinate values. If ``x``, ``y``, or ``z``
        are quantities, they will be converted to this unit.
    dtype : `~numpy.dtype`, optional
        See `~astropy.units.Quantity`. Must be given as a keyword argument.
    copy : bool, optional
        See `~astropy.units.Quantity`. Must be given as a keyword argument.

    Raises
    ------
    UnitsError
        If the units on ``x``, ``y``, and ``z`` do not match or an invalid
        unit is given.
    ValueError
        If ``y`` and ``z`` don't match ``x``'s shape or ``x`` is not length-3
    TypeError
        If incompatible array types are passed into ``x``, ``y``, or ``z``

    """

    # this ensures that __array_wrap__ gets called for ufuncs even when
    # a quantity comes first, like ``3*u.m + c``
    __array_priority__ = 10001

    def __new__(cls, x, y=None, z=None, unit=None, dtype=None, copy=True):
        # Either ``x`` alone is a length-3 array, or all of x/y/z are given.
        if y is None and z is None:
            if len(x) != 3:
                raise ValueError('Input to CartesianPoints is not length 3')

            qarr = x
            if unit is None and hasattr(qarr, 'unit'):
                unit = qarr.unit  # for when a Quantity is given
        elif y is not None and z is not None:
            if unit is None:
                # they must all match units or this fails: take the unit of
                # the first Quantity-like argument and require the rest agree
                for coo in (x, y, z):
                    if hasattr(coo, 'unit'):
                        if unit is not None and coo.unit != unit:
                            raise u.UnitsError('Units for `x`, `y`, and `z` do '
                                               'not match in CartesianPoints ')
                        unit = coo.unit
                # if `unit` is still None at this point, it means none were
                # Quantties, which is fine, because it means the user wanted
                # the unit to be None
            else:
                # convert any that are like a Quantity to the given unit
                if hasattr(x, 'to'):
                    x = x.to(unit)
                if hasattr(y, 'to'):
                    y = y.to(unit)
                if hasattr(z, 'to'):
                    z = z.to(unit)

            qarr = [np.asarray(coo) for coo in (x, y, z)]
            if not (qarr[0].shape == qarr[1].shape == qarr[2].shape):
                raise ValueError("Shapes for `x`, `y`, and `z` don't match in "
                                 "CartesianPoints")
            # let the unit be whatever it is
        else:
            raise TypeError('Must give all of `x`, `y`, and `z` or just array in '
                            'CartesianPoints')
        try:
            # unit errors are re-raised with the right exception type below
            unit = _convert_to_and_validate_length_unit(unit, True)
        except TypeError as e:
            raise u.UnitsError(str(e))

        try:
            qarr = np.asarray(qarr)
        except ValueError as e:
            raise TypeError(str(e))

        # only integer/unsigned/float data make sense as coordinates
        if qarr.dtype.kind not in 'iuf':
            raise TypeError("Unsupported dtype '{0}'".format(qarr.dtype))

        return u.Quantity.__new__(cls, qarr, unit, dtype=dtype, copy=copy)

    def __quantity_subclass__(self, unit):
        # stay a CartesianPoints only while the unit remains a length
        if unit.is_equivalent(u.m):
            return CartesianPoints, True
        else:
            return u.Quantity.__quantity_subclass__(unit)[0], False

    def __array_wrap__(self, obj, context=None):
        # always convert to CartesianPoints because all operations that would
        # screw up the units are killed by _convert_to_and_validate_length_unit
        obj = u.Quantity.__array_wrap__(obj, context=context)

        # always prefer self's unit, if possible
        if obj.unit.is_equivalent(self.unit):
            return obj.to(self.unit)
        else:
            return obj

    @property
    def x(self):
        """
        The first cartesian coordinate as a `~astropy.units.Quantity`.
        """
        return self.view(u.Quantity)[0]

    @property
    def y(self):
        """
        The second cartesian coordinate as a `~astropy.units.Quantity`.
        """
        return self.view(u.Quantity)[1]

    @property
    def z(self):
        """
        The third cartesian coordinate as a `~astropy.units.Quantity`.
        """
        return self.view(u.Quantity)[2]

    def to_spherical(self):
        """
        Converts to the spherical representation of this point.

        Returns
        -------
        r : `~astropy.units.Quantity`
            The radial coordinate (in the same units as this `CartesianPoints`).
        lat : `~astropy.units.Quantity`
            The spherical coordinates latitude.
        lon : `~astropy.units.Quantity`
            The spherical coordinates longitude.

        """
        from .angles import Latitude, Longitude

        rarr, latarr, lonarr = cartesian_to_spherical(self.x, self.y, self.z)

        r = Distance(rarr, unit=self.unit)
        lat = Latitude(latarr, unit=u.radian)
        lon = Longitude(lonarr, unit=u.radian)

        return r, lat, lon


def _convert_to_and_validate_length_unit(unit, allow_dimensionless=False):
    """
    raises UnitsError if not a length unit
    """
    try:
        unit = u.Unit(unit)
        # dimensionless only passes when explicitly allowed by the caller
        assert (unit.is_equivalent(u.kpc) or
                allow_dimensionless and unit == u.dimensionless_unscaled)
    except (TypeError, AssertionError):
        raise u.UnitsError('Unit "{0}" is not a length type'.format(unit))

    return unit
# <------------transformation-related utility functions----------------->


def cartesian_to_spherical(x, y, z):
    """
    Converts 3D rectangular cartesian coordinates to spherical polar
    coordinates.

    Note that the resulting angles are latitude/longitude or
    elevation/azimuthal form. I.e., the origin is along the equator
    rather than at the north pole.

    .. note::
        This is a low-level function used internally in
        `astropy.coordinates`. It is provided for users if they really
        want to use it, but it is recommended that you use the
        `astropy.coordinates` coordinate systems.

    Parameters
    ----------
    x : scalar or array-like
        The first cartesian coordinate.
    y : scalar or array-like
        The second cartesian coordinate.
    z : scalar or array-like
        The third cartesian coordinate.

    Returns
    -------
    r : float or array
        The radial coordinate (in the same units as the inputs).
    lat : float or array
        The latitude in radians
    lon : float or array
        The longitude in radians
    """
    x_sq = x ** 2
    y_sq = y ** 2
    z_sq = z ** 2

    r = (x_sq + y_sq + z_sq) ** 0.5
    # distance from the polar (z) axis; makes `lat` a latitude measured
    # from the equator rather than a colatitude from the pole
    s = (x_sq + y_sq) ** 0.5

    # use the math module for pure scalars, numpy otherwise
    all_scalar = np.isscalar(x) and np.isscalar(y) and np.isscalar(z)
    arctan2 = math.atan2 if all_scalar else np.arctan2
    lon = arctan2(y, x)
    lat = arctan2(z, s)

    return r, lat, lon


def spherical_to_cartesian(r, lat, lon):
    """
    Converts spherical polar coordinates to rectangular cartesian
    coordinates.

    Note that the input angles should be in latitude/longitude or
    elevation/azimuthal form. I.e., the origin is along the equator
    rather than at the north pole.

    .. note::
        This is a low-level function used internally in
        `astropy.coordinates`. It is provided for users if they really
        want to use it, but it is recommended that you use the
        `astropy.coordinates` coordinate systems.

    Parameters
    ----------
    r : scalar or array-like
        The radial coordinate (in the same units as the inputs).
    lat : scalar or array-like
        The latitude in radians
    lon : scalar or array-like
        The longitude in radians

    Returns
    -------
    x : float or array
        The first cartesian coordinate.
    y : float or array
        The second cartesian coordinate.
    z : float or array
        The third cartesian coordinate.


    """
    # use the math module for pure scalars, numpy otherwise
    all_scalar = np.isscalar(r) and np.isscalar(lat) and np.isscalar(lon)
    sin, cos = (math.sin, math.cos) if all_scalar else (np.sin, np.cos)

    x = r * cos(lat) * cos(lon)
    y = r * cos(lat) * sin(lon)
    z = r * sin(lat)

    return x, y, z
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import numpy as np
from .. import units as u
from ..utils import OrderedDict
from . import Longitude, Latitude

try:
    # Not guaranteed available at setup time.
    from ..time import erfa_time
except ImportError:
    if not _ASTROPY_SETUP_:
        raise

__all__ = ['EarthLocation']

# translation between ellipsoid names and corresponding number used in ERFA
ELLIPSOIDS = OrderedDict([('WGS84', 1), ('GRS80', 2), ('WGS72', 3)])


def _check_ellipsoid(ellipsoid=None, default='WGS84'):
    # Validate an ellipsoid name, substituting `default` for None; raises
    # ValueError for names not listed in ELLIPSOIDS.
    if ellipsoid is None:
        ellipsoid = default
    if ellipsoid not in ELLIPSOIDS:
        raise ValueError('Ellipsoid {0} not among known ones ({1})'
                         .format(ellipsoid, ELLIPSOIDS.keys()))
    return ellipsoid


class EarthLocation(u.Quantity):
    """
    Location on Earth.

    Initialization is first attempted assuming geocentric (x, y, z) coordinates
    are given; if that fails, another attempt is made assuming geodetic
    coordinates (longitude, latitude, height above a reference ellipsoid).
    Internally, the coordinates are stored as geocentric.

    To ensure a specific type of coordinates is used, use the corresponding
    class methods (`from_geocentric` and `from_geodetic`) or initialize the
    arguments with names (``x``, ``y``, ``z`` for geocentric; ``lon``, ``lat``,
    ``height`` for geodetic). See the class methods for details.

    Notes
    -----
    For conversion to and from geodetic coordinates, the ERFA routines
    ``gc2gd`` and ``gd2gc`` are used. See https://github.com/liberfa/erfa
    """

    # default reference ellipsoid for geodetic conversions (overridable
    # per instance through the `ellipsoid` property)
    _ellipsoid = 'WGS84'
    # structured dtype used for storage: one record per location with
    # float64 'x', 'y', 'z' fields
    _location_dtype = np.dtype({'names': ['x', 'y', 'z'],
                                'formats': [np.float64]*3})
    # plain (3,)-float64 dtype used to view the data as an ordinary array
    _array_dtype = np.dtype((np.float64, (3,)))

    def __new__(cls, *args, **kwargs):
        # try geocentric first; fall back to geodetic, reporting both
        # failures if neither interpretation of the arguments works
        try:
            self = cls.from_geocentric(*args, **kwargs)
        except (u.UnitsError, TypeError) as exc_geocentric:
            try:
                self = cls.from_geodetic(*args, **kwargs)
            except Exception as exc_geodetic:
                raise TypeError('Coordinates could not be parsed as either '
                                'geocentric or geodetic, with respective '
                                'exceptions "{0}" and "{1}"'
                                .format(exc_geocentric, exc_geodetic))
        return self

    @classmethod
    def from_geocentric(cls, x, y, z, unit=None):
        """
        Location on Earth, initialized from geocentric coordinates.

        Parameters
        ----------
        x, y, z : `~astropy.units.Quantity` or array-like
            Cartesian coordinates. If not quantities, ``unit`` should be given.
        unit : `~astropy.units.UnitBase` object or None
            Physical unit of the coordinate values. If ``x``, ``y``, and/or
            ``z`` are quantities, they will be converted to this unit.

        Raises
        ------
        astropy.units.UnitsError
            If the units on ``x``, ``y``, and ``z`` do not match or an invalid
            unit is given.
        ValueError
            If the shapes of ``x``, ``y``, and ``z`` do not match.
        TypeError
            If ``x`` is not a `~astropy.units.Quantity` and no unit is given.
        """
        if unit is None:
            try:
                unit = x.unit
            except AttributeError:
                raise TypeError("Geocentric coordinates should be Quantities "
                                "unless an explicit unit is given.")
        else:
            unit = u.Unit(unit)

        if unit.physical_type != 'length':
            raise u.UnitsError("Geocentric coordinates should be in "
                               "units of length.")

        try:
            x = u.Quantity(x, unit, copy=False)
            y = u.Quantity(y, unit, copy=False)
            z = u.Quantity(z, unit, copy=False)
        except u.UnitsError:
            raise u.UnitsError("Geocentric coordinate units should all be "
                               "consistent.")

        x, y, z = np.broadcast_arrays(x, y, z)
        # pack the three coordinate arrays into the structured x/y/z dtype
        struc = np.empty(x.shape, cls._location_dtype)
        struc['x'], struc['y'], struc['z'] = x, y, z
        return super(EarthLocation, cls).__new__(cls, struc, unit, copy=False)

    @classmethod
    def from_geodetic(cls, lon, lat, height=0., ellipsoid=None):
        """
        Location on Earth, initialized from geodetic coordinates.

        Parameters
        ----------
        lon : `~astropy.coordinates.Longitude` or float
            Earth East longitude. Can be anything that initialises an
            `~astropy.coordinates.Angle` object (if float, in degrees).
        lat : `~astropy.coordinates.Latitude` or float
            Earth latitude. Can be anything that initialises an
            `~astropy.coordinates.Latitude` object (if float, in degrees).
        height : `~astropy.units.Quantity` or float, optional
            Height above reference ellipsoid (if float, in meters; default: 0).
        ellipsoid : str, optional
            Name of the reference ellipsoid to use (default: 'WGS84').
            Available ellipoids are: 'WGS84', 'GRS80', 'WGS72'.

        Raises
        ------
        astropy.units.UnitsError
            If the units on ``lon`` and ``lat`` are inconsistent with angular
            ones, or that on ``height`` with a length.
        ValueError
            If ``lon``, ``lat``, and ``height`` do not have the same shape, or
            if ``ellipsoid`` is not recognized as among the ones implemented.

        Notes
        -----
        For the conversion to geocentric coordinates, the ERFA routine
        ``gd2gc`` is used. See https://github.com/liberfa/erfa
        """
        ellipsoid = _check_ellipsoid(ellipsoid, default=cls._ellipsoid)
        lon = Longitude(lon, u.degree, wrap_angle=180*u.degree, copy=False)
        lat = Latitude(lat, u.degree, copy=False)
        # don't convert to m by default, so we can use the height unit below.
        if not isinstance(height, u.Quantity):
            height = u.Quantity(height, u.m, copy=False)
        # convert to float in units required for erfa routine, and ensure
        # all broadcast to same shape, and are at least 1-dimensional.
        _lon, _lat, _height = np.broadcast_arrays(lon.to(u.radian).value,
                                                  lat.to(u.radian).value,
                                                  height.to(u.m).value)
        # get geocentric coordinates. Have to give one-dimensional array.
        xyz = erfa_time.era_gd2gc(ELLIPSOIDS[ellipsoid], _lon.ravel(),
                                  _lat.ravel(), _height.ravel())
        # view the (N, 3) float result with the structured dtype and restore
        # the original shape; unit is meters (what era_gd2gc produces)
        self = xyz.view(cls._location_dtype, cls).reshape(lon.shape)
        self._unit = u.meter
        self._ellipsoid = ellipsoid
        # return in the same unit the height was given in
        return self.to(height.unit)

    @property
    def ellipsoid(self):
        """The default ellipsoid used to convert to geodetic coordinates."""
        return self._ellipsoid

    @ellipsoid.setter
    def ellipsoid(self, ellipsoid):
        self._ellipsoid = _check_ellipsoid(ellipsoid)

    @property
    def geodetic(self):
        """Convert to geodetic coordinates for the default ellipsoid."""
        return self.to_geodetic()

    def to_geodetic(self, ellipsoid=None):
        """Convert to geodetic coordinates.

        Parameters
        ----------
        ellipsoid : str, optional
            Reference ellipsoid to use. Default is the one the coordinates
            were initialized with. Available are: 'WGS84', 'GRS80', 'WGS72'

        Returns
        -------
        (lon, lat, height) : tuple
            The tuple contains instances of `~astropy.coordinates.Longitude`,
            `~astropy.coordinates.Latitude`, and `~astropy.units.Quantity`

        Raises
        ------
        ValueError
            if ``ellipsoid`` is not recognized as among the ones implemented.

        Notes
        -----
        For the conversion to geodetic coordinates, the ERFA routine
        ``gc2gd`` is used. See https://github.com/liberfa/erfa
        """
        ellipsoid = _check_ellipsoid(ellipsoid, default=self.ellipsoid)
        # view as a plain (N, 3) float array in meters for the ERFA call
        self_array = self.to(u.meter).view(self._array_dtype, np.ndarray)
        lon, lat, height = erfa_time.era_gc2gd(ELLIPSOIDS[ellipsoid],
                                               np.atleast_2d(self_array))
        return (Longitude(lon.squeeze() * u.radian, u.degree,
                          wrap_angle=180.*u.degree),
                Latitude(lat.squeeze() * u.radian, u.degree),
                u.Quantity(height.squeeze() * u.meter, self.unit))

    @property
    def longitude(self):
        """Longitude of the location, for the default ellipsoid."""
        return self.geodetic[0]

    @property
    def latitude(self):
        """Latitude of the location, for the default ellipsoid."""
        return self.geodetic[1]

    @property
    def height(self):
        """Height of the location, for the default ellipsoid."""
        return self.geodetic[2]

    # mostly for symmetry with geodedic and to_geodetic.
    @property
    def geocentric(self):
        """Convert to a tuple with X, Y, and Z as quantities"""
        return self.to_geocentric()

    def to_geocentric(self):
        """Convert to a tuple with X, Y, and Z as quantities"""
        return (self.x, self.y, self.z)

    @property
    def x(self):
        """The X component of the geocentric coordinates."""
        return self['x']

    @property
    def y(self):
        """The Y component of the geocentric coordinates."""
        return self['y']

    @property
    def z(self):
        """The Z component of the geocentric coordinates."""
        return self['z']

    def __getitem__(self, item):
        # field access (e.g. self['x']) yields a plain Quantity; positional
        # indexing keeps the EarthLocation class
        result = super(EarthLocation, self).__getitem__(item)
        if result.dtype is self.dtype:
            return result.view(self.__class__)
        else:
            return result.view(u.Quantity)

    def __array_finalize__(self, obj):
        super(EarthLocation, self).__array_finalize__(obj)
        # propagate the per-instance ellipsoid through views/copies
        if hasattr(obj, '_ellipsoid'):
            self._ellipsoid = obj._ellipsoid

    def __len__(self):
        if self.shape == ():
            raise IndexError('0-d EarthLocation arrays cannot be indexed')
        else:
            return super(EarthLocation, self).__len__()

    def to(self, unit, equivalencies=[]):
        # NOTE(review): mutable default mirrors Quantity.to's signature and
        # is never mutated here.
        # View as a plain (..., 3) Quantity for the unit conversion, then
        # view the converted values back with the structured dtype/shape.
        array_view = self.view(self._array_dtype, u.Quantity)
        converted = array_view.to(unit, equivalencies)
        return self._new_view(converted.view(self.dtype).reshape(self.shape),
                              unit)
    to.__doc__ = u.Quantity.to.__doc__
# Licensed under a 3-clause BSD style license - see LICENSE.rst

"""
This module contains standard functions for earth orientation, such as
precession and nutation.

This module is (currently) not intended to be part of the public API, but
is instead primarily for internal use in `coordinates`
"""

from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import numpy as np

from ..time import Time
from .. import units as u

# Julian dates of the B1950 and J2000 reference epochs, and the number of
# arcseconds per radian, used throughout this module.
jd1950 = Time('B1950', scale='tai').jd
jd2000 = Time('J2000', scale='utc').jd
_asecperrad = u.radian.to(u.arcsec)


def eccentricity(jd):
    """
    Computes the eccentricity of the Earth's orbit at the requested Julian
    Date.

    Parameters
    ----------
    jd : scalar or array-like
        julian date at which to compute the eccentricity

    Returns
    -------
    eccentricity : scalar or array
        The eccentricity (or array of eccentricities)

    References
    ----------
    * Explanatory Supplement to the Astronomical Almanac: P. Kenneth
      Seidelmann (ed), University Science Books (1992).
    """
    # Julian centuries since B1950
    T = (jd - jd1950) / 36525.0

    # polynomial coefficients, highest order first (np.polyval convention)
    p = (-0.000000126, - 0.00004193, 0.01673011)

    return np.polyval(p, T)
+ + Parameters + ---------- + jd : scalar or array-like + julian date at which to compute the mean longitude of perigee + + returns + ------- + mean_lon_of_perigee : scalar or array + Mean longitude of perigee in degrees (or array of mean longitudes) + + References + ---------- + * Explanatory Supplement to the Astronomical Almanac: P. Kenneth + Seidelmann (ed), University Science Books (1992). + """ + T = (jd - jd1950) / 36525.0 + + p = (0.012, 1.65, 6190.67, 1015489.951) + + return np.polyval(p, T) / 3600. + + +def obliquity(jd, algorithm=2006): + """ + Computes the obliquity of the Earth at the requested Julian Date. + + Parameters + ---------- + jd : scalar or array-like + julian date at which to compute the obliquity + algorithm : int + Year of algorithm based on IAU adoption. Can be 2006, 2000 or 1980. The + 2006 algorithm is mentioned in Circular 179, but the canonical reference + for the IAU adoption is apparently Hilton et al. 06 is composed of the + 1980 algorithm with a precession-rate correction due to the 2000 + precession models, and a description of the 1980 algorithm can be found + in the Explanatory Supplement to the Astronomical Almanac. + + returns + ------- + obliquity : scalar or array + Mean obliquity in degrees (or array of obliquities) + + References + ---------- + * Hilton, J. et al., 2006, Celest.Mech.Dyn.Astron. 94, 351. 2000 + * USNO Circular 179 + * Explanatory Supplement to the Astronomical Almanac: P. Kenneth + Seidelmann (ed), University Science Books (1992). + """ + T = (jd - jd2000) / 36525.0 + + if algorithm == 2006: + p = (-0.0000000434, -0.000000576, 0.00200340, -0.0001831, -46.836769, 84381.406) + corr = 0 + elif algorithm == 2000: + p = (0.001813, -0.00059, -46.8150, 84381.448) + corr = -0.02524 * T + elif algorithm == 1980: + p = (0.001813, -0.00059, -46.8150, 84381.448) + corr = 0 + else: + raise ValueError('invalid algorithm year for computing obliquity') + + return (np.polyval(p, T) + corr) / 3600. 
+ + +# TODO: replace this with SOFA equivalent +def precession_matrix_Capitaine(fromepoch, toepoch): + """ + Computes the precession matrix from one julian epoch to another. + The exact method is based on Capitaine et al. 2003, which should + match the IAU 2006 standard. + + Parameters + ---------- + fromepoch : `~astropy.time.Time` + The epoch to precess from. + toepoch : `~astropy.time.Time` + The epoch to precess to. + + Returns + ------- + pmatrix : 3x3 array + Precession matrix to get from ``fromepoch`` to ``toepoch`` + + References + ---------- + USNO Circular 179 + """ + mat_fromto2000 = _precess_from_J2000_Capitaine(fromepoch.jyear).T + mat_2000toto = _precess_from_J2000_Capitaine(toepoch.jyear) + + return np.dot(mat_2000toto, mat_fromto2000) + + +def _precess_from_J2000_Capitaine(epoch): + """ + Computes the precession matrix from J2000 to the given Julian Epoch. + Expression from from Capitaine et al. 2003 as expressed in the USNO + Circular 179. This should match the IAU 2006 standard from SOFA. + + Parameters + ---------- + epoch : scalar + The epoch as a julian year number (e.g. J2000 is 2000.0) + + """ + from .angles import rotation_matrix + + T = (epoch - 2000.0) / 100.0 + # from USNO circular + pzeta = (-0.0000003173, -0.000005971, 0.01801828, 0.2988499, 2306.083227, 2.650545) + pz = (-0.0000002904, -0.000028596, 0.01826837, 1.0927348, 2306.077181, -2.650545) + ptheta = (-0.0000001274, -0.000007089, -0.04182264, -0.4294934, 2004.191903, 0) + zeta = np.polyval(pzeta, T) / 3600.0 + z = np.polyval(pz, T) / 3600.0 + theta = np.polyval(ptheta, T) / 3600.0 + + return rotation_matrix(-z, 'z') *\ + rotation_matrix(theta, 'y') *\ + rotation_matrix(-zeta, 'z') + + +def _precession_matrix_besselian(epoch1, epoch2): + """ + computes the precession matrix from one Besselian epoch to another using + Newcomb's method. 
+ + ``epoch1`` and ``epoch2`` are in besselian year numbers + """ + from .angles import rotation_matrix + + # tropical years + t1 = (epoch1 - 1850.0) / 1000.0 + t2 = (epoch2 - 1850.0) / 1000.0 + dt = t2 - t1 + + zeta1 = 23035.545 + t1 * 139.720 + 0.060 * t1 * t1 + zeta2 = 30.240 - 0.27 * t1 + zeta3 = 17.995 + pzeta = (zeta3, zeta2, zeta1, 0) + zeta = np.polyval(pzeta, dt) / 3600 + + z1 = 23035.545 + t1 * 139.720 + 0.060 * t1 * t1 + z2 = 109.480 + 0.39 * t1 + z3 = 18.325 + pz = (z3, z2, z1, 0) + z = np.polyval(pz, dt) / 3600 + + theta1 = 20051.12 - 85.29 * t1 - 0.37 * t1 * t1 + theta2 = -42.65 - 0.37 * t1 + theta3 = -41.8 + ptheta = (theta3, theta2, theta1, 0) + theta = np.polyval(ptheta, dt) / 3600 + + return rotation_matrix(-z, 'z') *\ + rotation_matrix(theta, 'y') *\ + rotation_matrix(-zeta, 'z') + + +def _load_nutation_data(datastr, seriestype): + """ + Loads nutation series from data stored in string form. + + Seriestype can be 'lunisolar' or 'planetary' + """ + + if seriestype == 'lunisolar': + dtypes = [('nl', int), + ('nlp', int), + ('nF', int), + ('nD', int), + ('nOm', int), + ('ps', float), + ('pst', float), + ('pc', float), + ('ec', float), + ('ect', float), + ('es', float)] + elif seriestype == 'planetary': + dtypes = [('nl', int), + ('nF', int), + ('nD', int), + ('nOm', int), + ('nme', int), + ('nve', int), + ('nea', int), + ('nma', int), + ('nju', int), + ('nsa', int), + ('nur', int), + ('nne', int), + ('npa', int), + ('sp', int), + ('cp', int), + ('se', int), + ('ce', int)] + else: + raise ValueError('requested invalid nutation series type') + + lines = [l for l in datastr.split('\n') + if not l.startswith('#') if not l.strip() == ''] + + lists = [[] for _ in dtypes] + for l in lines: + for i, e in enumerate(l.split(' ')): + lists[i].append(dtypes[i][1](e)) + return np.rec.fromarrays(lists, names=[e[0] for e in dtypes]) + +_nut_data_00b = """ +#l lprime F D Omega longitude_sin longitude_sin*t longitude_cos obliquity_cos obliquity_cos*t,obliquity_sin + 
+0 0 0 0 1 -172064161.0 -174666.0 33386.0 92052331.0 9086.0 15377.0 +0 0 2 -2 2 -13170906.0 -1675.0 -13696.0 5730336.0 -3015.0 -4587.0 +0 0 2 0 2 -2276413.0 -234.0 2796.0 978459.0 -485.0 1374.0 +0 0 0 0 2 2074554.0 207.0 -698.0 -897492.0 470.0 -291.0 +0 1 0 0 0 1475877.0 -3633.0 11817.0 73871.0 -184.0 -1924.0 +0 1 2 -2 2 -516821.0 1226.0 -524.0 224386.0 -677.0 -174.0 +1 0 0 0 0 711159.0 73.0 -872.0 -6750.0 0.0 358.0 +0 0 2 0 1 -387298.0 -367.0 380.0 200728.0 18.0 318.0 +1 0 2 0 2 -301461.0 -36.0 816.0 129025.0 -63.0 367.0 +0 -1 2 -2 2 215829.0 -494.0 111.0 -95929.0 299.0 132.0 +0 0 2 -2 1 128227.0 137.0 181.0 -68982.0 -9.0 39.0 +-1 0 2 0 2 123457.0 11.0 19.0 -53311.0 32.0 -4.0 +-1 0 0 2 0 156994.0 10.0 -168.0 -1235.0 0.0 82.0 +1 0 0 0 1 63110.0 63.0 27.0 -33228.0 0.0 -9.0 +-1 0 0 0 1 -57976.0 -63.0 -189.0 31429.0 0.0 -75.0 +-1 0 2 2 2 -59641.0 -11.0 149.0 25543.0 -11.0 66.0 +1 0 2 0 1 -51613.0 -42.0 129.0 26366.0 0.0 78.0 +-2 0 2 0 1 45893.0 50.0 31.0 -24236.0 -10.0 20.0 +0 0 0 2 0 63384.0 11.0 -150.0 -1220.0 0.0 29.0 +0 0 2 2 2 -38571.0 -1.0 158.0 16452.0 -11.0 68.0 +0 -2 2 -2 2 32481.0 0.0 0.0 -13870.0 0.0 0.0 +-2 0 0 2 0 -47722.0 0.0 -18.0 477.0 0.0 -25.0 +2 0 2 0 2 -31046.0 -1.0 131.0 13238.0 -11.0 59.0 +1 0 2 -2 2 28593.0 0.0 -1.0 -12338.0 10.0 -3.0 +-1 0 2 0 1 20441.0 21.0 10.0 -10758.0 0.0 -3.0 +2 0 0 0 0 29243.0 0.0 -74.0 -609.0 0.0 13.0 +0 0 2 0 0 25887.0 0.0 -66.0 -550.0 0.0 11.0 +0 1 0 0 1 -14053.0 -25.0 79.0 8551.0 -2.0 -45.0 +-1 0 0 2 1 15164.0 10.0 11.0 -8001.0 0.0 -1.0 +0 2 2 -2 2 -15794.0 72.0 -16.0 6850.0 -42.0 -5.0 +0 0 -2 2 0 21783.0 0.0 13.0 -167.0 0.0 13.0 +1 0 0 -2 1 -12873.0 -10.0 -37.0 6953.0 0.0 -14.0 +0 -1 0 0 1 -12654.0 11.0 63.0 6415.0 0.0 26.0 +-1 0 2 2 1 -10204.0 0.0 25.0 5222.0 0.0 15.0 +0 2 0 0 0 16707.0 -85.0 -10.0 168.0 -1.0 10.0 +1 0 2 2 2 -7691.0 0.0 44.0 3268.0 0.0 19.0 +-2 0 2 0 0 -11024.0 0.0 -14.0 104.0 0.0 2.0 +0 1 2 0 2 7566.0 -21.0 -11.0 -3250.0 0.0 -5.0 +0 0 2 2 1 -6637.0 -11.0 25.0 3353.0 0.0 14.0 +0 -1 2 0 2 -7141.0 
21.0 8.0 3070.0 0.0 4.0 +0 0 0 2 1 -6302.0 -11.0 2.0 3272.0 0.0 4.0 +1 0 2 -2 1 5800.0 10.0 2.0 -3045.0 0.0 -1.0 +2 0 2 -2 2 6443.0 0.0 -7.0 -2768.0 0.0 -4.0 +-2 0 0 2 1 -5774.0 -11.0 -15.0 3041.0 0.0 -5.0 +2 0 2 0 1 -5350.0 0.0 21.0 2695.0 0.0 12.0 +0 -1 2 -2 1 -4752.0 -11.0 -3.0 2719.0 0.0 -3.0 +0 0 0 -2 1 -4940.0 -11.0 -21.0 2720.0 0.0 -9.0 +-1 -1 0 2 0 7350.0 0.0 -8.0 -51.0 0.0 4.0 +2 0 0 -2 1 4065.0 0.0 6.0 -2206.0 0.0 1.0 +1 0 0 2 0 6579.0 0.0 -24.0 -199.0 0.0 2.0 +0 1 2 -2 1 3579.0 0.0 5.0 -1900.0 0.0 1.0 +1 -1 0 0 0 4725.0 0.0 -6.0 -41.0 0.0 3.0 +-2 0 2 0 2 -3075.0 0.0 -2.0 1313.0 0.0 -1.0 +3 0 2 0 2 -2904.0 0.0 15.0 1233.0 0.0 7.0 +0 -1 0 2 0 4348.0 0.0 -10.0 -81.0 0.0 2.0 +1 -1 2 0 2 -2878.0 0.0 8.0 1232.0 0.0 4.0 +0 0 0 1 0 -4230.0 0.0 5.0 -20.0 0.0 -2.0 +-1 -1 2 2 2 -2819.0 0.0 7.0 1207.0 0.0 3.0 +-1 0 2 0 0 -4056.0 0.0 5.0 40.0 0.0 -2.0 +0 -1 2 2 2 -2647.0 0.0 11.0 1129.0 0.0 5.0 +-2 0 0 0 1 -2294.0 0.0 -10.0 1266.0 0.0 -4.0 +1 1 2 0 2 2481.0 0.0 -7.0 -1062.0 0.0 -3.0 +2 0 0 0 1 2179.0 0.0 -2.0 -1129.0 0.0 -2.0 +-1 1 0 1 0 3276.0 0.0 1.0 -9.0 0.0 0.0 +1 1 0 0 0 -3389.0 0.0 5.0 35.0 0.0 -2.0 +1 0 2 0 0 3339.0 0.0 -13.0 -107.0 0.0 1.0 +-1 0 2 -2 1 -1987.0 0.0 -6.0 1073.0 0.0 -2.0 +1 0 0 0 2 -1981.0 0.0 0.0 854.0 0.0 0.0 +-1 0 0 1 0 4026.0 0.0 -353.0 -553.0 0.0 -139.0 +0 0 2 1 2 1660.0 0.0 -5.0 -710.0 0.0 -2.0 +-1 0 2 4 2 -1521.0 0.0 9.0 647.0 0.0 4.0 +-1 1 0 1 1 1314.0 0.0 0.0 -700.0 0.0 0.0 +0 -2 2 -2 1 -1283.0 0.0 0.0 672.0 0.0 0.0 +1 0 2 2 1 -1331.0 0.0 8.0 663.0 0.0 4.0 +-2 0 2 2 2 1383.0 0.0 -2.0 -594.0 0.0 -2.0 +-1 0 0 0 2 1405.0 0.0 4.0 -610.0 0.0 2.0 +1 1 2 -2 2 1290.0 0.0 0.0 -556.0 0.0 0.0 +"""[1:-1] +_nut_data_00b = _load_nutation_data(_nut_data_00b, 'lunisolar') + +# TODO: replace w/SOFA equivalent + + +def nutation_components2000B(jd): + """ + Computes nutation components following the IAU 2000B specification + + Parameters + ---------- + jd : scalar + epoch at which to compute the nutation components as a JD + + Returns + ------- + eps : 
float + epsilon in radians + dpsi : float + dpsi in radians + deps : float + depsilon in raidans + """ + epsa = np.radians(obliquity(jd, 2000)) + t = (jd - jd2000) / 36525 + + # Fundamental (Delaunay) arguments from Simon et al. (1994) via SOFA + # Mean anomaly of moon + el = ((485868.249036 + 1717915923.2178 * t) % 1296000) / _asecperrad + # Mean anomaly of sun + elp = ((1287104.79305 + 129596581.0481 * t) % 1296000) / _asecperrad + # Mean argument of the latitude of Moon + F = ((335779.526232 + 1739527262.8478 * t) % 1296000) / _asecperrad + # Mean elongation of the Moon from Sun + D = ((1072260.70369 + 1602961601.2090 * t) % 1296000) / _asecperrad + # Mean longitude of the ascending node of Moon + Om = ((450160.398036 + -6962890.5431 * t) % 1296000) / _asecperrad + + # compute nutation series using array loaded from data directory + dat = _nut_data_00b + arg = dat.nl * el + dat.nlp * elp + dat.nF * F + dat.nD * D + dat.nOm * Om + sarg = np.sin(arg) + carg = np.cos(arg) + + p1u_asecperrad = _asecperrad * 1e7 # 0.1 microasrcsecperrad + dpsils = np.sum((dat.ps + dat.pst * t) * sarg + dat.pc * carg) / p1u_asecperrad + depsls = np.sum((dat.ec + dat.ect * t) * carg + dat.es * sarg) / p1u_asecperrad + # fixed offset in place of planetary tersm + m_asecperrad = _asecperrad * 1e3 # milliarcsec per rad + dpsipl = -0.135 / m_asecperrad + depspl = 0.388 / m_asecperrad + + return epsa, dpsils + dpsipl, depsls + depspl # all in radians + + +def nutation_matrix(epoch): + """ + Nutation matrix generated from nutation components. 
+ + Matrix converts from mean coordinate to true coordinate as + r_true = M * r_mean + """ + from .angles import rotation_matrix + + # TODO: implement higher precision 2006/2000A model if requested/needed + epsa, dpsi, deps = nutation_components2000B(epoch.jd) # all in radians + + rot1 = rotation_matrix(-(epsa + deps), 'x', False) + rot2 = rotation_matrix(-dpsi, 'z', False) + rot3 = rotation_matrix(epsa, 'x', False) + + return rot1 * rot2 * rot3 diff --git a/astropy/coordinates/errors.py b/astropy/coordinates/errors.py new file mode 100644 index 0000000..afac457 --- /dev/null +++ b/astropy/coordinates/errors.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +''' This module defines custom errors and exceptions used in astropy.coordinates. +''' +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from ..utils.exceptions import AstropyWarning + +__all__ = ['RangeError', 'BoundsError', 'IllegalHourError', + 'IllegalMinuteError', 'IllegalSecondError', 'ConvertError', + 'IllegalHourWarning', 'IllegalMinuteWarning', 'IllegalSecondWarning'] + + +class RangeError(ValueError): + """ + Raised when some part of an angle is out of its valid range. + """ + + +class BoundsError(RangeError): + """ + Raised when an angle is outside of its user-specified bounds. + """ + + +class IllegalHourError(RangeError): + """ + Raised when an hour value is not in the range [0,24). + + Parameters + ---------- + hour : int, float + + Examples + -------- + + .. code-block:: python + + if not 0 <= hr < 24: + raise IllegalHourError(hour) + """ + def __init__(self, hour): + self.hour = hour + + def __str__(self): + return "An invalid value for 'hours' was found ('{0}'); must be in the range [0,24).".format(self.hour) + + +class IllegalHourWarning(AstropyWarning): + """ + Raised when an hour value is 24. 
+ + Parameters + ---------- + hour : int, float + """ + def __init__(self, hour, alternativeactionstr=None): + self.hour = hour + self.alternativeactionstr = alternativeactionstr + + def __str__(self): + message = "'hour' was found to be '{0}', which is not in range (-24, 24).".format(self.hour) + if self.alternativeactionstr is not None: + message += ' ' + self.alternativeactionstr + return message + + +class IllegalMinuteError(RangeError): + """ + Raised when an minute value is not in the range [0,60]. + + Parameters + ---------- + minute : int, float + + Examples + -------- + + .. code-block:: python + + if not 0 <= min < 60: + raise IllegalMinuteError(minute) + + """ + def __init__(self, minute): + self.minute = minute + + def __str__(self): + return "An invalid value for 'minute' was found ('{0}'); should be in the range [0,60).".format(self.minute) + + +class IllegalMinuteWarning(AstropyWarning): + """ + Raised when a minute value is 60. + + Parameters + ---------- + minute : int, float + """ + def __init__(self, minute, alternativeactionstr=None): + self.minute = minute + self.alternativeactionstr = alternativeactionstr + + def __str__(self): + message = "'minute' was found to be '{0}', which is not in range [0,60).".format(self.minute) + if self.alternativeactionstr is not None: + message += ' ' + self.alternativeactionstr + return message + + +class IllegalSecondError(RangeError): + """ + Raised when an second value (time) is not in the range [0,60]. + + Parameters + ---------- + second : int, float + + Examples + -------- + + .. code-block:: python + + if not 0 <= sec < 60: + raise IllegalSecondError(second) + """ + def __init__(self, second): + self.second = second + + def __str__(self): + return "An invalid value for 'second' was found ('{0}'); should be in the range [0,60).".format(self.second) + + +class IllegalSecondWarning(AstropyWarning): + """ + Raised when a second value is 60. 
+ + Parameters + ---------- + second : int, float + """ + def __init__(self, second, alternativeactionstr=None): + self.second = second + self.alternativeactionstr = alternativeactionstr + + def __str__(self): + message = "'second' was found to be '{0}', which is not in range [0,60).".format(self.second) + if self.alternativeactionstr is not None: + message += ' ' + self.alternativeactionstr + return message + + +# TODO: consider if this should be used to `units`? +class UnitsError(ValueError): + """ + Raised if units are missing or invalid. + """ + + +class ConvertError(Exception): + """ + Raised if a coordinate system cannot be converted to another + """ diff --git a/astropy/coordinates/matching.py b/astropy/coordinates/matching.py new file mode 100644 index 0000000..9f3993d --- /dev/null +++ b/astropy/coordinates/matching.py @@ -0,0 +1,189 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +This module contains functions for matching coordinate catalogs. +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import numpy as np + +from ..extern import six +from .representation import UnitSphericalRepresentation + +__all__ = ['match_coordinates_3d', 'match_coordinates_sky'] + + +def match_coordinates_3d(matchcoord, catalogcoord, nthneighbor=1, storekdtree='_kdtree_3d'): + """ + Finds the nearest 3-dimensional matches of a coordinate or coordinates in + a set of catalog coordinates. + + This finds the 3-dimensional closest neighbor, which is only different + from the on-sky distance if ``distance`` is set in either ``matchcoord`` + or ``catalogcoord``. + + Parameters + ---------- + matchcoord : `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord` + The coordinate(s) to match to the catalog. + catalogcoord : `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord` + The base catalog in which to search for matches. 
Typically this will + be a coordinate object that is an array (i.e., + ``catalogcoord.isscalar == False``) + nthneighbor : int, optional + Which closest neighbor to search for. Typically ``1`` is desired here, + as that is correct for matching one set of coordinates to another. + The next likely use case is ``2``, for matching a coordinate catalog + against *itself* (``1`` is inappropriate because each point will find + itself as the closest match). + storekdtree : bool or str, optional + If a string, will store the KD-Tree used for the computation + in the ``catalogcoord``, as an attribute in ``catalogcoord`` with the + provided name. This dramatically speeds up subsequent calls with the + same catalog. If False, the KD-Tree is discarded after use. + + Returns + ------- + idx : integer array + Indecies into ``catalogcoord`` to get the matched points for each + ``matchcoord``. Shape matches ``matchcoord``. + sep2d : `~astropy.coordinates.Angle` + The on-sky separation between the closest match for each ``matchcoord`` + and the ``matchcoord``. Shape matches ``matchcoord``. + dist3d : `~astropy.units.Quantity` + The 3D distance between the closest match for each ``matchcoord`` and + the ``matchcoord``. Shape matches ``matchcoord``. + + Notes + ----- + This function requires `SciPy `_ to be installed + or it will fail. 
+ """ + from warnings import warn + + #without scipy this will immediately fail + from scipy import spatial + try: + KDTree = spatial.cKDTree + except: + warn('C-base KD tree not found, falling back on (much slower) ' + 'python implementation') + KDTree = spatial.KDTree + + if storekdtree is True: # backwards compatibility for pre v0.4 + storekdtree = '_kdtree' + + # figure out where any cached KDTree might be + if isinstance(storekdtree, six.string_types): + kdt = getattr(catalogcoord, storekdtree, None) + if kdt is not None and not isinstance(kdt, KDTree): + raise ValueError('Invalid `storekdtree` string:' + storekdtree) + elif isinstance(storekdtree, KDTree): + kdt = storekdtree + storekdtree = None + elif not storekdtree: + kdt = None + else: + raise ValueError('Invalid `storekdtree` argument:' + + str(storekdtree)) + + if kdt is None: + #need to build the cartesian KD-tree for the catalog + cartxyz = catalogcoord.cartesian.xyz + flatxyz = cartxyz.reshape((3, np.prod(cartxyz.shape) // 3)) + kdt = KDTree(flatxyz.value.T) + + #make sure coordinate systems match + matchcoord = matchcoord.transform_to(catalogcoord) + + #make sure units match + catunit = catalogcoord.cartesian.x.unit + matchxyz = matchcoord.cartesian.xyz.to(catunit) + + matchflatxyz = matchxyz.reshape((3, np.prod(matchxyz.shape) // 3)) + dist, idx = kdt.query(matchflatxyz.T, nthneighbor) + + if nthneighbor > 1: # query gives 1D arrays if k=1, 2D arrays otherwise + dist = dist[:, -1] + idx = idx[:, -1] + + if storekdtree: + #cache the kdtree in `catalogcoord` + setattr(catalogcoord, storekdtree, kdt) + + sep2d = catalogcoord[idx].separation(matchcoord) + return idx.reshape(matchxyz.shape[1:]), sep2d, dist.reshape(matchxyz.shape[1:]) * catunit + + +def match_coordinates_sky(matchcoord, catalogcoord, nthneighbor=1, storekdtree='_kdtree_sky'): + """ + Finds the nearest on-sky matches of a coordinate or coordinates in + a set of catalog coordinates. 
+ + This finds the on-sky closest neighbor, which is only different from the + 3-dimensional match if ``distance`` is set in either ``matchcoord`` + or ``catalogcoord``. + + Parameters + ---------- + matchcoord : `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord` + The coordinate(s) to match to the catalog. + catalogcoord : `~astropy.coordinates.BaseCoordinateFrame` or `~astropy.coordinates.SkyCoord` + The base catalog in which to search for matches. Typically this will + be a coordinate object that is an array (i.e., + ``catalogcoord.isscalar == False``) + nthneighbor : int, optional + Which closest neighbor to search for. Typically ``1`` is desired here, + as that is correct for matching one set of coordinates to another. + The next likely use case is ``2``, for matching a coordinate catalog + against *itself* (``1`` is inappropriate because each point will find + itself as the closest match). + storekdtree : bool or str, optional + If a string, will store the KD-Tree used for the computation + in the ``catalogcoord``, as an attrbute in ``catalogcoord`` with the + provided name. This dramatically speeds up subsequent calls with the + same catalog. If False, the KD-Tree is discarded after use. + + Returns + ------- + idx : integer array + Indecies into ``catalogcoord`` to get the matched points for each + ``matchcoord``. Shape matches ``matchcoord``. + sep2d : `~astropy.coordinates.Angle` + The on-sky separation between the closest match for each + ``matchcoord`` and the ``matchcoord``. Shape matches ``matchcoord``. + dist3d : `~astropy.units.Quantity` + The 3D distance between the closest match for each ``matchcoord`` and + the ``matchcoord``. Shape matches ``matchcoord``. If either + ``matchcoord`` or ``catalogcoord`` don't have a distance, this is the 3D + distance on the unit sphere, rather than a true distance. + + Notes + ----- + This function requires `SciPy `_ to be installed + or it will fail. 
+ """ + + # send to catalog frame + newmatch = matchcoord.transform_to(catalogcoord) + + #strip out distance info + match_urepr = newmatch.data.represent_as(UnitSphericalRepresentation) + newmatch_u = newmatch.realize_frame(match_urepr) + + cat_urepr = catalogcoord.data.represent_as(UnitSphericalRepresentation) + newcat_u = catalogcoord.realize_frame(cat_urepr) + + idx, sep2d, sep3d = match_coordinates_3d(newmatch_u, newcat_u, nthneighbor, storekdtree) + # sep3d is *wrong* above, because the distance information was removed, + # unless one of the catalogs doesn't have a real distance + if not (isinstance(catalogcoord.data, UnitSphericalRepresentation) or + isinstance(newmatch.data, UnitSphericalRepresentation)): + sep3d = catalogcoord[idx].separation_3d(newmatch) + + #update the kdtree on the actual passed-in coordinate + if storekdtree: + setattr(catalogcoord, storekdtree, getattr(newcat_u, storekdtree)) + + return idx, sep2d, sep3d diff --git a/astropy/coordinates/name_resolve.py b/astropy/coordinates/name_resolve.py new file mode 100644 index 0000000..19a552d --- /dev/null +++ b/astropy/coordinates/name_resolve.py @@ -0,0 +1,193 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +This module contains convenience functions for getting a coordinate object +for a named object by querying SESAME and getting the first returned result. +Note that this is intended to be a convenience, and is very simple. If you +need precise coordinates for an object you should find the appropriate +reference for that measurement and input the coordinates manually. +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +# Standard library +import os +import re +import socket + +# Astropy +from .. import config as _config +from ..extern import six +from ..extern.six.moves import urllib +from .. 
import units as u +from .sky_coordinate import SkyCoord +from ..utils import data +from ..utils import state + +__all__ = ["get_icrs_coordinates"] + + +class sesame_url(state.ScienceState): + """ + The URL(s) to Sesame's web-queryable database. + """ + _value = ["http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame/", + "http://vizier.cfa.harvard.edu/viz-bin/nph-sesame/"] + + @classmethod + def validate(cls, value): + # TODO: Implement me + return value + + +SESAME_URL = state.ScienceStateAlias( + "0.4", "SESAME_URL", "sesame_url", sesame_url, cfgtype="list") + + +class sesame_database(state.ScienceState): + """ + This specifies the default database that SESAME will query when + using the name resolve mechanism in the coordinates + subpackage. Default is to search all databases, but this can be + 'all', 'simbad', 'ned', or 'vizier'. + """ + _value = 'all' + + @classmethod + def validate(cls, value): + if value not in ['all', 'simbad', 'ned', 'vizier']: + raise ValueError("Unknown database '{0}'".format(value)) + return value + + +SESAME_DATABASE = state.ScienceStateAlias( + "0.4", "SESAME_DATABASE", "sesame_database", sesame_database) + + +NAME_RESOLVE_TIMEOUT = _config.ConfigAlias( + '0.4', "NAME_RESOLVE_TIMEOUT", "remote_timeout", + "astropy.coordinates.name_resolve", "astropy.utils.data") + + +class NameResolveError(Exception): + pass + + +def _parse_response(resp_data): + """ + Given a string response from SESAME, parse out the coordinates by looking + for a line starting with a J, meaning ICRS J2000 coordinates. + + Parameters + ---------- + resp_data : str + The string HTTP response from SESAME. + + Returns + ------- + ra : str + The string Right Ascension parsed from the HTTP response. + dec : str + The string Declination parsed from the HTTP response. 
+ """ + + pattr = re.compile(r"%J\s*([0-9\.]+)\s*([\+\-\.0-9]+)") + matched = pattr.search(resp_data.decode('utf-8')) + + if matched is None: + return None, None + else: + ra, dec = matched.groups() + return ra, dec + + +def get_icrs_coordinates(name): + """ + Retrieve an ICRS object by using an online name resolving service to + retrieve coordinates for the specified name. By default, this will + search all available databases until a match is found. If you would like + to specify the database, use the science state + ``astropy.coordinates.name_resolve.sesame_database``. You can also + specify a list of servers to use for querying Sesame using the science + state ``astropy.coordinates.name_resolve.sesame_url``. This will try + each one in order until a valid response is returned. By default, this + list includes the main Sesame host and a mirror at vizier. The + configuration item `astropy.utils.data.Conf.remote_timeout` controls the + number of seconds to wait for a response from the server before giving + up. + + Parameters + ---------- + name : str + The name of the object to get coordinates for, e.g. ``'M42'``. + + Returns + ------- + coord : `astropy.coordinates.ICRS` object + The object's coordinates in the ICRS frame. + + """ + from .. 
import conf + + database = sesame_database.get() + # The web API just takes the first letter of the database name + db = database.upper()[0] + + # Make sure we don't have duplicates in the url list + urls = [] + domains = [] + for url in sesame_url.get(): + domain = urllib.parse.urlparse(url).netloc + + # Check for duplicates + if domain not in domains: + domains.append(domain) + + # Add the query to the end of the url, add to url list + fmt_url = os.path.join(url, "{db}?{name}") + fmt_url = fmt_url.format(name=urllib.parse.quote(name), db=db) + urls.append(fmt_url) + + for url in urls: + try: + # Retrieve ascii name resolve data from CDS + resp = urllib.request.urlopen(url, timeout=data.conf.remote_timeout) + resp_data = resp.read() + break + except urllib.error.URLError as e: + # This catches a timeout error, see: + # http://stackoverflow.com/questions/2712524/handling-urllib2s-timeout-python + if isinstance(e.reason, socket.timeout): + # If it was a timeout, try with the next URL + continue + else: + raise NameResolveError( + "Unable to retrieve coordinates for name '{0}'; " + "connection timed out".format(name)) + except socket.timeout: + # There are some cases where urllib2 does not catch socket.timeout + # especially while receiving response data on an already previously + # working request + raise NameResolveError( + "Unable to retrieve coordinates for name '{0}'; connection " + "timed out".format(name)) + + # All Sesame URL's timed out... + else: + raise NameResolveError("All Sesame queries timed out. 
Unable to " + "retrieve coordinates.") + + ra, dec = _parse_response(resp_data) + + if ra is None and dec is None: + if db == "A": + err = "Unable to find coordinates for name '{0}'".format(name) + else: + err = "Unable to find coordinates for name '{0}' in database {1}"\ + .format(name, database) + + raise NameResolveError(err) + + # Return SkyCoord object + sc = SkyCoord(ra=ra, dec=dec, unit=(u.degree, u.degree), frame='icrs') + return sc diff --git a/astropy/coordinates/representation.py b/astropy/coordinates/representation.py new file mode 100644 index 0000000..ee95a83 --- /dev/null +++ b/astropy/coordinates/representation.py @@ -0,0 +1,707 @@ +""" +In this module, we define the coordinate representation classes, which are +used to represent low-level cartesian, spherical, cylindrical, and other +coordinates. +""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import abc + +import numpy as np +import astropy.units as u + +from .angles import Angle, Longitude, Latitude +from .distances import Distance +from ..extern import six +from ..utils import OrderedDict + +__all__ = ["BaseRepresentation", "CartesianRepresentation", + "SphericalRepresentation", "UnitSphericalRepresentation", + "PhysicsSphericalRepresentation", "CylindricalRepresentation"] + +# Module-level dict mapping representation string alias names to class. +# This is populated by the metaclass init so all representation classes +# get registered automatically. 
+REPRESENTATION_CLASSES = {} + + +def broadcast_quantity(*args): + """ + A Quantity-aware version of np.broadcast_arrays + """ + new_arrays = np.broadcast_arrays(*args) + new_quantities = [] + for i in range(len(new_arrays)): + new_quantities.append(args[i]._new_view(new_arrays[i])) + return tuple(new_quantities) + + +class MetaBaseRepresentation(type): + def __init__(cls, name, bases, dct): + super(MetaBaseRepresentation, cls).__init__(name, bases, dct) + + if name != 'BaseRepresentation' and 'attr_classes' not in dct: + raise NotImplementedError('Representations must have an ' + '"attr_classes" class attribute.') + + # Register representation name (except for BaseRepresentation) + if cls.__name__ == 'BaseRepresentation': + return + + REPRESENTATION_CLASSES[cls.get_name()] = cls + + +@six.add_metaclass(MetaBaseRepresentation) +class BaseRepresentation(object): + """ + Base Representation object, for representing a point in a 3D coordinate + system. + + Notes + ----- + All representation classes should subclass this base representation + class. All subclasses should then define a ``to_cartesian`` method and a + ``from_cartesian`` class method. By default, transformations are done via + the cartesian system, but classes that want to define a smarter + transformation path can overload the ``represent_as`` method. + Furthermore, all classes must define an ``attr_classes`` attribute, an + `~collections.OrderedDict` which maps component names to the class that + creates them. They can also define a `recommended_units` dictionary, which + maps component names to the units they are best presented to users in. Note + that frame classes may override this with their own preferred units. 
+ """ + + recommended_units = {} # subclasses can override + + def represent_as(self, other_class): + if other_class == self.__class__: + return self + else: + # The default is to convert via cartesian coordinates + return other_class.from_cartesian(self.to_cartesian()) + + @classmethod + def from_representation(cls, representation): + return representation.represent_as(cls) + + # Should be replaced by abstractclassmethod once we support only Python 3 + @abc.abstractmethod + def from_cartesian(self): + raise NotImplementedError() + + @abc.abstractmethod + def to_cartesian(self): + raise NotImplementedError() + + @property + def components(self): + """A tuple with the in-order names of the coordinate components""" + return tuple(self.attr_classes) + + @classmethod + def get_name(cls): + name = cls.__name__.lower() + if name.endswith('representation'): + name = name[:-14] + return name + + def __getitem__(self, view): + return self.__class__(*[getattr(self, component)[view] + for component in self.components]) + + def __len__(self): + if self.isscalar: + raise TypeError("'{cls}' object with scalar values have no " + "len()".format(cls=self.__class__.__name__)) + else: + return len(getattr(self, self.components[0])) + + def __nonzero__(self): # Py 2.x + return self.isscalar or len(self) != 0 + + def __bool__(self): # Py 3.x + return self.isscalar or len(self) != 0 + + @property + def shape(self): + return getattr(self, self.components[0]).shape + + @property + def isscalar(self): + return getattr(self, self.components[0]).isscalar + + @property + def _values(self): + """Turn the coordinates into a record array with the coordinate values. + + The record array fields will have the component names. 
+ """ + allcomp = np.array([getattr(self, component).value + for component in self.components]) + dtype = np.dtype([(str(component), np.float) + for component in self.components]) + return (np.rollaxis(allcomp, 0, len(allcomp.shape)) + .copy().view(dtype).squeeze()) + + @property + def _units(self): + """Return a dictionary with the units of the coordinate components.""" + return dict([(component, getattr(self, component).unit) + for component in self.components]) + + @property + def _unitstr(self): + units_set = set(self._units.values()) + if len(units_set) == 1: + unitstr = units_set.pop().to_string() + else: + unitstr = '({0})'.format( + ', '.join([self._units[component].to_string() + for component in self.components])) + return unitstr + + def __str__(self): + if self.isscalar and len(set(self._units.values())) > 1: + return '({0})'.format(', '.join( + ['{0}'.format(getattr(self, component)) + for component in self.components])) + else: + return '{0} {1:s}'.format(self._values, self._unitstr) + + def __repr__(self): + if self.isscalar: + return '<{0} {1}>'.format( + self.__class__.__name__, + ', '.join(['{0}={1}'.format(component, + getattr(self, component)) + for component in self.components])) + + else: + prefixstr = ' ' + arrstr = np.array2string(self._values, separator=', ', + prefix=prefixstr) + + return '<{0} ({1}) in {2:s}\n{3}{4}>'.format( + self.__class__.__name__, ', '.join(self.components), + self._unitstr, prefixstr, arrstr) + + +class CartesianRepresentation(BaseRepresentation): + """ + Representation of points in 3D cartesian coordinates. + + Parameters + ---------- + x, y, z : `~astropy.units.Quantity` + The x, y, and z coordinates of the point(s). If ``x``, ``y``, and + ``z`` have different shapes, they should be broadcastable. + + copy : bool, optional + If True arrays will be copied rather than referenced. 
+ """ + + attr_classes = OrderedDict([('x', u.Quantity), + ('y', u.Quantity), + ('z', u.Quantity)]) + + def __init__(self, x, y=None, z=None, copy=True): + + if y is None and z is None: + x, y, z = x + elif (y is None and z is not None) or (y is not None and z is None): + raise ValueError("x, y, and z are required to instantiate CartesianRepresentation") + + if not isinstance(x, self.attr_classes['x']): + raise TypeError('x should be a {0}'.format(self.attr_classes['x'].__name__)) + + if not isinstance(y, self.attr_classes['x']): + raise TypeError('y should be a {0}'.format(self.attr_classes['y'].__name__)) + + if not isinstance(z, self.attr_classes['x']): + raise TypeError('z should be a {0}'.format(self.attr_classes['z'].__name__)) + + x = self.attr_classes['x'](x, copy=copy) + y = self.attr_classes['y'](y, copy=copy) + z = self.attr_classes['z'](z, copy=copy) + + if not (x.unit.physical_type == y.unit.physical_type == z.unit.physical_type): + raise u.UnitsError("x, y, and z should have matching physical types") + + try: + x, y, z = broadcast_quantity(x, y, z) + except ValueError: + raise ValueError("Input parameters x, y, and z cannot be broadcast") + + self._x = x + self._y = y + self._z = z + + @property + def x(self): + """ + The x component of the point(s). + """ + return self._x + + @property + def y(self): + """ + The y component of the point(s). + """ + return self._y + + @property + def z(self): + """ + The z component of the point(s). + """ + return self._z + + @property + def xyz(self): + return u.Quantity((self._x, self._y, self._z)) + + @classmethod + def from_cartesian(cls, other): + return other + + def to_cartesian(self): + return self + + +class SphericalRepresentation(BaseRepresentation): + """ + Representation of points in 3D spherical coordinates. + + Parameters + ---------- + lon, lat : `~astropy.units.Quantity` + The longitude and latitude of the point(s), in angular units. 
The + latitude should be between -90 and 90 degrees, and the longitude will + be wrapped to an angle between 0 and 360 degrees. These can also be + instances of `~astropy.coordinates.Angle`, + `~astropy.coordinates.Longitude`, or `~astropy.coordinates.Latitude`. + + distance : `~astropy.units.Quantity` + The distance to the point(s). If the distance is a length, it is + passed to the :class:`~astropy.coordinates.Distance` class, otherwise + it is passed to the :class:`~astropy.units.Quantity` class. + + copy : bool, optional + If True arrays will be copied rather than referenced. + """ + + attr_classes = OrderedDict([('lon', Longitude), + ('lat', Latitude), + ('distance', u.Quantity)]) + recommended_units = {'lon': u.deg, 'lat': u.deg} + + def __init__(self, lon, lat, distance, copy=True): + + if not isinstance(lon, u.Quantity) or isinstance(lon, Latitude): + raise TypeError('lon should be a Quantity, Angle, or Longitude') + + if not isinstance(lat, u.Quantity) or isinstance(lat, Longitude): + raise TypeError('lat should be a Quantity, Angle, or Latitude') + + # Let the Longitude and Latitude classes deal with e.g. parsing + lon = self.attr_classes['lon'](lon, copy=copy) + lat = self.attr_classes['lat'](lat, copy=copy) + + distance = self.attr_classes['distance'](distance, copy=copy) + if distance.unit.physical_type == 'length': + distance = distance.view(Distance) + + try: + lon, lat, distance = broadcast_quantity(lon, lat, distance) + except ValueError: + raise ValueError("Input parameters lon, lat, and distance cannot be broadcast") + + self._lon = lon + self._lat = lat + self._distance = distance + + @property + def lon(self): + """ + The longitude of the point(s). + """ + return self._lon + + @property + def lat(self): + """ + The latitude of the point(s). + """ + return self._lat + + @property + def distance(self): + """ + The distance from the origin to the point(s). 
+ """ + return self._distance + + def represent_as(self, other_class): + # Take a short cut if the other clsss is a spherical representation + if other_class is PhysicsSphericalRepresentation: + return PhysicsSphericalRepresentation(phi=self.lon, + theta=90 * u.deg - self.lat, + r=self.distance) + elif other_class is UnitSphericalRepresentation: + return UnitSphericalRepresentation(lon=self.lon, lat=self.lat) + else: + return super(SphericalRepresentation, self).represent_as(other_class) + + def to_cartesian(self): + """ + Converts spherical polar coordinates to 3D rectangular cartesian + coordinates. + """ + + # We need to convert Distance to Quantity to allow negative values. + if isinstance(self.distance, Distance): + d = self.distance.view(u.Quantity) + else: + d = self.distance + + x = d * np.cos(self.lat) * np.cos(self.lon) + y = d * np.cos(self.lat) * np.sin(self.lon) + z = d * np.sin(self.lat) + + return CartesianRepresentation(x=x, y=y, z=z) + + @classmethod + def from_cartesian(cls, cart): + """ + Converts 3D rectangular cartesian coordinates to spherical polar + coordinates. + """ + + s = np.hypot(cart.x, cart.y) + r = np.hypot(s, cart.z) + + lon = np.arctan2(cart.y, cart.x) + lat = np.arctan2(cart.z, s) + + return cls(lon=lon, lat=lat, distance=r) + + +class UnitSphericalRepresentation(BaseRepresentation): + """ + Representation of points on a unit sphere. + + Parameters + ---------- + lon, lat : `~astropy.units.Quantity` or str + The longitude and latitude of the point(s), in angular units. The + latitude should be between -90 and 90 degrees, and the longitude will + be wrapped to an angle between 0 and 360 degrees. These can also be + instances of `~astropy.coordinates.Angle`, + `~astropy.coordinates.Longitude`, or `~astropy.coordinates.Latitude`. + + copy : bool, optional + If True arrays will be copied rather than referenced. 
+ """ + + attr_classes = OrderedDict([('lon', Longitude), + ('lat', Latitude)]) + recommended_units = {'lon': u.deg, 'lat': u.deg} + + def __init__(self, lon, lat, copy=True): + + if not isinstance(lon, u.Quantity) or isinstance(lon, Latitude): + raise TypeError('lon should be a Quantity, Angle, or Longitude') + + if not isinstance(lat, u.Quantity) or isinstance(lat, Longitude): + raise TypeError('lat should be a Quantity, Angle, or Latitude') + # Let the Longitude and Latitude classes deal with e.g. parsing + lon = self.attr_classes['lon'](lon, copy=copy) + lat = self.attr_classes['lat'](lat, copy=copy) + + try: + lon, lat = broadcast_quantity(lon, lat) + except ValueError: + raise ValueError("Input parameters lon and lat cannot be broadcast") + + self._lon = lon + self._lat = lat + + @property + def lon(self): + """ + The longitude of the point(s). + """ + return self._lon + + @property + def lat(self): + """ + The latitude of the point(s). + """ + return self._lat + + # TODO: implement represent_as for efficient transformations + + def to_cartesian(self): + """ + Converts spherical polar coordinates to 3D rectangular cartesian + coordinates. + """ + + x = u.one * np.cos(self.lat) * np.cos(self.lon) + y = u.one * np.cos(self.lat) * np.sin(self.lon) + z = u.one * np.sin(self.lat) + + return CartesianRepresentation(x=x, y=y, z=z) + + @classmethod + def from_cartesian(cls, cart): + """ + Converts 3D rectangular cartesian coordinates to spherical polar + coordinates. 
+ """ + + s = np.hypot(cart.x, cart.y) + + lon = np.arctan2(cart.y, cart.x) + lat = np.arctan2(cart.z, s) + + return cls(lon=lon, lat=lat) + + def represent_as(self, other_class): + # Take a short cut if the other clsss is a spherical representation + if other_class is PhysicsSphericalRepresentation: + return PhysicsSphericalRepresentation(phi=self.lon, + theta=90 * u.deg - self.lat, + r=1.0) + elif other_class is SphericalRepresentation: + return SphericalRepresentation(lon=self.lon, lat=self.lat, distance=1.0) + else: + return super(UnitSphericalRepresentation, self).represent_as(other_class) + + +class PhysicsSphericalRepresentation(BaseRepresentation): + """ + Representation of points in 3D spherical coordinates (using the physics + convention of using ``phi`` and ``theta`` for azimuth and inclination + from the pole). + + Parameters + ---------- + phi, theta : `~astropy.units.Quantity` or str + The azimuth and inclination of the point(s), in angular units. The + inclination should be between 0 and 180 degrees, and the azimuth will + be wrapped to an angle between 0 and 360 degrees. These can also be + instances of `~astropy.coordinates.Angle`. If ``copy`` is False, `phi` + will be changed inplace if it is not between 0 and 360 degrees. + + r : `~astropy.units.Quantity` + The distance to the point(s). If the distance is a length, it is + passed to the :class:`~astropy.coordinates.Distance` class, otherwise + it is passed to the :class:`~astropy.units.Quantity` class. + + copy : bool, optional + If True arrays will be copied rather than referenced. 
+ """ + + attr_classes = OrderedDict([('phi', Angle), + ('theta', Angle), + ('r', u.Quantity)]) + recommended_units = {'phi': u.deg, 'theta': u.deg} + + def __init__(self, phi, theta, r, copy=True): + + if not isinstance(phi, u.Quantity) or isinstance(phi, Latitude): + raise TypeError('phi should be a Quantity or Angle') + + if not isinstance(theta, u.Quantity) or isinstance(theta, Longitude): + raise TypeError('phi should be a Quantity or Angle') + + # Let the Longitude and Latitude classes deal with e.g. parsing + phi = self.attr_classes['phi'](phi, copy=copy) + theta = self.attr_classes['theta'](theta, copy=copy) + + # Wrap/validate phi/theta + if copy: + phi = phi.wrap_at(360 * u.deg) + else: + # necessary because the above version of `wrap_at` has to be a copy + phi.wrap_at(360 * u.deg, inplace=True) + if np.any(theta.value < 0.) or np.any(theta.value > 180.): + raise ValueError('Inclination angle(s) must be within 0 deg <= angle <= 180 deg, ' + 'got {0}'.format(theta.to(u.degree))) + + r = self.attr_classes['r'](r, copy=copy) + if r.unit.physical_type == 'length': + r = r.view(Distance) + + try: + phi, theta, r = broadcast_quantity(phi, theta, r) + except ValueError: + raise ValueError("Input parameters phi, theta, and r cannot be broadcast") + + self._phi = phi + self._theta = theta + self._distance = r + + @property + def phi(self): + """ + The azimuth of the point(s). + """ + return self._phi + + @property + def theta(self): + """ + The elevation of the point(s). + """ + return self._theta + + @property + def r(self): + """ + The distance from the origin to the point(s). 
+ """ + return self._distance + + def represent_as(self, other_class): + # Take a short cut if the other clsss is a spherical representation + if other_class is SphericalRepresentation: + return SphericalRepresentation(lon=self.phi, + lat=90 * u.deg - self.theta, + distance=self.r) + elif other_class is UnitSphericalRepresentation: + return UnitSphericalRepresentation(lon=self.phi, + lat=90 * u.deg - self.theta) + else: + return super(PhysicsSphericalRepresentation, self).represent_as(other_class) + + def to_cartesian(self): + """ + Converts spherical polar coordinates to 3D rectangular cartesian + coordinates. + """ + + # We need to convert Distance to Quantity to allow negative values. + if isinstance(self.r, Distance): + d = self.r.view(u.Quantity) + else: + d = self.r + + x = d * np.sin(self.theta) * np.cos(self.phi) + y = d * np.sin(self.theta) * np.sin(self.phi) + z = d * np.cos(self.theta) + + return CartesianRepresentation(x=x, y=y, z=z) + + @classmethod + def from_cartesian(cls, cart): + """ + Converts 3D rectangular cartesian coordinates to spherical polar + coordinates. + """ + + s = np.hypot(cart.x, cart.y) + r = np.hypot(s, cart.z) + + phi = np.arctan2(cart.y, cart.x) + theta = np.arctan2(s, cart.z) + + return cls(phi=phi, theta=theta, r=r) + + +class CylindricalRepresentation(BaseRepresentation): + """ + Representation of points in 3D cylindrical coordinates. + + Parameters + ---------- + rho : `~astropy.units.Quantity` + The distance from the z axis to the point(s). + + phi : `~astropy.units.Quantity` + The azimuth of the point(s), in angular units, which will be wrapped + to an angle between 0 and 360 degrees. This can also be instances of + `~astropy.coordinates.Angle`, + + z : `~astropy.units.Quantity` + The z coordinate(s) of the point(s) + + copy : bool, optional + If True arrays will be copied rather than referenced. 
+ """ + + attr_classes = OrderedDict([('rho', u.Quantity), + ('phi', Angle), + ('z', u.Quantity)]) + recommended_units = {'phi': u.deg} + + def __init__(self, rho, phi, z, copy=True): + + if not isinstance(phi, u.Quantity) or isinstance(phi, Latitude): + raise TypeError('phi should be a Quantity or Angle') + + rho = self.attr_classes['rho'](rho, copy=copy) + phi = self.attr_classes['phi'](phi, copy=copy) + z = self.attr_classes['z'](z, copy=copy) + + if not (rho.unit.physical_type == z.unit.physical_type): + raise u.UnitsError("rho and z should have matching physical types") + + try: + rho, phi, z = broadcast_quantity(rho, phi, z) + except ValueError: + raise ValueError("Input parameters rho, phi, and z cannot be broadcast") + + self._rho = rho + self._phi = phi + self._z = z + + @property + def rho(self): + """ + The distance of the point(s) from the z-axis. + """ + return self._rho + + @property + def phi(self): + """ + The azimuth of the point(s). + """ + return self._phi + + @property + def z(self): + """ + The height of the point(s). + """ + return self._z + + @classmethod + def from_cartesian(cls, cart): + """ + Converts 3D rectangular cartesian coordinates to cylindrical polar + coordinates. + """ + + rho = np.hypot(cart.x, cart.y) + phi = np.arctan2(cart.y, cart.x) + z = cart.z + + return cls(rho=rho, phi=phi, z=z) + + def to_cartesian(self): + """ + Converts cylindrical polar coordinates to 3D rectangular cartesian + coordinates. 
+ """ + x = self.rho * np.cos(self.phi) + y = self.rho * np.sin(self.phi) + z = self.z + + return CartesianRepresentation(x=x, y=y, z=z) diff --git a/astropy/coordinates/setup_package.py b/astropy/coordinates/setup_package.py new file mode 100644 index 0000000..a0d0164 --- /dev/null +++ b/astropy/coordinates/setup_package.py @@ -0,0 +1,9 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + + +def get_package_data(): + return {'astropy.coordinates.tests.accuracy': ['*.csv']} + + +def requires_2to3(): + return False diff --git a/astropy/coordinates/sky_coordinate.py b/astropy/coordinates/sky_coordinate.py new file mode 100644 index 0000000..94960b9 --- /dev/null +++ b/astropy/coordinates/sky_coordinate.py @@ -0,0 +1,986 @@ +from __future__ import (absolute_import, division, print_function, unicode_literals) + +import collections + +import numpy as np + +from ..utils.compat.misc import override__dir__ +from ..extern import six +from ..extern.six.moves import zip +from ..units import Unit, IrreducibleUnit +from .. import units as u + +from .distances import Distance +from .baseframe import BaseCoordinateFrame, frame_transform_graph, GenericFrame, _get_repr_cls +from .builtin_frames import ICRS +from .representation import (BaseRepresentation, SphericalRepresentation, + UnitSphericalRepresentation) + +__all__ = ['SkyCoord'] + + +# Define a convenience mapping. This is used like a module constants +# but is actually dynamically evaluated. +def FRAME_ATTR_NAMES_SET(): + """Set of all possible frame-specific attributes""" + out = set() + for frame_cls in frame_transform_graph.frame_set: + for attr in frame_cls.get_frame_attr_names().keys(): + out.add(attr) + return out + + +class SkyCoord(object): + """High-level object providing a flexible interface for celestial coordinate + representation, manipulation, and transformation between systems. + + The `SkyCoord` class accepts a wide variety of inputs for initialization. 
+ At a minimum these must provide one or more celestial coordinate values + with unambiguous units. Typically one also specifies the coordinate + frame, though this is not required. The general pattern is for spherical + representations is:: + + SkyCoord(COORD, [FRAME], keyword_args ...) + SkyCoord(LON, LAT, [FRAME], keyword_args ...) + SkyCoord(LON, LAT, [DISTANCE], frame=FRAME, unit=UNIT, keyword_args ...) + SkyCoord([FRAME], =LON, =LAT, keyword_args ...) + + It is also possible to input coordinate values in other representations + such as cartesian or cylindrical. In this case one includes the keyword + argument ``representation='cartesian'`` (for example) along with data in + ``x``, ``y``, and ``z``. + + Examples + -------- + The examples below illustrate common ways of initializing a `SkyCoord` + object. For a complete description of the allowed syntax see the + full coordinates documentation. First some imports:: + + >>> from astropy.coordinates import SkyCoord # High-level coordinates + >>> from astropy.coordinates import ICRS, Galactic, FK4, FK5 # Low-level frames + >>> from astropy.coordinates import Angle, Latitude, Longitude # Angles + >>> import astropy.units as u + + The coordinate values and frame specification can now be provided using + positional and keyword arguments:: + + >>> c = SkyCoord(10, 20, unit="deg") # defaults to ICRS frame + >>> c = SkyCoord([1, 2, 3], [-30, 45, 8], "icrs", unit="deg") # 3 coords + + >>> coords = ["1:12:43.2 +1:12:43", "1 12 43.2 +1 12 43"] + >>> c = SkyCoord(coords, FK4, unit=(u.deg, u.hourangle), obstime="J1992.21") + + >>> c = SkyCoord("1h12m43.2s +1d12m43s", Galactic) # Units from string + >>> c = SkyCoord("galactic", l="1h12m43.2s", b="+1d12m43s") + + >>> ra = Longitude([1, 2, 3], unit=u.deg) # Could also use Angle + >>> dec = np.array([4.5, 5.2, 6.3]) * u.deg # Astropy Quantity + >>> c = SkyCoord(ra, dec, frame='icrs') + >>> c = SkyCoord(ICRS, ra=ra, dec=dec, obstime='2001-01-02T12:34:56') + + >>> c = FK4(1 * 
u.deg, 2 * u.deg) # Uses defaults for obstime, equinox + >>> c = SkyCoord(c, obstime='J2010.11', equinox='B1965') # Override defaults + + >>> c = SkyCoord(w=0, u=1, v=2, unit='kpc', frame='galactic', representation='cartesian') + + As shown, the frame can be a `~astropy.coordinates.BaseCoordinateFrame` + class or the corresponding string alias. The frame classes that are built in + to astropy are `ICRS`, `FK5`, `FK4`, `FK4NoETerms`, and `Galactic`. + The string aliases are simply lower-case versions of the class name, and + allow for creating a `SkyCoord` object and transforming frames without + explicitly importing the frame classes. + + Parameters + ---------- + frame : `~astropy.coordinates.BaseCoordinateFrame` class or string, optional + Type of coordinate frame this `SkyCoord` should represent. Defaults to + to ICRS if not given or given as None. + unit : `~astropy.units.Unit`, string, or tuple of :class:`~astropy.units.Unit` or str, optional + Units for supplied ``LON`` and ``LAT`` values, respectively. If + only one unit is supplied then it applies to both ``LON`` and + ``LAT``. + obstime : valid `~astropy.time.Time` initializer, optional + Time of observation + equinox : valid `~astropy.time.Time` initializer, optional + Coordinate frame equinox + representation : str or Representation class + Specifies the representation, e.g. 'spherical', 'cartesian', or + 'cylindrical'. This affects the positional args and other keyword args + which must correspond to the given representation. + **keyword_args + Other keyword arguments as applicable for user-defined coordinate frames. + Common options include: + + ra, dec : valid `~astropy.coordinates.Angle` initializer, optional + RA and Dec for frames where ``ra`` and ``dec`` are keys in the + frame's ``representation_component_names``, including `ICRS`, + `FK5`, `FK4`, and `FK4NoETerms`. 
+ l, b : valid `~astropy.coordinates.Angle` initializer, optional + Galactic ``l`` and ``b`` for for frames where ``l`` and ``b`` are + keys in the frame's ``representation_component_names``, including + the `Galactic` frame. + x, y, z : float or `~astropy.units.Quantity`, optional + Cartesian coordinates values + w, u, v : float or `~astropy.units.Quantity`, optional + Cartesian coordinates values for the Galactic frame. + """ + + def __init__(self, *args, **kwargs): + + # Parse the args and kwargs to assemble a sanitized and validated + # kwargs dict for initializing attributes for this object and for + # creating the internal self._sky_coord_frame object + args = list(args) # Make it mutable + kwargs = self._parse_inputs(args, kwargs) + + # Set internal versions of object state attributes + for attr in FRAME_ATTR_NAMES_SET(): + setattr(self, '_' + attr, kwargs[attr]) + + frame = kwargs['frame'] + coord_kwargs = {} + if 'representation' in kwargs: + coord_kwargs['representation'] = _get_repr_cls(kwargs['representation']) + for attr, value in kwargs.items(): + if value is not None and (attr in frame.representation_component_names + or attr in frame.get_frame_attr_names()): + coord_kwargs[attr] = value + + # Finally make the internal coordinate object. 
+ self._sky_coord_frame = frame.__class__(**coord_kwargs) + + if not self._sky_coord_frame.has_data: + raise ValueError('Cannot create a SkyCoord without data') + + @property + def frame(self): + return self._sky_coord_frame + + @property + def representation(self): + return self.frame.representation + + @representation.setter + def representation(self, value): + self.frame.representation = value + + def __len__(self): + return len(self.frame) + + def __nonzero__(self): # Py 2.x + return self.frame.__nonzero__() + + def __bool__(self): # Py 3.x + return self.frame.__bool__() + + def __getitem__(self, item): + self_frame = self._sky_coord_frame + try: + # First turn `self` into a mockup of the thing we want - we can copy + # this to get all the right attributes + self._sky_coord_frame = self_frame[item] + return SkyCoord(self) + finally: + # now put back the right frame in self + self._sky_coord_frame = self_frame + + def _parse_inputs(self, args, kwargs): + """ + Assemble a validated and sanitized keyword args dict for instantiating a + SkyCoord and coordinate object from the provided `args`, and `kwargs`. + """ + valid_kwargs = {} + + # Put the SkyCoord attributes like frame, equinox, obstime, location + # into valid_kwargs dict. `Frame` could come from args or kwargs, so + # set valid_kwargs['frame'] accordingly. The others must be specified + # by keyword args or else get a None default. Pop them off of kwargs + # in the process. + frame = valid_kwargs['frame'] = _get_frame(args, kwargs) + if 'representation' in kwargs: + valid_kwargs['representation'] = _get_repr_cls(kwargs.pop('representation')) + + for attr in FRAME_ATTR_NAMES_SET(): + valid_kwargs[attr] = kwargs.pop(attr, None) + + # Get units + units = _get_units(args, kwargs) + + # Grab any frame-specific attr names like `ra` or `l` or `distance` from kwargs + # and migrate to valid_kwargs. 
+ valid_kwargs.update(_get_representation_attrs(frame, units, kwargs)) + + # Error if anything is still left in kwargs + if kwargs: + raise ValueError('Unrecognized keyword argument(s) {0}' + .format(', '.join("'{0}'".format(key) for key in kwargs))) + + # Finally deal with the unnamed args. This figures out what the arg[0] is + # and returns a dict with appropriate key/values for initializing frame class. + if args: + if len(args) == 1: + # One arg which must be a coordinate. In this case + # coord_kwargs will contain keys like 'ra', 'dec', 'distance' + # along with any frame attributes like equinox or obstime which + # were explicitly specified in the coordinate object (i.e. non-default). + coord_kwargs = _parse_coordinate_arg(args[0], frame, units) + + elif len(args) <= 3: + frame_attr_names = frame.representation_component_names.keys() + repr_attr_names = frame.representation_component_names.values() + coord_kwargs = {} + for arg, frame_attr_name, repr_attr_name, unit in zip(args, frame_attr_names, + repr_attr_names, units): + attr_class = frame.representation.attr_classes[repr_attr_name] + coord_kwargs[frame_attr_name] = attr_class(arg, unit=unit) + + else: + raise ValueError('Must supply no more than three positional arguments, got {}' + .format(len(args))) + + # Copy the coord_kwargs into the final valid_kwargs dict. For each + # of the coord_kwargs ensure that there is no conflict with a value + # specified by the user in the original kwargs. + for attr, coord_value in coord_kwargs.items(): + if (attr in valid_kwargs + and valid_kwargs[attr] is not None + and valid_kwargs[attr] != coord_value): + raise ValueError("Coordinate attribute '{0}'={1!r} conflicts with " + "keyword argument '{0}'={2!r}" + .format(attr, coord_value, valid_kwargs[attr])) + valid_kwargs[attr] = coord_value + + return valid_kwargs + + def transform_to(self, frame): + """ + Transform this coordinate to a new frame. 
+ + Parameters + ---------- + frame : str or `BaseCoordinateFrame` class / instance or `SkyCoord` instance + The frame to transform this coordinate into. + + Returns + ------- + coord : `SkyCoord` + A new object with this coordinate represented in the `frame` frame. + + Raises + ------ + ValueError + If there is no possible transformation route. + """ + from astropy.coordinates.errors import ConvertError + + frame_kwargs = {} + + # Frame name (string) or frame class? Coerce into an instance. + try: + frame = _get_frame_class(frame)() + except: + pass + + if isinstance(frame, SkyCoord): + frame = frame.frame # Change to underlying coord frame instance + + if isinstance(frame, BaseCoordinateFrame): + new_frame_cls = frame.__class__ + + # Set the keyword args for making a new frame instance for the + # transform. If the supplied frame instance has a non-default + # value set then use that, otherwise use the self attribute value + # if it is not None. + for attr in FRAME_ATTR_NAMES_SET(): + self_val = getattr(self, attr, None) + frame_val = getattr(frame, attr, None) + if (frame_val is not None and + attr not in frame._attr_names_with_defaults): + frame_kwargs[attr] = frame_val + elif self_val is not None: + frame_kwargs[attr] = self_val + else: + raise ValueError('Transform `frame` must be a frame name, class, or instance') + + # Get the composite transform to the new frame + trans = frame_transform_graph.get_transform(self.frame.__class__, new_frame_cls) + if trans is None: + raise ConvertError('Cannot transform from {0} to {1}' + .format(self.frame.__class__, new_frame_cls)) + + # Make a generic frame which will accept all the frame kwargs that + # are provided and allow for transforming through intermediate frames + # which may require one or more of those kwargs. + generic_frame = GenericFrame(frame_kwargs) + + # Do the transformation, returning a coordinate frame of the desired + # final type (not generic). 
+ new_coord = trans(self.frame, generic_frame) + + # Finally make the new SkyCoord object from the `new_coord` and + # remaining frame_kwargs that are not frame_attributes in `new_coord`. + # We could remove overlaps here, but the init code is set up to accept + # overlaps as long as the values are identical (which they must be). + return self.__class__(new_coord, **frame_kwargs) + + def __getattr__(self, attr): + """ + Overrides getattr to return coordinates that this can be transformed + to, based on the alias attr in the master transform graph. + """ + + if self.frame.name == attr: + return self # Should this be a deepcopy of self? + + # Anything in the set of all possible frame_attr_names is handled + # here. If the attr is relevant for the current frame then delegate + # to self.frame otherwise get it from self._. + if attr in FRAME_ATTR_NAMES_SET(): + if attr in self.frame.get_frame_attr_names(): + return getattr(self.frame, attr) + else: + return getattr(self, '_' + attr) + + # Some attributes might not fall in the above category but still + # are available through self._sky_coord_frame. + if not attr.startswith('_') and hasattr(self._sky_coord_frame, attr): + return getattr(self._sky_coord_frame, attr) + + # Try to interpret as a new frame for transforming. + frame_cls = frame_transform_graph.lookup_name(attr) + if frame_cls is not None and self.frame.is_transformable_to(frame_cls): + return self.transform_to(attr) + + # Fail + raise AttributeError("'{0}' object has no attribute '{1}'" + .format(self.__class__.__name__, attr)) + + @override__dir__ + def __dir__(self): + """ + Override the builtin `dir` behavior to include: + - Transforms available by aliases + - Attribute / methods of the underlying self.frame object + """ + + # determine the aliases that this can be transformed to. 
+ dir_values = set() + for name in frame_transform_graph.get_names(): + frame_cls = frame_transform_graph.lookup_name(name) + if self.frame.is_transformable_to(frame_cls): + dir_values.add(name) + + # Add public attributes of self.frame + dir_values.update(set(attr for attr in dir(self.frame) if not attr.startswith('_'))) + + # Add all possible frame attributes + dir_values.update(FRAME_ATTR_NAMES_SET()) + + return dir_values + + def __repr__(self): + clsnm = self.__class__.__name__ + coonm = self.frame.__class__.__name__ + + s = '<{clsnm} ({coonm})'.format(**locals()) + crepr = repr(self.frame) + return s + crepr[crepr.index(':'):] + + def to_string(self, style='decimal', **kwargs): + """ + A string representation of the coordinates. + + The default styles definitions are:: + + 'decimal': 'lat': {'decimal': True, 'unit': "deg"} + 'lon': {'decimal': True, 'unit': "deg"} + 'dms': 'lat': {'unit': "deg"} + 'lon': {'unit': "deg"} + 'hmsdms': 'lat': {'alwayssign': True, 'pad': True, 'unit': "deg"} + 'lon': {'pad': True, 'unit': "hour"} + + See :meth:`~astropy.coordinates.Angle.to_string` for details and + keyword arguments (the two angles forming the coordinates are are + both :class:`~astropy.coordinates.Angle` instances). Keyword + arguments have precedence over the style defaults and are passed + to :meth:`~astropy.coordinates.Angle.to_string`. + + Parameters + ---------- + style : {'hmsdms', 'dms', 'decimal'} + The formatting specification to use. These encode the three most + common ways to represent coordinates. The default is `decimal`. + kwargs + Keyword args passed to :meth:`~astropy.coordinates.Angle.to_string`. 
+ """ + + sph_coord = self.frame.represent_as(SphericalRepresentation) + + styles = {'hmsdms': {'lonargs': {'unit': u.hour, 'pad': True}, + 'latargs': {'unit': u.degree, 'pad': True, 'alwayssign': True}}, + 'dms': {'lonargs': {'unit': u.degree}, + 'latargs': {'unit': u.degree}}, + 'decimal': {'lonargs': {'unit': u.degree, 'decimal': True}, + 'latargs': {'unit': u.degree, 'decimal': True}} + } + + lonargs = {} + latargs = {} + + if style in styles: + lonargs.update(styles[style]['lonargs']) + latargs.update(styles[style]['latargs']) + else: + raise ValueError('Invalid style. Valid options are: {0}'.format(",".join(styles))) + + lonargs.update(kwargs) + latargs.update(kwargs) + + if np.isscalar(sph_coord.lon.value): + coord_string = (sph_coord.lon.to_string(**lonargs) + + " " + + sph_coord.lat.to_string(**latargs)) + else: + coord_string = [] + for lonangle, latangle in zip(sph_coord.lon, sph_coord.lat): + coord_string += [(lonangle.to_string(**lonargs) + + " " + + latangle.to_string(**latargs))] + + return coord_string + + # High-level convinience methods + def separation(self, other): + """ + Computes on-sky separation between this coordinate and another. + + Parameters + ---------- + other : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame` + The coordinate to get the separation to. + + Returns + ------- + sep : `~astropy.coordinates.Angle` + The on-sky separation between this and the ``other`` coordinate. + + Notes + ----- + The separation is calculated using the Vincenty formula, which + is stable at all locations, including poles and antipodes [1]_. + + .. [1] http://en.wikipedia.org/wiki/Great-circle_distance + + """ + from . 
import Angle + from .angle_utilities import angular_separation + + if isinstance(other, SkyCoord): + self_in_other_system = self.transform_to(other.frame) + elif isinstance(other, BaseCoordinateFrame) and other.has_data: + # it's a frame + self_in_other_system = self.transform_to(other) + else: + raise TypeError('Can only get separation to another SkyCoord or a ' + 'coordinate frame with data') + + lon1 = self_in_other_system.spherical.lon + lat1 = self_in_other_system.spherical.lat + lon2 = other.spherical.lon + lat2 = other.spherical.lat + + # Get the separation as a Quantity, convert to Angle in degrees + sep = angular_separation(lon1, lat1, lon2, lat2) + return Angle(sep, unit=u.degree) + + def separation_3d(self, other): + """ + Computes three dimensional separation between this coordinate + and another. + + Parameters + ---------- + other : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame` + The coordinate to get the separation to. + + Returns + ------- + sep : `~astropy.coordinates.Distance` + The real-space distance between these two coordinates. + + Raises + ------ + ValueError + If this or the other coordinate do not have distances. 
+ """ + + if isinstance(other, SkyCoord): + self_in_other_system = self.transform_to(other.frame) + elif isinstance(other, BaseCoordinateFrame) and other.has_data: + # it's a frame + self_in_other_system = self.transform_to(other) + else: + raise TypeError('Can only get separation to another SkyCoord or a ' + 'coordinate frame with data') + + if self.data.__class__ == UnitSphericalRepresentation: + raise ValueError('This object does not have a distance; cannot ' + 'compute 3d separation.') + if other.data.__class__ == UnitSphericalRepresentation: + raise ValueError('The other object does not have a distance; ' + 'cannot compute 3d separation.') + + dx = self_in_other_system.cartesian.x - other.cartesian.x + dy = self_in_other_system.cartesian.y - other.cartesian.y + dz = self_in_other_system.cartesian.z - other.cartesian.z + + distval = (dx.value ** 2 + dy.value ** 2 + dz.value ** 2) ** 0.5 + return Distance(distval, dx.unit) + + def match_to_catalog_sky(self, catalogcoord, nthneighbor=1): + """ + Finds the nearest on-sky matches of this coordinate in a set of + catalog coordinates. + + Parameters + ---------- + catalogcoord : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame` + The base catalog in which to search for matches. Typically this + will be a coordinate object that is an array (i.e., + ``catalogcoord.isscalar == False``) + nthneighbor : int, optional + Which closest neighbor to search for. Typically ``1`` is + desired here, as that is correct for matching one set of + coordinates to another. The next likely use case is ``2``, + for matching a coordinate catalog against *itself* (``1`` + is inappropriate because each point will find itself as the + closest match). + + Returns + ------- + idx : integer array + Indices into ``catalogcoord`` to get the matched points for + each of this object's coordinates. Shape matches this + object. 
+ sep2d : `~astropy.coordinates.Angle` + The on-sky separation between the closest match for each + element in this object in ``catalogcoord``. Shape matches + this object. + dist3d : `~astropy.units.Quantity` + The 3D distance between the closest match for each element + in this object in ``catalogcoord``. Shape matches this + object. + + Notes + ----- + This method requires `SciPy `_ to be + installed or it will fail. + + See Also + -------- + astropy.coordinates.match_coordinates_sky + """ + from .matching import match_coordinates_sky + + if (isinstance(catalogcoord, (SkyCoord, BaseCoordinateFrame)) + and catalogcoord.has_data): + self_in_catalog_frame = self.transform_to(catalogcoord) + else: + raise TypeError('Can only get separation to another SkyCoord or a ' + 'coordinate frame with data') + + res = match_coordinates_sky(self_in_catalog_frame, catalogcoord, + nthneighbor=nthneighbor, + storekdtree='_kdtree_sky') + return res + + def match_to_catalog_3d(self, catalogcoord, nthneighbor=1): + """ + Finds the nearest 3-dimensional matches of this coordinate to a set + of catalog coordinates. + + This finds the 3-dimensional closest neighbor, which is only different + from the on-sky distance if ``distance`` is set in this object or the + ``catalogcoord`` object. + + Parameters + ---------- + catalogcoord : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame` + The base catalog in which to search for matches. Typically this + will be a coordinate object that is an array (i.e., + ``catalogcoord.isscalar == False``) + nthneighbor : int, optional + Which closest neighbor to search for. Typically ``1`` is + desired here, as that is correct for matching one set of + coordinates to another. The next likely use case is + ``2``, for matching a coordinate catalog against *itself* + (``1`` is inappropriate because each point will find + itself as the closest match). 
+ + Returns + ------- + idx : integer array + Indices into ``catalogcoord`` to get the matched points for + each of this object's coordinates. Shape matches this + object. + sep2d : `~astropy.coordinates.Angle` + The on-sky separation between the closest match for each + element in this object in ``catalogcoord``. Shape matches + this object. + dist3d : `~astropy.units.Quantity` + The 3D distance between the closest match for each element + in this object in ``catalogcoord``. Shape matches this + object. + + Notes + ----- + This method requires `SciPy `_ to be + installed or it will fail. + + See Also + -------- + astropy.coordinates.match_coordinates_3d + """ + from .matching import match_coordinates_3d + + if (isinstance(catalogcoord, (SkyCoord, BaseCoordinateFrame)) + and catalogcoord.has_data): + self_in_catalog_frame = self.transform_to(catalogcoord) + else: + raise TypeError('Can only get separation to another SkyCoord or a ' + 'coordinate frame with data') + + res = match_coordinates_3d(self_in_catalog_frame, catalogcoord, + nthneighbor=nthneighbor, + storekdtree='_kdtree_3d') + + return res + + def position_angle(self, other): + """ + Computes the on-sky position angle (East of North) between this + `SkyCoord` and another. + + Parameters + ---------- + other : `SkyCoord` + The other coordinate to compute the position angle to. It is + treated as the "head" of the vector of the position angle. + + Returns + ------- + pa : `~astropy.coordinates.Angle` + The (positive) position angle of the vector pointing from ``self`` + to ``other``. If either ``self`` or ``other`` contain arrays, this + will be an array following the appropriate `numpy` broadcasting + rules. + + Examples + -------- + + >>> c1 = SkyCoord(0*u.deg, 0*u.deg) + >>> c2 = SkyCoord(1*u.deg, 0*u.deg) + >>> c1.position_angle(c2).degree + 90.0 + >>> c3 = SkyCoord(1*u.deg, 1*u.deg) + >>> c1.position_angle(c3).degree # doctest: +FLOAT_CMP + 44.995636455344844 + """ + from . 
import angle_utilities + + if self.frame.name == other.frame.name: + other_in_self_frame = other + else: + other_in_self_frame = other.frame.transform_to(self.frame) + + slat = self.represent_as(UnitSphericalRepresentation).lat + slon = self.represent_as(UnitSphericalRepresentation).lon + olat = other_in_self_frame.represent_as(UnitSphericalRepresentation).lat + olon = other_in_self_frame.represent_as(UnitSphericalRepresentation).lon + + return angle_utilities.position_angle(slon, slat, olon, olat) + + # Name resolve + @classmethod + def from_name(cls, name, frame='icrs'): + """ + Given a name, query the CDS name resolver to attempt to retrieve + coordinate information for that object. The search database, sesame + url, and query timeout can be set through configuration items in + ``astropy.coordinates.name_resolve`` -- see docstring for + `~astropy.coordinates.get_icrs_coordinates` for more + information. + + Parameters + ---------- + name : str + The name of the object to get coordinates for, e.g. ``'M42'``. + frame : str or `BaseCoordinateFrame` class or instance + The frame to transform the object to. + + Returns + ------- + coord : SkyCoord + Instance of the SkyCoord class. + """ + + from .name_resolve import get_icrs_coordinates + + icrs_coord = get_icrs_coordinates(name) + icrs_sky_coord = cls(icrs_coord) + if frame in ('icrs', icrs_coord.__class__): + return icrs_sky_coord + else: + return icrs_sky_coord.transform_to(frame) + +# <----------------Private utility functions below here-------------------------> + + +def _get_frame_class(frame): + """ + Get a frame class from the input `frame`, which could be a frame name + string, or frame class. 
+ """ + import inspect + + if isinstance(frame, six.string_types): + frame_names = frame_transform_graph.get_names() + if frame not in frame_names: + raise ValueError('Coordinate frame {0} not in allowed values {1}' + .format(frame, sorted(frame_names))) + frame_cls = frame_transform_graph.lookup_name(frame) + + elif inspect.isclass(frame) and issubclass(frame, BaseCoordinateFrame): + frame_cls = frame + + else: + raise ValueError('Coordinate frame must be a frame name or frame class') + + return frame_cls + + +def _get_frame(args, kwargs): + """ + Determine the coordinate frame from input SkyCoord args and kwargs. This + modifies args and/or kwargs in-place to remove the item that provided + `frame`. It also infers the frame if an input coordinate was provided and + checks for conflicts. + + This allows for frame to be specified as a string like 'icrs' or a frame + class like ICRS, but not an instance ICRS() since the latter could have + non-default representation attributes which would require a three-way merge. + """ + frame = kwargs.pop('frame', None) + + if frame is not None: + # Frame was provided as kwarg so validate and coerce into corresponding frame. + frame_cls = _get_frame_class(frame) + frame_specified_explicitly = True + else: + # Look for the frame in args + for arg in args: + try: + frame_cls = _get_frame_class(arg) + frame_specified_explicitly = True + except ValueError: + pass + else: + args.remove(arg) + break + else: + # Not in args nor kwargs - default to icrs + frame_cls = ICRS + frame_specified_explicitly = False + + # Check that the new frame doesn't conflict with existing coordinate frame + # if a coordinate is supplied in the args list. If the frame still had not + # been set by this point and a coordinate was supplied, then use that frame. 
+ for arg in args: + coord_frame_cls = None + if isinstance(arg, BaseCoordinateFrame): + coord_frame_cls = arg.__class__ + elif isinstance(arg, SkyCoord): + coord_frame_cls = arg.frame.__class__ + + if coord_frame_cls is not None: + if not frame_specified_explicitly: + frame_cls = coord_frame_cls + elif frame_cls is not coord_frame_cls: + raise ValueError("Cannot override frame='{0}' of input coordinate with " + "new frame='{1}'. Instead transform the coordinate." + .format(coord_frame_cls.__name__, frame_cls.__name__)) + + if 'representation' in kwargs: + frame = frame_cls(representation=_get_repr_cls(kwargs['representation'])) + else: + frame = frame_cls() + + return frame + + +def _get_units(args, kwargs): + """ + Get the longitude unit and latitude unit from kwargs. Possible enhancement + is to allow input from args as well. + """ + if 'unit' not in kwargs: + units = [None, None, None] + + else: + units = kwargs.pop('unit') + + if isinstance(units, six.string_types): + units = [x.strip() for x in units.split(',')] + # Allow for input like unit='deg' or unit='m' + if len(units) == 1: + units = [units[0], units[0], units[0]] + elif isinstance(units, (Unit, IrreducibleUnit)): + units = [units, units, units] + + try: + units = [(Unit(x) if x else None) for x in units] + units.extend(None for x in range(3 - len(units))) + if len(units) > 3: + raise ValueError() + except: + raise ValueError('Unit keyword must have one to three unit values as ' + 'tuple or comma-separated string') + + return units + + +def _parse_coordinate_arg(coords, frame, units): + """ + Single unnamed arg supplied. 
This must be: + - Coordinate frame with data + - Representation + - List or tuple of: + - String which splits into two values + - Iterable with two values + """ + is_scalar = False # Differentiate between scalar and list input + valid_kwargs = {} # Returned dict of lon, lat, and distance (optional) + + frame_attr_names = frame.representation_component_names.keys() + repr_attr_names = frame.representation_component_names.values() + repr_attr_classes = frame.representation.attr_classes.values() + n_attr_names = len(repr_attr_names) + + # Turn a single string into a list of strings for convenience + if isinstance(coords, six.string_types): + is_scalar = True + coords = [coords] + + if isinstance(coords, (SkyCoord, BaseCoordinateFrame)): + # Note that during parsing of `frame` it is checked that any coordinate + # args have the same frame as explicitly supplied, so don't worry here. + + if not coords.has_data: + raise ValueError('Cannot initialize from a frame without coordinate data') + + data = coords.data.represent_as(frame.representation) + + values = [] # List of values corresponding to representation attrs + for repr_attr_name in repr_attr_names: + # If coords did not have an explicit distance then don't include in initializers. 
+ if (isinstance(coords.data, UnitSphericalRepresentation) and + repr_attr_name == 'distance'): + continue + + # Get the value from `data` in the eventual representation + values.append(getattr(data, repr_attr_name)) + + for attr in FRAME_ATTR_NAMES_SET(): + value = getattr(coords, attr, None) + use_value = (isinstance(coords, SkyCoord) + or attr not in coords._attr_names_with_defaults) + if use_value and value is not None: + valid_kwargs[attr] = value + + elif isinstance(coords, BaseRepresentation): + data = coords.represent_as(frame.representation) + values = [getattr(data, repr_attr_name) for repr_attr_name in repr_attr_names] + + elif (isinstance(coords, np.ndarray) and coords.dtype.kind in 'if' + and coords.ndim == 2 and coords.shape[1] <= 3): + # 2-d array of coordinate values. Handle specially for efficiency. + values = coords.transpose() # Iterates over repr attrs + + elif isinstance(coords, (collections.Sequence, np.ndarray)): + # Handles generic list-like input. + + # First turn into a list of lists like [[v1_0, v2_0, v3_0], ... 
[v1_N, v2_N, v3_N]] + vals = [] + for ii, coord in enumerate(coords): + if isinstance(coord, six.string_types): + coord1 = coord.split() + if len(coord1) == 6: + coord1 = (' '.join(coord1[:3]), ' '.join(coord1[3:])) + coord = coord1 + + vals.append(coord) # This assumes coord is a sequence at this point + + # Do some basic validation of the list elements: all have a length and all + # lengths the same + try: + n_coords = sorted(set(len(x) for x in vals)) + except: + raise ValueError('One or more elements of input sequence does not have a length') + + if len(n_coords) > 1: + raise ValueError('Input coordinate values must have same number of elements, found {0}' + .format(n_coords)) + n_coords = n_coords[0] + + # Must have no more coord inputs than representation attributes + if n_coords > n_attr_names: + raise ValueError('Input coordinates have {0} values but {1} representation ' + 'only accepts {2}' + .format(n_coords, frame.representation.get_name(), n_attr_names)) + + # Now transpose vals to get [(v1_0 .. v1_N), (v2_0 .. v2_N), (v3_0 .. v3_N)] + # (ok since we know it is exactly rectangular). (Note: can't just use zip(*values) + # because Longitude et al distinguishes list from tuple so [a1, a2, ..] is needed + # while (a1, a2, ..) doesn't work. + values = [list(x) for x in zip(*vals)] + + if is_scalar: + values = [x[0] for x in values] + + else: + raise ValueError('Cannot parse coordinates from first argument') + + # Finally we have a list of values from which to create the keyword args + # for the frame initialization. Validate by running through the appropriate + # class initializer and supply units (which might be None). 
+ try: + for frame_attr_name, repr_attr_class, value, unit in zip( + frame_attr_names, repr_attr_classes, values, units): + valid_kwargs[frame_attr_name] = repr_attr_class(value, unit=unit) + except Exception as err: + raise ValueError('Cannot parse longitude and latitude from first argument: {0}' + .format(err)) + + return valid_kwargs + + +def _get_representation_attrs(frame, units, kwargs): + """ + Find instances of the "representation attributes" for specifying data + for this frame. Pop them off of kwargs, run through the appropriate class + constructor (to validate and apply unit), and put into the output + valid_kwargs. "Representation attributes" are the frame-specific aliases + for the underlying data values in the representation, e.g. "ra" for "lon" + for many equatorial spherical representations, or "w" for "x" in the + cartesian representation of Galactic. + """ + frame_attr_names = frame.representation_component_names.keys() + repr_attr_classes = frame.representation.attr_classes.values() + + valid_kwargs = {} + for frame_attr_name, repr_attr_class, unit in zip(frame_attr_names, repr_attr_classes, units): + value = kwargs.pop(frame_attr_name, None) + if value is not None: + valid_kwargs[frame_attr_name] = repr_attr_class(value, unit=unit) + + return valid_kwargs diff --git a/astropy/coordinates/tests/__init__.py b/astropy/coordinates/tests/__init__.py new file mode 100644 index 0000000..800d82e --- /dev/null +++ b/astropy/coordinates/tests/__init__.py @@ -0,0 +1,2 @@ +from __future__ import (absolute_import, division, print_function, + unicode_literals) diff --git a/astropy/coordinates/tests/accuracy/__init__.py b/astropy/coordinates/tests/accuracy/__init__.py new file mode 100644 index 0000000..d9dfd79 --- /dev/null +++ b/astropy/coordinates/tests/accuracy/__init__.py @@ -0,0 +1,4 @@ +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +N_ACCURACY_TESTS = 10 # the number of samples to use per accuracy test diff 
--git a/astropy/coordinates/tests/accuracy/fk4_no_e_fk4.csv b/astropy/coordinates/tests/accuracy/fk4_no_e_fk4.csv new file mode 100644 index 0000000..89ede4c --- /dev/null +++ b/astropy/coordinates/tests/accuracy/fk4_no_e_fk4.csv @@ -0,0 +1,202 @@ +# This file was generated with the ref_fk4_no_e_fk4.py script, and the reference values were computed using AST +obstime,ra_in,dec_in,ra_fk4ne,dec_fk4ne,ra_fk4,dec_fk4 +B1995.95,334.661793414,43.9385116594,334.661871722,43.9384643913,334.661715106,43.9385589276 +B1954.56,113.895199649,-14.1109832563,113.895104206,-14.1109806856,113.895295093,-14.110985827 +B1953.55,66.2107722038,-7.76265420193,66.2106936357,-7.76263900837,66.2108507719,-7.76266939548 +B1970.69,73.6417002791,41.7006137481,73.6415874825,41.7005905459,73.6418130758,41.7006369502 +B1960.78,204.381010469,-14.9357743223,204.381033022,-14.935790469,204.380987917,-14.9357581756 +B1975.98,214.396093073,-66.7648451487,214.39618819,-66.7649221332,214.395997956,-66.7647681643 +B1977.93,347.225227105,6.27744217753,347.225265767,6.27744057158,347.225188443,6.27744378347 +B1973.69,235.143754874,-5.59566003897,235.143821166,-5.59565879904,235.143688582,-5.59566127889 +B1960.79,269.606389512,26.7823112195,269.6064937,26.7823268289,269.606285325,26.78229561 +B1961.97,235.285153507,-14.0695156888,235.285221697,-14.0695245442,235.285085317,-14.0695068334 +B1960.84,269.177331338,42.9472695107,269.177458208,42.9472886864,269.177204468,42.947250335 +B1982.78,346.070424986,-3.51848810713,346.070465234,-3.51847491299,346.070384739,-3.51850130129 +B1992.32,3.01978725896,7.19732176646,3.0198007213,7.19731786183,3.0197737966,7.1973256711 +B1996.52,38.3199756112,18.8080489808,38.3199297604,18.8080292742,38.320021462,18.8080686874 +B1990.02,107.533336957,-4.33088623215,107.533242366,-4.3308791254,107.533431548,-4.33089333889 +B1984.04,236.30802591,14.3162535375,236.308095417,14.316277761,236.307956402,14.316229314 
+B1960.36,291.532518915,-33.7960784017,291.532631247,-33.7960622584,291.532406582,-33.7960945449 +B1987.08,313.983328941,27.7572327639,313.983419024,27.757215788,313.983238857,27.7572497397 +B1984.85,347.273135054,-13.6880685538,347.273174533,-13.6880403026,347.273095575,-13.688096805 +B1969.09,260.526724891,-37.6134342267,260.526837065,-37.6134483095,260.526612717,-37.6134201437 +B1992.51,231.291118043,-27.2371455509,231.291186922,-27.2371716878,231.291049163,-27.237119414 +B1976.41,258.283303492,-30.1025933842,258.283404615,-30.1026049901,258.28320237,-30.1025817782 +B1994.65,168.335642599,-44.084769302,168.33559145,-44.0848244927,168.335693748,-44.0847141113 +B1991.03,117.210483914,32.8708634152,117.210375337,32.8708843641,117.210592491,32.8708424662 +B1961.43,158.272058119,-29.286471988,158.271999107,-29.2865040826,158.27211713,-29.2864398934 +B1991.03,262.688069789,-48.1516431413,262.688204769,-48.1516601921,262.687934809,-48.1516260902 +B1956.93,357.845250924,19.2890677934,357.845273996,19.2890447616,357.845227852,19.2890908252 +B1974.12,243.674536239,-10.0431678136,243.67461278,-10.0431700653,243.674459697,-10.0431655619 +B1957.44,284.696106425,19.6051067047,284.696206986,19.6051121836,284.696005864,19.6051012256 +B1972.41,61.5291328053,18.6403709997,61.5290555992,18.640359185,61.5292100114,18.6403828144 +B1983.30,9.66573928438,-22.9075078717,9.66574187976,-22.9074636315,9.66573668899,-22.9075521118 +B1989.45,288.133287813,-36.6947385674,288.1334053,-36.6947252717,288.133170326,-36.694751863 +B1983.10,325.340113758,-33.7758802174,325.340195579,-33.7758368156,325.340031937,-33.7759236192 +B1985.58,8.88343575454,-49.4693354042,8.88344142656,-49.4692581619,8.88343008249,-49.4694126467 +B1994.40,177.029034641,-67.7755279684,177.028973591,-67.7756101942,177.02909569,-67.7754457425 +B1957.08,189.451860246,-68.7071945134,189.451852687,-68.707280034,189.451867805,-68.7071089929 
+B1957.38,214.691763751,-32.6160600699,214.691808834,-32.6161002775,214.691718668,-32.6160198625 +B1966.30,18.7047162369,-32.9080620608,18.7047012927,-32.9080042868,18.7047311812,-32.9081198349 +B1951.59,322.232230099,14.4669345738,322.232303942,14.4669266585,322.232156257,14.466942489 +B1984.39,262.175824918,51.7319974933,262.175969881,51.7320265851,262.175679954,51.7319684013 +B1988.24,294.6060041,34.0181871087,294.606115453,34.0181812889,294.605892748,34.0181929283 +B1967.50,180.08019102,26.2892216009,180.080170768,26.2892699746,180.080211273,26.2891732273 +B1980.80,291.668187169,-22.2789167174,291.668288006,-22.2789027838,291.668086332,-22.2789306509 +B1997.92,34.548669268,-15.8924906144,34.5486300111,-15.8924591395,34.548708525,-15.8925220893 +B1964.55,78.8220157436,-37.4332268082,78.8219051397,-37.4331986299,78.8221263475,-37.4332549865 +B1984.33,93.1388621771,60.5731416456,93.1386708523,60.5731340793,93.139053502,60.5731492117 +B1952.11,168.518071423,7.09229333513,168.51803468,7.09231202586,168.518108166,7.09227464443 +B1953.13,165.374352937,39.3890686842,165.374299611,39.3891290726,165.374406263,39.3890082959 +B1990.72,255.423520875,-17.5881075751,255.423610608,-17.5881124458,255.423431143,-17.5881027044 +B1971.83,64.0990821181,36.8289797648,64.098987426,36.8289518646,64.0991768103,36.829007665 +B1969.60,191.321958369,-52.3532066605,191.321958947,-52.3532769701,191.321957792,-52.3531363511 +B1966.53,60.3872023631,25.1025882655,60.3871229238,25.1025691776,60.3872818026,25.1026073533 +B1972.88,276.773010626,56.6051138031,276.773182582,56.6051241599,276.772838671,56.6051034461 +B1991.77,334.141397682,37.3852087993,334.141469519,37.3851690556,334.141325844,37.3852485429 +B1973.34,219.417716878,-20.2290328911,219.417764848,-20.2290543437,219.417668907,-20.2290114386 +B1971.06,54.0660580808,-29.3264933861,54.0659838918,-29.3264524474,54.06613227,-29.3265343247 +B1978.54,176.26561333,-0.572718169429,176.265589013,-0.572711155523,176.265637647,-0.572725183324 
+B1986.95,135.84418338,-9.94938261687,135.844104187,-9.94938414897,135.844262573,-9.94938108476 +B1952.75,305.496508312,-8.63421746611,305.496595751,-8.63420374088,305.496420873,-8.63423119132 +B1981.21,327.995002307,-58.3471659896,327.995125925,-58.3471028456,327.994878689,-58.3472291335 +B1981.05,138.185539617,11.9337947187,138.185462216,11.9338143115,138.185617017,11.9337751259 +B1950.06,113.578525223,29.6301583121,113.578418602,29.6301753387,113.578631843,29.6301412853 +B1980.14,204.621895006,36.5235009134,204.621922605,36.5235622135,204.621867408,36.5234396134 +B1952.01,67.6144926088,-13.7094836718,67.6144111325,-13.7094635522,67.6145740851,-13.7095037914 +B1979.29,45.3029557779,36.4639084123,45.30288945,36.4638681314,45.3030221059,36.4639486932 +B1972.42,247.534489816,-3.23349952461,247.534569024,-3.23349456661,247.534410608,-3.2335044826 +B1967.69,287.858418461,26.2825631559,287.858523588,26.2825653277,287.858313334,26.2825609839 +B1996.68,206.473163472,-38.4312130715,206.473195575,-38.4312637479,206.473131368,-38.4311623951 +B1963.36,350.362793376,-7.51631961926,350.36282729,-7.51630014511,350.362759462,-7.51633909343 +B1964.06,228.259575769,40.311002157,228.259650941,40.3110571481,228.259500598,40.3109471658 +B1975.25,319.831820932,40.7337792676,319.831918659,40.7337465323,319.831723205,40.7338120029 +B1982.34,178.349313153,-38.3854710615,178.349286408,-38.3855223276,178.349339897,-38.3854197955 +B1998.53,126.58195076,-73.6980337652,126.581645487,-73.6980707198,126.582256033,-73.6979968102 +B1951.79,257.122932676,24.0154376566,257.123027615,24.0154606049,257.122837737,24.0154147083 +B1971.16,181.414481921,-17.7858263698,181.414465135,-17.7858473968,181.414498707,-17.7858053429 +B1979.42,81.2295383474,-9.26450146427,81.2294479067,-9.26448844016,81.2296287882,-9.26451448837 +B1986.59,88.1907984871,32.4238226453,88.1906888861,32.4238179627,88.1909080881,32.4238273279 +B1958.78,285.408252018,67.7826509035,285.408502334,67.7826473151,285.408001701,67.7826544915 
+B1975.53,178.262069224,51.7327600597,178.262035148,51.7328376286,178.2621033,51.7326824908 +B1975.01,329.433722424,-46.8960749035,329.433814783,-46.8960177216,329.433630065,-46.8961320854 +B1994.64,340.333860195,36.5560891832,340.333920655,36.5560469817,340.333799735,36.5561313847 +B1969.13,191.963602676,21.3572019706,191.963604196,21.3572439205,191.963601156,21.3571600208 +B1983.14,90.8973340407,3.44588414281,90.897240458,3.44589104844,90.8974276234,3.44587723717 +B1952.34,259.510340943,47.0512387915,259.51047047,47.0512697696,259.510211416,47.0512078131 +B1987.56,132.277954966,30.4307232942,132.277860775,30.4307550149,132.278049157,30.4306915735 +B1968.44,179.513439448,-54.44865752,179.513406635,-54.4487285563,179.513472261,-54.4485864837 +B1997.40,81.5670170865,-19.9451944488,81.5669219294,-19.9451761627,81.5671122436,-19.9452127349 +B1967.36,127.283632829,-10.0946390302,127.283546305,-10.0946385601,127.283719352,-10.0946395003 +B1984.19,234.306643184,-86.4404274379,234.307689689,-86.4404960056,234.305596721,-86.440358869 +B1991.23,112.65584231,11.2521500479,112.655747491,11.2521615342,112.655937129,11.2521385617 +B1974.31,276.744760981,21.4151577082,276.744862642,21.4151677292,276.74465932,21.4151476871 +B1999.21,281.461357214,-15.511897988,281.461455717,-15.5118901893,281.46125871,-15.5119057865 +B1980.19,306.867413859,-11.9467360888,306.867501237,-11.9467197906,306.86732648,-11.946752387 +B1987.98,341.966066455,-2.82477813631,341.966112735,-2.82476612903,341.966020175,-2.82479014361 +B1984.23,38.6362483924,9.3322810896,38.6362039361,9.33227526676,38.6362928487,9.33228691243 +B1996.62,327.861128148,-46.529254733,327.861222674,-46.5291991016,327.86103362,-46.5293103644 +B1997.49,120.979858288,87.22617179,120.978013685,87.226204397,120.981702849,87.2261391801 +B1999.51,297.496953653,0.839666332936,297.497044724,0.83967387104,297.496862583,0.839658794827 +B1956.31,323.316228643,-0.794522598791,323.316298957,-0.794513783928,323.316158329,-0.794531413663 
+B1998.83,15.3775095611,-38.7740290611,15.3775004994,-38.7739636006,15.3775186228,-38.7740945216 +B1961.46,70.486199672,-24.0682131367,70.4861102148,-24.0681861769,70.4862891293,-24.0682400965 +B1959.30,106.020475905,36.6574903487,106.020358021,36.6575015631,106.020593788,36.6574791342 +B1975.46,225.719957006,-24.2326924255,225.720016128,-24.2327172566,225.719897883,-24.2326675945 +B1976.52,31.0403178442,23.2187819108,31.040282636,23.2187540208,31.0403530525,23.2188098008 +B1964.13,51.4602071324,-27.0058546166,51.4601381551,-27.0058147039,51.4602761098,-27.0058945294 +B1965.51,185.697546923,55.594260797,185.697531081,55.5943432416,185.697562765,55.5941783525 +B1965.49,248.162878677,-23.7609450888,248.162965707,-23.7609586287,248.162791647,-23.7609315488 +B1963.32,308.385291884,51.2349043028,308.385426622,51.2348753519,308.385157147,51.2349332534 +B1979.67,233.050205996,63.3093356498,233.050347232,63.3094022915,233.05006476,63.3092690079 +B1960.86,209.382723191,-41.4659129842,209.382762908,-41.4659667228,209.382683474,-41.4658592457 +B1970.12,256.001743835,-16.3448051664,256.001833404,-16.3448088895,256.001654267,-16.3448014432 +B1964.43,90.8700685367,21.3678694408,90.8699682366,21.3678706796,90.8701688369,21.3678682019 +B1958.69,324.057486054,57.4352750563,324.057615131,57.4352248218,324.057356976,57.4353252907 +B1961.29,159.225729446,-45.2472278228,159.225658238,-45.2472794744,159.225800655,-45.2471761712 +B1999.43,7.38749687642,-53.1540997613,7.38750715011,-53.1540192078,7.38748660267,-53.1541803148 +B1971.70,345.477965039,-10.1831007688,345.478006755,-10.1830778328,345.477923323,-10.1831237048 +B1991.41,234.801152081,71.8511934075,234.80136258,71.8512610944,234.800941584,71.8511257203 +B1978.63,184.754250038,-66.4894904918,184.754223702,-66.4895738307,184.754276373,-66.4894071529 +B1982.60,245.64829793,-38.7682176459,245.648397087,-38.7682459424,245.648198773,-38.7681893494 
+B1986.49,176.234540627,12.5643501076,176.234515663,12.564377805,176.23456559,12.5643224102 +B1969.56,333.536461653,-55.645568776,333.536564215,-55.6455021935,333.53635909,-55.6456353585 +B1969.64,185.716717981,-21.5568171888,185.71670839,-21.5568445326,185.716727571,-21.556789845 +B1992.98,25.9775574253,12.7249831044,25.9775324561,12.7249706335,25.9775823945,12.7249955753 +B1990.50,204.302987352,-36.6989586206,204.303014372,-36.6990074874,204.302960331,-36.6989097538 +B1991.83,221.487546141,22.5689795999,221.487598122,22.569018351,221.487494159,22.5689408487 +B1959.40,338.956666009,-30.7135370512,338.956724763,-30.7134891887,338.956607255,-30.7135849138 +B1967.98,149.5308077,21.1458572723,149.530740161,21.1458902834,149.530875238,21.1458242612 +B1974.10,95.1983908472,-1.61163007915,95.1982963974,-1.61162187599,95.198485297,-1.6116382823 +B1998.30,35.0615395317,-28.6207880841,35.0614956333,-28.620739571,35.0615834301,-28.6208365972 +B1978.17,174.903919876,-25.7547140538,174.903890465,-25.754746515,174.903949287,-25.7546815927 +B1991.38,167.27863063,54.1842744725,167.278565096,54.1843495205,167.278696164,54.1841994246 +B1953.81,10.7133541168,-26.6356033619,10.7133548501,-26.6355537205,10.7133533835,-26.6356530033 +B1977.66,249.939886269,43.0233288254,249.939997359,43.0233681421,249.939775179,43.0232895085 +B1977.40,258.100960451,-37.3838036503,258.101070404,-37.3838198729,258.1008505,-37.3837874275 +B1995.27,262.732112385,-19.8057986634,262.732208125,-19.8058013404,262.732016645,-19.8057959863 +B1968.47,149.166366188,63.2857703333,149.166225063,63.2858369635,149.166507312,63.2857037031 +B1995.06,5.4355841259,0.695799807062,5.43559350993,0.695806590879,5.43557474185,0.695793023234 +B1957.03,327.231056694,-11.1377396332,327.231123747,-11.137718635,327.230989642,-11.1377606314 +B1954.96,284.17633852,-71.0631656787,284.17663058,-71.0631583005,284.176046459,-71.0631730565 +B1998.66,59.4717008987,14.0960045791,59.4716277587,14.0959969126,59.4717740389,14.0960122456 
+B1997.10,112.602946077,-17.7763932222,112.6028484,-17.7763914439,112.603043755,-17.7763950006 +B1979.55,219.940310095,-26.5130440909,219.940361247,-26.5130741126,219.940258944,-26.5130140693 +B1952.60,131.216503219,-60.6790709392,131.216335542,-60.6791085681,131.216670895,-60.6790333101 +B1952.51,56.1738921125,-19.3427782341,56.1738209005,-19.3427485454,56.1739633247,-19.3428079229 +B1966.23,63.8293728328,-59.8347944156,63.8292225342,-59.8347407237,63.829523132,-59.8348481073 +B1968.79,312.440281577,-82.909075449,312.440938353,-82.9090254915,312.439624792,-82.9091254056 +B1988.21,104.43408064,-66.6447299251,104.433841614,-66.6447318349,104.434319666,-66.644728015 +B1992.96,210.664663673,-17.5831928536,210.664697001,-17.5832123123,210.664630345,-17.5831733949 +B1977.29,163.438155327,-54.6954182678,163.438079056,-54.6954822858,163.438231598,-54.6953542498 +B1966.19,148.024127582,2.32865180198,148.024062692,2.32866254348,148.024192472,2.32864106049 +B1970.29,317.748400264,-34.6457182874,317.748492841,-34.6456795601,317.748307686,-34.6457570147 +B1955.48,249.374885326,79.5246095403,249.375329338,79.5246600743,249.374441319,79.5245590057 +B1956.86,100.53840787,-27.7507223648,100.538300623,-27.7507149055,100.538515118,-27.750729824 +B1987.27,23.1984832267,21.1208388177,23.1984619158,21.1208127728,23.1985045377,21.1208648626 +B1993.82,71.5045009532,3.00896662959,71.504418313,3.00897208869,71.5045835934,3.00896117048 +B1962.95,335.405788093,-6.90098238794,335.40584389,-6.90096525284,335.405732296,-6.90099952305 +B1984.28,307.588884401,18.8511389183,307.588974176,18.8511327496,307.588794626,18.851145087 +B1967.96,343.704504442,-46.9224252956,343.704568407,-46.9223583286,343.704440477,-46.9224922627 +B1950.30,18.8112053675,35.1485289159,18.8111898096,35.1484812505,18.8112209256,35.1485765813 +B1988.06,208.609805013,-46.3894275721,208.609846395,-46.3894876445,208.609763631,-46.3893674997 
+B1970.70,172.978655994,15.4172636989,172.978625355,15.4172953255,172.978686632,15.4172320724 +B1966.69,7.8152324312,-34.9365736294,7.81523908357,-34.936512861,7.81522577882,-34.9366343978 +B1963.90,134.503366944,-72.4111269318,134.503104699,-72.4111743348,134.503629189,-72.4110795286 +B1979.63,149.073048424,14.7065160273,149.072982715,14.7065415958,149.073114132,14.7064904588 +B1966.26,217.406604209,16.5186514295,217.406648071,16.518683228,217.406560347,16.518619631 +B1996.84,241.829541848,16.5114334946,241.82961848,16.5114581776,241.829465216,16.5114088117 +B1954.80,301.991652158,46.8228690265,301.991781762,46.8228497806,301.991522554,46.8228882722 +B1994.16,280.629434995,-19.0017596678,280.629535379,-19.0017524272,280.629334611,-19.0017669083 +B1978.40,144.252375855,-10.2581330338,144.252305474,-10.258136788,144.252446236,-10.2581292796 +B1953.10,286.0305233,12.7464714044,286.030620257,12.7464773437,286.030426344,12.7464654651 +B1993.75,321.524751743,61.8464645226,321.524904902,61.8464140081,321.524598583,61.846515037 +B1961.24,94.4962887092,-44.0946278203,94.4961574273,-44.0946145181,94.4964199912,-44.0946411224 +B1989.97,356.110922656,-39.1892569317,356.110954348,-39.1891928509,356.110890964,-39.1893210125 +B1990.09,307.190555646,-43.7191034979,307.190673602,-43.7190689248,307.190437689,-43.719138071 +B1951.45,263.331776174,25.1917278571,263.331876059,25.1917473693,263.331676289,25.1917083448 +B1981.35,128.003624894,58.8666544649,128.003461169,58.8666953172,128.003788619,58.8666136124 +B1980.23,317.984216655,-8.89508525523,317.984293507,-8.89506861216,317.984139802,-8.8951018983 +B1953.91,312.465272698,5.18400310772,312.465354085,5.18400654399,312.465191311,5.18399967144 +B1988.65,344.0759205,-20.8070551085,344.07596665,-20.8070176615,344.07587435,-20.8070925556 +B1957.17,0.0386123471053,-42.7336081023,0.0386371599928,-42.7335390653,0.0385875341353,-42.7336771394 +B1973.18,5.95477509083,23.9728714179,5.95478442291,23.9728402559,5.95476575873,23.97290258 
+B1954.86,113.065220613,27.4191705733,113.065116003,27.4191866686,113.065325223,27.4191544779 +B1978.49,358.313822853,67.0446512684,358.313876751,67.0445691316,358.313768955,67.0447334052 +B1970.19,53.5839203362,-15.011852649,53.5838539771,-15.0118268548,53.5839866953,-15.0118784432 +B1979.33,60.2557627351,25.6833225299,60.2556830704,25.6833027692,60.2558423998,25.6833422906 +B1987.44,273.08593329,76.4393919681,273.086334137,76.439406706,273.085532444,76.4393772296 +B1994.48,25.0306798156,-51.1202356021,25.0306434336,-51.1201589045,25.0307161977,-51.1203122997 +B1968.97,253.970437895,31.094899255,253.970536535,31.0949284071,253.970339254,31.0948701027 +B1964.62,168.89950144,-43.2270950714,168.899452201,-43.2271494771,168.89955068,-43.2270406658 +B1975.46,3.66775780511,39.2622225734,3.66777368182,39.26216915,3.66774192836,39.2622759968 +B1976.64,278.936590632,6.21231840756,278.936686041,6.21232668172,278.936495223,6.21231013337 +B1955.27,285.91236301,9.40548699672,285.912458882,9.40549352262,285.912267137,9.40548047079 +B1952.30,53.8450026285,60.7259893436,53.8448709018,60.7259324097,53.8451343557,60.7260462774 +B1981.10,8.53330744443,-7.54498028811,8.5333117472,-7.54495997493,8.53330314165,-7.54500060131 +B1991.12,274.342957522,-1.24603088049,274.3430518,-1.24602319414,274.342863244,-1.24603856684 +B1952.75,80.5212647616,19.4060625392,80.5211705543,19.4060589302,80.521358969,19.4060661482 +B1989.90,94.3827831954,15.0883386826,94.382685566,15.0883434466,94.3828808249,15.0883339185 +B1962.21,164.473020999,-47.6965440186,164.472957775,-47.69660143,164.473084223,-47.6964866073 +B1990.18,89.9736906625,-16.9964263489,89.973593279,-16.9964134056,89.9737880461,-16.9964392923 +B1964.91,204.582082173,15.6789515837,204.582105142,15.678984165,204.582059203,15.6789190023 diff --git a/astropy/coordinates/tests/accuracy/fk4_no_e_fk5.csv b/astropy/coordinates/tests/accuracy/fk4_no_e_fk5.csv new file mode 100644 index 0000000..99102a4 --- /dev/null +++ 
b/astropy/coordinates/tests/accuracy/fk4_no_e_fk5.csv @@ -0,0 +1,202 @@ +# This file was generated with the ref_fk4_no_e_fk5.py script, and the reference values were computed using AST +equinox_fk4,equinox_fk5,obstime,ra_in,dec_in,ra_fk5,dec_fk5,ra_fk4,dec_fk4 +B1948.36,J1992.59,B1995.95,334.661793414,43.9385116594,335.127505587,44.1614743713,334.19703321,43.7164045503 +B1971.64,J2006.23,B1954.56,113.895199649,-14.1109832563,114.294239451,-14.189617335,113.496041526,-14.0335757922 +B1970.49,J2015.57,B1953.55,66.2107722038,-7.76265420193,66.7573654302,-7.66250556575,65.6644607308,-7.86499337709 +B1931.50,J1999.69,B1970.69,73.6417002791,41.7006137481,74.8414427945,41.8037189279,72.4451689528,41.5898910005 +B1951.47,J1977.66,B1960.78,204.381010469,-14.9357743223,204.732916483,-15.0684119497,204.02947143,-14.8027671534 +B1955.96,J1999.16,B1975.98,214.396093073,-66.7648451487,215.271219746,-66.9622610907,213.531009752,-66.5653657951 +B1956.23,J2000.23,B1977.93,347.225227105,6.27744217753,347.783144277,6.51660389395,346.667337259,6.03880786927 +B1957.34,J1996.85,B1973.69,235.143754874,-5.59566003897,235.668034446,-5.72055011897,234.619987804,-5.46911905342 +B1941.60,J1993.80,B1960.79,269.606389512,26.7823112195,270.128504362,26.7816404236,269.084278188,26.7856304113 +B1930.71,J2013.89,B1961.97,235.285153507,-14.0695156888,236.447792421,-14.3293747521,234.125715822,-13.8019427393 +B1953.56,J1980.00,B1960.84,269.177331338,42.9472695107,269.379190001,42.9454157845,268.975475883,42.9496418506 +B1940.10,J1975.82,B1982.78,346.070424986,-3.51848810713,346.530942755,-3.32528640922,345.609649936,-3.71130492658 +B1934.68,J2014.12,B1992.32,3.01978725896,7.19732176646,4.04111300197,7.63872974164,1.9996375316,6.75549866988 +B1953.24,J2017.66,B1996.52,38.3199756112,18.8080489808,39.2225541698,19.0876452406,37.4201227465,18.5249551135 +B1955.52,J1986.19,B1990.02,107.533336957,-4.33088623215,107.914038138,-4.38286340945,107.152514675,-4.27999097547 
+B1927.27,J2006.35,B1984.04,236.30802591,14.3162535375,237.227969566,14.0749779959,235.388744829,14.5634084162 +B1974.27,J1978.23,B1960.36,291.532518915,-33.7960784017,291.597238932,-33.7879646382,291.467788569,-33.8041689728 +B1930.19,J1986.95,B1987.08,313.983328941,27.7572327639,314.590894151,27.9778790422,313.375876285,27.5389973059 +B1945.29,J1997.99,B1984.85,347.273135054,-13.6880685538,347.963547495,-13.4015008868,346.58154003,-13.9738567052 +B1958.28,J2008.13,B1969.09,260.526724891,-37.6134342267,261.376886242,-37.6570793786,259.677433211,-37.5657291394 +B1934.85,J1985.89,B1992.51,231.291118043,-27.2371455509,232.060225806,-27.4133463836,230.524106155,-27.0579724511 +B1937.09,J1998.50,B1976.41,258.283303492,-30.1025933842,259.264766067,-30.1691519653,257.303071837,-30.0303039078 +B1956.16,J2023.91,B1994.65,168.335642599,-44.084769302,169.131984863,-44.4546574256,167.543307692,-43.7159381708 +B1964.94,J2000.65,B1991.03,117.210483914,32.8708634152,117.781943773,32.7790791562,116.63804006,32.9608828232 +B1952.23,J1998.51,B1961.43,158.272058119,-29.286471988,158.811965795,-29.5262894831,157.73289082,-29.0475527364 +B1934.88,J2008.31,B1991.03,262.688069789,-48.1516431413,264.082620089,-48.1987316304,261.295758898,-48.0946938009 +B1964.21,J2001.06,B1956.93,357.845250924,19.2890677934,358.315118415,19.4941375001,357.375940593,19.084061288 +B1965.72,J1987.86,B1974.12,243.674536239,-10.0431678136,243.97803572,-10.097540261,243.371196745,-9.98821027624 +B1960.54,J2016.21,B1957.44,284.696106425,19.6051067047,285.302622767,19.6853290904,284.089422958,19.5280584762 +B1972.20,J1981.44,B1972.41,61.5291328053,18.6403709997,61.6630317661,18.6648463372,61.3952747433,18.6157899771 +B1967.75,J1983.60,B1983.30,9.66573928438,-22.9075078717,9.8627174508,-22.8205464878,9.46866122286,-22.9945202022 +B1973.18,J1983.75,B1989.45,288.133287813,-36.6947385674,288.310596498,-36.676339325,287.955909092,-36.712964737 
+B1948.23,J1994.10,B1983.10,325.340113758,-33.7758802174,326.023797476,-33.5649649991,324.65398011,-33.9850593768 +B1949.25,J1980.08,B1985.58,8.88343575454,-49.4693354042,9.24701151693,-49.2998476535,8.51878534341,-49.6389915796 +B1954.32,J1994.49,B1994.40,177.029034641,-67.7755279684,177.517646511,-67.9988963388,176.544747657,-67.552257953 +B1972.10,J2015.50,B1957.08,189.451860246,-68.7071945134,190.114123213,-68.9453284555,188.797874924,-68.4686046268 +B1943.61,J1992.69,B1957.38,214.691763751,-32.6160600699,215.421492998,-32.8397553215,213.964722034,-32.3903875087 +B1954.91,J2018.83,B1966.30,18.7047162369,-32.9080620608,19.4489945613,-32.5717365496,17.9585532678,-33.2458719202 +B1955.68,J2022.94,B1951.59,322.232230099,14.4669345738,323.034821026,14.7645630389,321.42944541,14.1725191869 +B1953.00,J2016.94,B1984.39,262.175824918,51.7319974933,262.548281917,51.6846881399,261.803746337,51.7815981232 +B1930.93,J1980.75,B1988.24,294.6060041,34.0181871087,295.074015891,34.1347005761,294.137889278,33.9037336792 +B1945.15,J2003.12,B1967.50,180.08019102,26.2892216009,180.821706382,25.9664807149,179.336612509,26.6119683301 +B1936.07,J1980.42,B1980.80,291.668187169,-22.2789167174,292.329992922,-22.1864262743,291.005523355,-22.3687549985 +B1964.41,J2018.79,B1997.92,34.548669268,-15.8924906144,35.1967101241,-15.6441308582,33.9006331013,-16.1427921034 +B1963.20,J1992.50,B1964.55,78.8220157436,-37.4332268082,79.075079173,-37.4019554736,78.5689855058,-37.465204993 +B1933.72,J2019.89,B1984.33,93.1388621771,60.5731416456,95.0905202877,60.5387165097,91.184708092,60.591240446 +B1961.19,J1981.21,B1952.11,168.518071423,7.09229333513,168.777442158,6.98298378221,168.258596163,7.20150240716 +B1971.23,J2006.89,B1953.13,165.374352937,39.3890686842,165.87176885,39.196720756,164.875283704,39.5809806285 +B1948.80,J2018.63,B1990.72,255.423520875,-17.5881075751,256.438156117,-17.6826060848,254.410141307,-17.4869506738 
+B1970.65,J1975.05,B1971.83,64.0990821181,36.8289797648,64.172215273,36.8396700703,64.0259656206,36.8182613277 +B1946.87,J1990.24,B1969.60,191.321958369,-52.3532066605,191.941068845,-52.5897148324,190.706679307,-52.1161877868 +B1928.29,J1976.44,B1966.53,60.3872023631,25.1025882655,61.1139601332,25.2335783606,59.6618880776,24.9686447968 +B1943.19,J2002.49,B1972.88,276.773010626,56.6051138031,277.035261703,56.6448029825,276.510294672,56.5669265636 +B1934.47,J1983.76,B1991.77,334.141397682,37.3852087993,334.681936673,37.63269657,333.601820309,37.1388490904 +B1932.42,J2004.50,B1973.34,219.417716878,-20.2290328911,220.436864842,-20.53677356,218.402163145,-19.9167676954 +B1935.55,J1975.26,B1971.06,54.0660580808,-29.3264933861,54.4742787759,-29.1973856015,53.6578513784,-29.4568765774 +B1968.98,J1989.10,B1978.54,176.26561333,-0.572718169429,176.523526883,-0.684515911301,176.007690571,-0.460953265257 +B1965.89,J2012.99,B1986.95,135.84418338,-9.94938261687,136.4156383,-10.1384151142,135.27243952,-9.76217234374 +B1956.58,J2018.60,B1952.75,305.496508312,-8.63421746611,306.333192119,-8.43166153129,304.658373119,-8.83266452583 +B1972.76,J2000.27,B1981.21,327.995002307,-58.3471659896,328.478135531,-58.216943679,327.509419894,-58.4767020929 +B1930.95,J1999.19,B1981.05,138.185539617,11.9337947187,139.11218066,11.6486009656,137.256622001,12.2148869077 +B1955.11,J1977.39,B1950.06,113.578525223,29.6301583121,113.928637253,29.5801804457,113.228110262,29.6794410184 +B1941.57,J2012.54,B1980.14,204.621895006,36.5235009134,205.408269314,36.1654570594,203.833462777,36.8838069508 +B1966.08,J2016.57,B1952.01,67.6144926088,-13.7094836718,68.1982560465,-13.6037505529,67.030977723,-13.8178646409 +B1957.99,J2018.30,B1979.29,45.3029557779,36.4639084123,46.2543764288,36.6980750272,44.3559469687,36.2257877401 +B1946.13,J2016.34,B1972.42,247.534489816,-3.23349952461,248.455025871,-3.37995755876,246.615033795,-3.08124145001 
+B1960.80,J1999.98,B1967.69,287.858418461,26.2825631559,288.257968726,26.350185895,287.458797059,26.2163884515 +B1935.76,J1975.44,B1996.68,206.473163472,-38.4312130715,207.060791642,-38.6284341117,205.887695173,-38.2329839969 +B1925.84,J1992.06,B1963.36,350.362793376,-7.51631961926,351.218703416,-7.15237789524,349.505768066,-7.87933870474 +B1939.04,J2012.01,B1964.06,228.259575769,40.311002157,228.937164323,40.0423286476,227.581733934,40.5832613094 +B1955.09,J2020.54,B1975.25,319.831820932,40.7337792676,320.468436705,41.0135236496,319.195878847,40.4566449257 +B1948.03,J1989.70,B1982.34,178.349313153,-38.3854710615,178.878815281,-38.6173901794,177.821462042,-38.1536135802 +B1960.53,J1984.34,B1998.53,126.58195076,-73.6980337652,126.522212859,-73.7769656974,126.639550025,-73.6189928568 +B1933.23,J2019.21,B1951.79,257.122932676,24.0154376566,258.016684748,23.9123993004,256.229480593,24.1257542269 +B1972.01,J1994.20,B1971.16,181.414481921,-17.7858263698,181.700080407,-17.9093349916,181.129088126,-17.6623025404 +B1972.77,J2005.85,B1979.42,81.2295383474,-9.26450146427,81.6239207678,-9.2370487074,80.8352159785,-9.29320699924 +B1974.04,J2004.85,B1986.59,88.1907984871,32.4238226453,88.6946934578,32.4284817102,87.6869564835,32.4176559135 +B1927.94,J1991.17,B1958.78,285.408252018,67.7826509035,285.385328422,67.8761253941,285.427216468,67.6890523656 +B1962.02,J2007.00,B1975.53,178.262069224,51.7327600597,178.846486725,51.48241739,177.67431932,51.983025032 +B1955.03,J1997.43,B1975.01,329.433722424,-46.8960749035,330.103614247,-46.692118107,328.760372892,-47.0986245326 +B1929.45,J2009.92,B1994.64,340.333860195,36.5560891832,341.254677798,36.9791399195,339.41634063,36.1354568961 +B1974.47,J1983.10,B1969.13,191.963602676,21.3572019706,192.070505327,21.3101918576,191.856675141,21.4042306751 +B1952.44,J1984.77,B1983.14,90.8973340407,3.44588414281,91.3225022889,3.4423974082,90.4721556483,3.44803542582 
+B1958.72,J1999.14,B1952.34,259.510340943,47.0512387915,259.790647567,47.0108077311,259.230159931,47.0927522883 +B1961.24,J2000.00,B1987.56,132.277954966,30.4307232942,132.867785214,30.2847386621,131.686701777,30.5750623541 +B1953.42,J2013.40,B1968.44,179.513439448,-54.44865752,180.28117417,-54.7825927917,178.751891964,-54.1147599287 +B1951.37,J1984.64,B1997.40,81.5670170865,-19.9451944488,81.9269374609,-19.9186092791,81.2071330825,-19.9729303814 +B1932.54,J2024.61,B1967.36,127.283632829,-10.0946390302,128.389991343,-10.4090695278,126.176062442,-9.78808786127 +B1937.01,J1991.26,B1984.19,234.306643184,-86.4404274379,239.159196268,-86.6062091923,229.877779523,-86.2547679857 +B1945.13,J2017.30,B1991.23,112.65584231,11.2521500479,113.653522343,11.0941650144,111.656584771,11.4036739314 +B1928.39,J2015.91,B1974.31,276.744760981,21.4151577082,277.676112471,21.4763175641,275.813216777,21.3618641896 +B1962.92,J2020.33,B1999.21,281.461357214,-15.511897988,282.28369792,-15.4461443386,280.638389569,-15.573154518 +B1942.13,J2011.97,B1980.19,306.867413859,-11.9467360888,307.826980276,-11.7108610078,305.905696925,-12.1773958306 +B1974.49,J1990.83,B1987.98,341.966066455,-2.82477813631,342.177020368,-2.73822865168,341.755054209,-2.91122392552 +B1969.43,J1976.38,B1984.23,38.6362483924,9.3322810896,38.7295143004,9.36248136387,38.5430036498,9.30204150263 +B1971.93,J2003.15,B1996.62,327.861128148,-46.529254733,328.357950708,-46.3816911362,327.362455314,-46.6760151537 +B1961.96,J2022.83,B1997.49,120.979858288,87.22617179,127.356289341,87.0356348542,113.806804821,87.3823224486 +B1926.35,J1982.80,B1999.51,297.496953653,0.839666332936,298.215707802,0.986504246823,296.777520197,0.696326958488 +B1944.12,J2012.89,B1956.31,323.316228643,-0.794522598791,324.199877999,-0.485711290555,322.43128765,-1.09980368764 +B1925.53,J1977.07,B1998.83,15.3775095611,-38.7740290611,15.975820035,-38.4977806231,14.777479512,-39.0510731102 
+B1928.26,J1984.73,B1961.46,70.486199672,-24.0682131367,71.0773386429,-23.9647188389,69.8952283629,-24.1747645717 +B1959.07,J2001.01,B1959.30,106.020475905,36.6574903487,106.724489447,36.5916635763,105.315480342,36.7205573317 +B1974.33,J1998.24,B1975.46,225.719957006,-24.2326924255,226.069642,-24.3253436846,225.370713094,-24.1394598386 +B1958.31,J2014.48,B1976.52,31.0403178442,23.2187819108,31.8305300515,23.4855979353,30.252581142,22.9497454125 +B1945.76,J1981.40,B1964.13,51.4602071324,-27.0058546166,51.8377992184,-26.8827293131,51.0826217069,-27.1300027947 +B1927.06,J2019.62,B1965.51,185.697546923,55.594260797,186.80220854,55.0820030044,184.579796584,56.1075102073 +B1969.71,J1983.82,B1965.49,248.162878677,-23.7609450888,248.376028149,-23.7900358626,247.949821476,-23.7315830399 +B1960.34,J1996.74,B1963.32,308.385291884,51.2349043028,308.653885549,51.3611262862,308.116543047,51.1094272568 +B1948.94,J1982.47,B1979.67,233.050205996,63.3093356498,233.183624905,63.1972984089,232.917717277,63.4217190672 +B1935.78,J2009.44,B1960.86,209.382723191,-41.4659129842,210.508471779,-41.8212717612,208.265390379,-41.1066153061 +B1929.09,J2015.70,B1970.12,256.001743835,-16.3448051664,257.249402003,-16.4563411788,254.755864037,-16.2230882626 +B1958.66,J1984.63,B1964.43,90.8700685367,21.3678694408,91.2595104175,21.3651813051,90.4806144234,21.369574816 +B1974.74,J2003.91,B1958.69,324.057486054,57.4352750563,324.282176393,57.5669676284,323.83284781,57.3039563174 +B1954.68,J2011.04,B1961.29,159.225729446,-45.2472278228,159.836674886,-45.541209348,158.616784774,-44.9544310498 +B1967.01,J1998.76,B1999.43,7.38749687642,-53.1540997613,7.76348958513,-52.978901243,7.01015979396,-53.3294476754 +B1932.65,J1988.10,B1971.70,345.477965039,-10.1831007688,346.201723123,-9.88376000525,344.753111544,-10.4814629211 +B1968.81,J2011.71,B1991.41,234.801152081,71.8511934075,234.75819291,71.7134587916,234.849277912,71.9887729277 
+B1952.24,J1992.46,B1978.63,184.754250038,-66.4894904918,185.31507512,-66.7125454196,184.198875868,-66.2662547282 +B1974.18,J2008.57,B1982.60,245.64829793,-38.7682176459,246.229734859,-38.8462753796,245.067899116,-38.6883914108 +B1961.79,J1977.75,B1986.49,176.234540627,12.5643501076,176.440478946,12.4756870596,176.028521797,12.6529921788 +B1929.65,J2019.85,B1969.56,333.536461653,-55.645568776,335.008274077,-55.1931712959,332.042112951,-56.0921730529 +B1939.61,J2001.08,B1969.64,185.716717981,-21.5568171888,186.518819162,-21.8971031217,184.916731889,-21.2160545858 +B1938.65,J1988.76,B1992.98,25.9775574253,12.7249831044,26.6478047282,12.9750431145,25.30853404,12.4734949987 +B1928.56,J2017.18,B1990.50,204.302987352,-36.6989586206,205.594712571,-37.1462777072,203.0212666,-36.2470793085 +B1959.00,J1997.12,B1991.83,221.487546141,22.5689795999,221.917649745,22.4105410037,221.057403953,22.7284735519 +B1936.24,J2008.46,B1959.40,338.956666009,-30.7135370512,339.96511112,-30.3370017697,337.943007518,-31.0875245016 +B1952.57,J2024.63,B1967.98,149.5308077,21.1458572723,150.530972008,20.7983136142,148.526893227,21.4898432119 +B1963.49,J2017.63,B1974.10,95.1983908472,-1.61163007915,95.8836923542,-1.64073778617,94.5129553374,-1.58611303786 +B1935.59,J2021.68,B1998.30,35.0615395317,-28.6207880841,36.013756926,-28.230800891,34.108208406,-29.0153533631 +B1939.64,J2018.11,B1978.17,174.903919876,-25.7547140538,175.892230462,-26.1901662894,173.918861307,-25.3199298979 +B1942.82,J1978.35,B1991.38,167.27863063,54.1842744725,167.792865117,53.9911451576,166.761561647,54.3770117742 +B1972.82,J1989.59,B1953.81,10.7133541168,-26.6356033619,10.9196530538,-26.5438625332,10.5069242085,-26.7274067313 +B1958.01,J1984.82,B1977.66,249.939886269,43.0233288254,250.152708941,42.9723921243,249.727120549,43.0747862488 +B1972.53,J1995.55,B1977.40,258.100960451,-37.3838036503,258.492060536,-37.4098004846,257.710089764,-37.356950996 
+B1929.84,J1995.96,B1995.27,262.732112385,-19.8057986634,263.710992923,-19.8492450608,261.753853467,-19.7561160195 +B1938.23,J2022.91,B1968.47,149.166366188,63.2857703333,150.716387336,62.8777259431,147.585925919,63.6872128631 +B1938.61,J2021.20,B1995.06,5.4355841259,0.695799807062,6.49453219621,1.15308515723,4.37708671511,0.237709638907 +B1966.75,J2024.18,B1957.03,327.231056694,-11.1377396332,328.000338248,-10.8677167475,326.460220195,-11.4054365616 +B1965.64,J1994.07,B1954.96,284.17633852,-71.0631656787,284.986812185,-71.023307137,283.363277683,-71.1008495735 +B1939.69,J2023.20,B1998.66,59.4717008987,14.0960045791,60.6437035945,14.328043327,58.3026637155,13.8557849823 +B1957.49,J2004.52,B1997.10,112.602946077,-17.7763932222,113.128066437,-17.8781256601,112.077653347,-17.6768763275 +B1946.05,J2000.05,B1979.55,219.940310095,-26.5130440909,220.729784133,-26.7422090278,219.153387495,-26.2812240409 +B1928.73,J1989.10,B1952.60,131.216503219,-60.6790709392,131.538762739,-60.9012667241,130.892392069,-60.458301418 +B1961.94,J1983.12,B1952.51,56.1738921125,-19.3427782341,56.4110363048,-19.277323069,55.9367786527,-19.4086389221 +B1954.27,J1997.44,B1966.23,63.8293728328,-59.8347944156,64.0119166618,-59.729130492,63.6478301692,-59.9411437379 +B1942.23,J1992.36,B1968.79,312.440281577,-82.909075449,314.686445788,-82.716709549,310.089004633,-83.0931764134 +B1937.90,J2001.19,B1988.21,104.43408064,-66.6447299251,104.453108425,-66.7326176146,104.411797906,-66.5569652662 +B1939.59,J2002.19,B1992.96,210.664663673,-17.5831928536,211.524274634,-17.8816169868,209.807499075,-17.2821058793 +B1963.49,J1975.54,B1977.29,163.438155327,-54.6954182678,163.565812054,-54.7597430346,163.310636547,-54.6311360803 +B1946.22,J1989.83,B1966.19,148.024127582,2.32865180198,148.587881848,2.12205454824,147.4598279,2.53398329697 +B1939.43,J1983.41,B1970.29,317.748400264,-34.6457182874,318.424549161,-34.4635023474,317.070001399,-34.8259880426 
+B1938.21,J2022.93,B1955.48,249.374885326,79.5246095403,248.102397286,79.3535339601,250.706479605,79.6856520522 +B1938.85,J1986.82,B1956.86,100.53840787,-27.7507223648,101.014813183,-27.8006647295,100.061930435,-27.7029635357 +B1967.12,J2016.63,B1987.27,23.1984832267,21.1208388177,23.8758085173,21.373531354,22.5228650317,20.8668643466 +B1933.48,J2010.39,B1993.82,71.5045009532,3.00896662959,72.5117326573,3.14121771682,70.4983597655,2.86958134305 +B1937.60,J2003.03,B1962.95,335.405788093,-6.90098238794,336.26168125,-6.56861752396,334.548406282,-7.23108048852 +B1954.59,J2010.06,B1984.28,307.588884401,18.8511389183,308.216046805,19.0408491208,306.961527852,18.6641076356 +B1973.77,J1985.60,B1967.96,343.704504442,-46.9224252956,343.875951545,-46.8591633148,343.532811099,-46.9856319247 +B1925.21,J2009.55,B1950.30,18.8112053675,35.1485289159,20.0024206727,35.5914321179,17.6282310843,34.702488267 +B1937.00,J2006.08,B1988.06,208.609805013,-46.3894275721,209.692670963,-46.7252945952,207.535889978,-46.0500943647 +B1961.33,J2010.87,B1970.70,172.978655994,15.4172636989,173.622309813,15.1433334088,172.333980129,15.6908149586 +B1937.54,J1982.63,B1966.69,7.8152324312,-34.9365736294,8.36836736742,-34.688026104,7.26063844242,-35.1854511343 +B1970.91,J1979.71,B1963.90,134.503366944,-72.4111269318,134.50595835,-72.4454843369,134.500551115,-72.3767711725 +B1950.41,J2022.56,B1979.63,149.073048424,14.7065160273,150.050293767,14.3602043524,148.092931537,15.0493015642 +B1950.13,J1995.20,B1966.26,217.406604209,16.5186514295,217.938935407,16.3200334608,216.874300735,16.7186856279 +B1960.62,J2007.17,B1996.84,241.829541848,16.5114334946,242.358489224,16.3901534117,241.300790354,16.6348221277 +B1956.99,J2023.02,B1954.80,301.991652158,46.8228690265,302.505263266,47.0190374374,301.477638783,46.6294966913 +B1939.22,J2016.87,B1994.16,280.629434995,-19.0017596678,281.769997009,-18.9177962123,279.487664877,-19.0772612574 
+B1925.49,J1981.53,B1978.40,144.252375855,-10.2581330338,144.937158573,-10.5124362227,143.567309058,-10.006008783 +B1954.74,J2000.05,B1953.10,286.0305233,12.7464714044,286.556205108,12.8172544241,285.504676194,12.6779134146 +B1967.05,J1984.09,B1993.75,321.524751743,61.8464645226,321.632971919,61.9207781138,321.416450206,61.7722624353 +B1949.33,J1977.52,B1961.24,94.4962887092,-44.0946278203,94.7059415448,-44.1072194614,94.2866143528,-44.0826087841 +B1964.04,J2019.75,B1989.97,356.110922656,-39.1892569317,356.840327262,-38.8797072373,355.378110002,-39.4985381971 +B1945.63,J1988.30,B1990.09,307.190555646,-43.7191034979,307.916952389,-43.5743180165,306.461507156,-43.8614854707 +B1943.99,J1977.69,B1951.45,263.331776174,25.1917278571,263.675941152,25.1704984165,262.987636197,25.2140768109 +B1969.92,J1999.97,B1981.35,128.003624894,58.8666544649,128.605737687,58.7629529731,127.398829454,58.9689675244 +B1969.84,J2014.01,B1980.23,317.984216655,-8.89508525523,318.575678141,-8.71153679083,317.391922121,-9.07693345337 +B1961.02,J2002.85,B1953.91,312.465272698,5.18400310772,312.985580994,5.34203296971,311.944618588,5.02753402011 +B1939.24,J1981.75,B1988.65,344.0759205,-20.8070551085,344.644700272,-20.5791609914,343.505986239,-21.0343039973 +B1941.99,J1994.43,B1957.17,0.0386123471053,-42.7336081023,0.708747131881,-42.4416361362,359.365316955,-43.025582347 +B1939.26,J1987.88,B1973.18,5.95477509083,23.9728714179,6.5909526232,24.2419429246,5.32008332697,23.7034884386 +B1963.98,J2001.37,B1954.86,113.065220613,27.4191705733,113.643404238,27.3366368146,112.486263629,27.4997700815 +B1925.23,J2020.13,B1978.49,358.313822853,67.0446512684,359.505559565,67.5728883986,357.14681594,66.5167328688 +B1929.23,J2017.66,B1970.19,53.5839203362,-15.011852649,54.610957277,-14.7231509285,52.5576449921,-15.3076533827 +B1947.07,J2016.84,B1979.33,60.2557627351,25.6833225299,61.3134611254,25.8729083597,59.2011689172,25.4875201301 
+B1937.55,J1985.79,B1987.44,273.08593329,76.4393919681,272.591344908,76.4526927726,273.578774594,76.4237802487 +B1970.29,J1981.68,B1994.48,25.0306798156,-51.1202356021,25.1435488988,-51.0628198065,24.9177386023,-51.177704254 +B1969.04,J1981.01,B1968.97,253.970437895,31.094899255,254.085382476,31.0765584395,253.855499113,31.1133685873 +B1960.83,J2018.73,B1964.62,168.89950144,-43.2270950714,169.584603614,-43.5437889415,168.217301809,-42.9111416391 +B1936.93,J1979.00,B1975.46,3.66775780511,39.2622225734,4.22000281563,39.4958903381,3.11745305971,39.0284106254 +B1971.26,J1994.47,B1976.64,278.936590632,6.21231840756,279.220262021,6.23271017522,278.652884808,6.19255870349 +B1973.48,J1984.09,B1955.27,285.91236301,9.40548699672,286.039106102,9.42175539099,285.785609343,9.38934433416 +B1953.91,J1995.56,B1952.30,53.8450026285,60.7259893436,54.7155379279,60.8613834632,52.9800334576,60.5877592411 +B1938.45,J2016.67,B1981.10,8.53330744443,-7.54498028811,9.52668790991,-7.1149113183,7.53943191911,-7.97616983921 +B1940.05,J2023.92,B1991.12,274.342957522,-1.24603088049,275.427502795,-1.20626637794,273.258110296,-1.27698166464 +B1956.27,J1975.21,B1952.75,80.5212647616,19.4060625392,80.8007537943,19.4231777982,80.2418410241,19.3884398656 +B1963.99,J2002.99,B1989.90,94.3827831954,15.0883386826,94.9409404907,15.0706908672,93.8245150675,15.1038774277 +B1946.06,J2012.59,B1962.21,164.473020999,-47.6965440186,165.218388831,-48.0540734375,163.731379108,-47.3403011601 +B1957.85,J1994.50,B1990.18,89.9736906625,-16.9964263489,90.3810379284,-16.9970588536,89.5663435591,-16.9972444015 +B1946.18,J1990.43,B1964.91,204.582082173,15.6789515837,205.12023156,15.4553934359,204.04377436,15.9034725087 diff --git a/astropy/coordinates/tests/accuracy/galactic_fk4.csv b/astropy/coordinates/tests/accuracy/galactic_fk4.csv new file mode 100644 index 0000000..c3c8276 --- /dev/null +++ b/astropy/coordinates/tests/accuracy/galactic_fk4.csv @@ -0,0 +1,202 @@ +# This file was generated with the ref_galactic_fk4.py 
script, and the reference values were computed using AST +equinox_fk4,obstime,lon_in,lat_in,ra_fk4,dec_fk4,lon_gal,lat_gal +J1998.36,B1995.95,334.661793414,43.9385116594,215.729885213,-13.2119623291,95.9916336135,-10.7923599366 +J2021.64,B1954.56,113.895199649,-14.1109832563,0.0191713429163,47.9584946764,230.354307383,2.91031092906 +J2020.49,B1953.55,66.2107722038,-7.76265420193,307.0396671,25.0473933964,202.190459847,-36.2511029663 +J1981.50,B1970.69,73.6417002791,41.7006137481,249.552478408,47.490161693,163.738209835,-0.997514227815 +J2001.47,B1960.78,204.381010469,-14.9357743223,85.7262507794,0.592842446128,319.182343564,46.4865699629 +J2005.96,B1975.98,214.396093073,-66.7648451487,38.7974895634,-25.3131215325,311.259111645,-5.26093959516 +J2006.23,B1977.93,347.225227105,6.27744217753,251.681067557,-35.6975782982,82.4439145069,-48.3754431897 +J2007.34,B1973.69,235.143754874,-5.59566003897,108.194271484,-22.3032173532,0.622684927771,37.7376079889 +J1991.60,B1960.79,269.606389512,26.7823112195,159.265817549,-27.2400623832,52.4594618492,22.7351205489 +J1980.71,B1961.97,235.285153507,-14.0695156888,99.5923664647,-26.0329761781,353.421599279,31.5338685058 +J2003.56,B1960.84,269.177331338,42.9472695107,168.194363902,-13.373076419,69.4875812789,27.7142399301 +J1990.10,B1982.78,346.070424986,-3.51848810713,260.556249219,-42.5373980474,71.1723254841,-55.2318229113 +J1984.68,B1992.32,3.01978725896,7.19732176646,261.223075691,-22.5183053503,106.371052811,-54.3443814356 +J2003.24,B1996.52,38.3199756112,18.8080489808,268.244155911,13.0679884186,153.915977612,-37.8861321281 +J2005.52,B1990.02,107.533336957,-4.33088623215,345.276777715,55.2303472065,218.881057613,2.11460956182 +J1977.27,B1984.04,236.30802591,14.3162535375,126.558516177,-13.1859909524,24.4040838917,47.3681313134 +J2024.27,B1960.36,291.532518915,-33.7960784017,65.6262288958,-78.0827780664,4.70715132794,-21.1240080657 
+J1980.19,B1987.08,313.983328941,27.7572327639,204.395115343,-33.9974383642,72.5499341116,-11.3261456428 +J1995.29,B1984.85,347.273135054,-13.6880685538,273.878542915,-46.5817989568,57.2967846205,-62.636282227 +J2008.28,B1969.09,260.526724891,-37.6134342267,75.5423582477,-53.0108213216,349.993949344,-0.500521761262 +J1984.85,B1992.51,231.291118043,-27.2371455509,84.3750724965,-27.2619452007,340.394703326,24.1136027935 +J1987.09,B1976.41,258.283303492,-30.1025933842,87.5349107922,-50.9413101937,355.216758932,5.09769033822 +J2006.16,B1994.65,168.335642599,-44.084769302,44.0040901708,7.58736494962,284.861051883,15.3412175718 +J2014.94,B1991.03,117.210483914,32.8708634152,231.950026475,82.367116716,187.3264088,25.4619880653 +J2002.23,B1961.43,158.272058119,-29.286471988,46.4761761399,24.2223508812,269.917276667,24.5785034911 +J1984.88,B1991.03,262.688069789,-48.1516431413,57.7755536872,-52.5013674166,342.226051771,-7.92146528355 +J2014.21,B1956.93,357.845250924,19.2890677934,248.037990583,-19.4340699812,103.672360905,-41.3775036599 +J2015.72,B1974.12,243.674536239,-10.0431678136,108.027253494,-31.863249456,2.91619105856,28.4959537625 +J2010.54,B1957.44,284.696106425,19.6051067047,170.340404941,-40.0951306839,51.2254926849,7.33605738412 +J2022.20,B1972.41,61.5291328053,18.6403709997,277.730191309,33.3416109651,174.063892959,-24.5412790814 +J2017.75,B1983.30,9.66573928438,-22.9075078717,295.17981175,-30.0450764744,85.3259571782,-84.8466105492 +J2023.18,B1989.45,288.133287813,-36.6947385674,61.7090085882,-74.1820684991,0.817296879039,-19.4797887996 +J1998.23,B1983.10,325.340113758,-33.7758802174,307.31206399,-69.6283338955,11.6623486171,-48.8815187305 +J1999.25,B1985.58,8.88343575454,-49.4693354042,325.965770063,-35.6133692502,309.666273629,-67.4551398942 +J2004.32,B1994.40,177.029034641,-67.7755279684,31.2372626731,-12.9650951893,296.955672515,-5.62000346764 +J2022.10,B1957.08,189.451860246,-68.7071945134,33.5293665419,-17.1203080138,301.559917262,-5.75405801934 
+J1993.61,B1957.38,214.691763751,-32.6160600699,73.7224767298,-15.6028544376,323.538376206,26.6926709764 +J2004.91,B1966.30,18.7047162369,-32.9080620608,308.505564328,-25.5373410674,263.547066418,-82.3338996972 +J2005.68,B1951.59,322.232230099,14.4669345738,219.553504168,-44.4049264885,66.7343979667,-25.6090866517 +J2003.00,B1984.39,262.175824918,51.7319974933,169.003247618,-3.42937646572,78.8860186239,33.5626186817 +J1980.93,B1988.24,294.6060041,34.0181871087,184.771961476,-28.2403711462,68.3561968833,5.91397226579 +J1995.15,B1967.50,180.08019102,26.2892216009,115.670140935,39.4176352042,214.406973761,78.6105433559 +J1986.07,B1980.80,291.668187169,-22.2789167174,125.910652709,-78.6378819053,16.4272341834,-17.5632578893 +J2014.41,B1997.92,34.548669268,-15.8924906144,297.966081457,-5.74276095396,188.103833481,-67.1344687124 +J2013.20,B1964.55,78.8220157436,-37.4332268082,338.41386544,13.3803692475,241.413633182,-34.4957267196 +J1983.72,B1984.33,93.1388621771,60.5731416456,215.515242863,51.025917079,153.788670192,19.0304556569 +J2011.19,B1952.11,168.518071423,7.09229333513,86.7960140054,42.4095753728,249.125769518,59.3639239957 +J2021.23,B1953.13,165.374352937,39.3890686842,133.539016217,52.7585931969,177.607817121,63.8456065457 +J1998.80,B1990.72,255.423520875,-17.5881075751,104.861453622,-45.3027201728,3.92822669713,14.6469670383 +J2020.65,B1971.83,64.0990821181,36.8289797648,256.321772318,39.914355366,162.091739264,-10.1708006469 +J1996.87,B1969.60,191.321958369,-52.3532066605,47.9455555868,-9.26762335019,302.005131299,10.4906094438 +J1978.29,B1966.53,60.3872023631,25.1025882655,269.550425169,34.4047511633,168.612591713,-20.2821406332 +J1993.19,B1972.88,276.773010626,56.6051138031,178.703826596,-3.3434530167,85.5633134045,25.6194736799 +J1984.47,B1991.77,334.141397682,37.3852087993,218.479404766,-19.0772794069,91.968516951,-15.9701580762 +J1982.42,B1973.34,219.417716878,-20.2290328911,87.1109593231,-14.6004706289,334.420144542,35.926923956 
+J1985.55,B1971.06,54.0660580808,-29.3264933861,318.139103598,3.17485146412,226.277308044,-53.6788644695 +J2018.98,B1978.54,176.26561333,-0.572718169429,83.8491889252,31.8061689325,269.795526718,58.0046040213 +J2015.89,B1986.95,135.84418338,-9.94938261687,33.531734841,50.9381624137,238.492810285,23.3637333893 +J2006.58,B1952.75,305.496508312,-8.63421746611,200.932667574,-71.3815196871,35.373262982,-23.9548001451 +J2022.76,B1981.21,327.995002307,-58.3471659896,350.834964718,-54.0901563813,335.114901978,-45.751212695 +J1980.95,B1981.05,138.185539617,11.9337947187,58.631520613,69.0711593141,218.447399995,36.9072609382 +J2005.11,B1950.06,113.578525223,29.6301583121,263.636462216,81.3987773508,189.681688921,21.6122927944 +J1991.57,B1980.14,204.621895006,36.5235009134,133.680798913,22.0411777739,78.808513096,76.2737160193 +J2016.08,B1952.01,67.6144926088,-13.7094836718,313.132620076,22.6386092017,209.654020089,-37.5533649541 +J2007.99,B1979.29,45.3029557779,36.4639084123,253.115407251,25.0961411128,150.086524604,-19.5316401959 +J1996.13,B1972.42,247.534489816,-3.23349952461,117.046928774,-32.0174103517,11.9107776158,29.166414441 +J2010.80,B1967.69,287.858418461,26.2825631559,176.528272624,-34.7137695077,58.5625336575,7.68664923227 +J1985.76,B1996.68,206.473163472,-38.4312130715,65.3730874222,-11.8626802001,314.662473159,23.1067145187 +J1975.84,B1963.36,350.362793376,-7.51631961926,267.890694432,-41.0314521815,72.0362114405,-61.1485139616 +J1989.04,B1964.06,228.259575769,40.311002157,144.950655363,6.68810736787,66.3208521242,57.9479583999 +J2005.09,B1975.25,319.831820932,40.7337792676,206.517019036,-20.3728721852,85.376518971,-6.19125657467 +J1998.03,B1982.34,178.349313153,-38.3854710615,53.3367731423,6.47139870346,290.627978018,23.0870158337 +J2010.53,B1998.53,126.58195076,-73.6980337652,14.0339286057,-10.7989965101,287.063023216,-19.6793830621 +J1983.23,B1951.79,257.122932676,24.0154376566,147.559357599,-22.3222385045,45.3959106575,32.4293515474 
+J2022.01,B1971.16,181.414481921,-17.7858263698,71.6065443096,17.7205783522,287.370198984,43.7863289812 +J2022.77,B1979.42,81.2295383474,-9.26450146427,319.219354065,35.7151156735,211.413414641,-23.6522137945 +J2024.04,B1986.59,88.1907984871,32.4238226453,264.445366137,59.5075300274,177.63221018,2.82201785023 +J1977.94,B1958.78,285.408252018,67.7826509035,186.006666089,5.9317624038,98.5359798987,24.0369224888 +J2012.02,B1975.53,178.262069224,51.7327600597,149.616027267,42.1293667525,143.189803372,62.9449441105 +J2005.03,B1975.01,329.433722424,-46.8960749035,333.526505486,-61.2774858546,350.879342803,-50.7102611905 +J1979.45,B1994.64,340.333860195,36.5560891832,223.292370195,-17.1540759917,95.9213163317,-19.343112709 +J2024.47,B1969.13,191.963602676,21.3572019706,114.434412024,27.503830072,291.719299139,84.2576478027 +J2002.44,B1983.14,90.8973340407,3.44588414281,314.995461083,51.3170142615,204.308033362,-9.04466471803 +J2008.72,B1952.34,259.510340943,47.0512387915,164.869262172,-6.16993929742,73.122316095,35.0776892287 +J2011.24,B1987.56,132.277954966,30.4307232942,127.48825526,81.1338000052,193.90517209,37.3016929434 +J2003.42,B1968.44,179.513439448,-54.44865752,42.0526470828,-4.9188999851,295.12094312,7.62776271377 +J2001.37,B1997.40,81.5670170865,-19.9451944488,328.061837334,28.1928252216,222.515151909,-27.3578081826 +J1982.54,B1967.36,127.283632829,-10.0946390302,19.6477131913,52.4470778861,233.960957731,16.5452929993 +J1987.01,B1984.19,234.306643184,-86.4404274379,16.4715901103,-28.4445608636,305.546826719,-24.5135301459 +J1995.13,B1991.23,112.65584231,11.2521500479,339.527728916,71.3900724033,207.206370175,13.7939581949 +J1978.39,B1974.31,276.744760981,21.4151577082,162.407802968,-35.1255983687,49.8499233813,14.5681779743 +J2012.92,B1999.21,281.461357214,-15.511897988,128.74174115,-66.9721136327,18.3606180783,-5.64535469428 +J1992.13,B1980.19,306.867413859,-11.9467360888,207.123394118,-74.3365109128,32.8334594598,-26.7428218865 
+J2024.49,B1987.98,341.966066455,-2.82477813631,257.079269135,-45.5062849208,66.5166383458,-51.5988768204 +J2019.43,B1984.23,38.6362483924,9.3322810896,277.298399175,9.29190831823,160.679381282,-46.0098367014 +J2021.93,B1996.62,327.861128148,-46.529254733,334.498175895,-62.285753657,351.779204131,-49.5514803605 +J2011.96,B1997.49,120.979858288,87.22617179,193.112689708,29.8355211732,125.900479598,27.8877168914 +J1976.35,B1999.51,297.496953653,0.839666332936,181.085417356,-61.4154773919,40.6846762696,-12.8402998175 +J1994.12,B1956.31,323.316228643,-0.794522598791,232.744583525,-57.172811607,53.4475657454,-35.7640525677 +J1975.53,B1998.83,15.3775095611,-38.7740290611,313.687711979,-29.7532661336,292.123823728,-78.249288242 +J1978.26,B1961.46,70.486199672,-24.0682131367,323.035235152,17.9223915313,223.250351558,-38.1828370964 +J2009.07,B1959.30,106.020475905,36.6574903487,244.972778375,72.7878865437,180.327652321,18.0468441903 +J2024.33,B1975.46,225.719957006,-24.2326924255,86.0366112823,-21.5082578389,337.38182103,29.8986234954 +J2008.31,B1976.52,31.0403178442,23.2187819108,261.302299942,8.78047554861,143.772447485,-36.7511639142 +J1995.76,B1964.13,51.4602071324,-27.0058546166,315.065766587,2.55796617517,221.775446591,-55.7242078312 +J1977.06,B1965.51,185.697546923,55.594260797,153.338541661,37.440299398,131.049487592,61.2247044105 +J2019.71,B1965.49,248.162878677,-23.7609450888,94.0591617121,-40.8224910786,354.392971779,16.4900856964 +J2010.34,B1963.32,308.385291884,51.2349043028,196.476156832,-11.5444983515,88.2071202318,6.64482242886 +J1998.94,B1979.67,233.050205996,63.3093356498,166.867510536,15.6637804493,98.5156204798,45.4612418027 +J1985.78,B1960.86,209.382723191,-41.4659129842,63.4734186744,-15.1442383851,316.163237599,19.6100595169 +J1979.09,B1970.12,256.001743835,-16.3448051664,106.649451565,-45.3254094121,5.44744564064,14.6683716892 +J2008.66,B1964.43,90.8700685367,21.3678694408,287.008253726,60.3030901692,188.510594416,-0.450177420343 
+J2024.74,B1958.69,324.057486054,57.4352750563,204.387834151,-3.71156697525,98.8294418875,3.92980570683 +J2004.68,B1961.29,159.225729446,-45.2472278228,37.9943842023,10.4609560801,279.445767043,11.4162684154 +J2017.01,B1999.43,7.38749687642,-53.1540997613,330.796893447,-36.432735539,310.561617023,-63.5428435122 +J1982.65,B1971.70,345.477965039,-10.1831007688,267.939382589,-46.5465175303,61.3892410717,-59.2635781091 +J2018.81,B1991.41,234.801152081,71.8511934075,175.271116162,19.166721209,107.275353666,39.7113209648 +J2002.24,B1978.63,184.754250038,-66.4894904918,34.1841541814,-14.4958117979,299.686716649,-3.81069739954 +J2024.18,B1982.60,245.64829793,-38.7682176459,73.9547095701,-41.3059410321,341.54798842,7.97381666623 +J2011.79,B1986.49,176.234540627,12.5643501076,97.7788131928,38.1835469935,252.339039389,68.6211958128 +J1979.65,B1969.56,333.536461653,-55.645568776,343.316605394,-54.0790917446,336.652926202,-50.1180340532 +J1989.61,B1969.64,185.716717981,-21.5568171888,70.5091730065,12.162471916,294.316919615,40.7880063819 +J1988.65,B1992.98,25.9775574253,12.7249831044,268.242055984,-0.265815951959,142.501478254,-48.0628732689 +J1978.56,B1990.50,204.302987352,-36.6989586206,66.0962720642,-9.57087489783,313.310472546,25.1146611916 +J2009.00,B1991.83,221.487546141,22.5689795999,126.853873425,2.99323159891,29.3329007943,63.7844492045 +J1986.24,B1959.40,338.956666009,-30.7135370512,297.785788922,-58.4349148844,18.009575125,-60.3855752943 +J2002.57,B1967.98,149.5308077,21.1458572723,94.8524518306,65.0524091023,211.851072125,50.1739306174 +J2013.49,B1974.10,95.1983908472,-1.61163007915,325.627893674,50.8317830992,210.781640873,-7.70049786191 +J1985.59,B1998.30,35.0615395317,-28.6207880841,309.362547589,-10.9681750175,222.2815235,-70.2039555167 +J1989.64,B1978.17,174.903919876,-25.7547140538,60.6700456981,17.1698957102,283.434726362,34.3456435324 +J1992.82,B1991.38,167.27863063,54.1842744725,155.134423626,47.955156172,150.615261991,57.1592810224 
+J2022.82,B1953.81,10.7133541168,-26.6356033619,299.700062775,-30.2854018223,42.0331929301,-87.8059255496 +J2008.01,B1977.66,249.939886269,43.0233288254,157.107675338,-4.65986958657,67.7233796,41.7069452763 +J2022.53,B1977.40,258.100960451,-37.3838036503,76.1621887374,-51.0815347034,348.967532389,1.36680578377 +J1979.84,B1995.27,262.732112385,-19.8057986634,105.205356822,-52.5372854117,6.11993733263,7.43500801549 +J1988.23,B1968.47,149.166366188,63.2857703333,174.688450428,50.0778473898,148.575911001,44.2672282651 +J1988.61,B1995.06,5.4355841259,0.695799807062,268.674810006,-23.9106016257,107.664837783,-61.2469672937 +J2016.75,B1957.03,327.231056694,-11.1377396332,252.951382603,-62.1498935414,44.0578705009,-44.1656816269 +J2015.64,B1954.96,284.17633852,-71.0631656787,21.4861837786,-44.7317770239,324.040057511,-25.8326289675 +J1989.69,B1998.66,59.4717008987,14.0960045791,281.495047329,29.7732755945,176.567960403,-28.6862393249 +J2007.49,B1997.10,112.602946077,-17.7763932222,359.226063676,44.0329923131,233.074018942,0.179492159778 +J1996.05,B1979.55,219.940310095,-26.5130440909,81.4465458752,-17.5043303516,331.149281423,30.3269968045 +J1978.73,B1952.60,131.216503219,-60.6790709392,16.634154156,1.81850032112,276.99056645,-10.9885663722 +J2011.94,B1952.51,56.1738921125,-19.3427782341,311.186999671,10.4337104739,211.304689863,-49.7748029903 +J2004.27,B1966.23,63.8293728328,-59.8347944156,346.99277437,-9.45023426908,270.969703277,-42.5256665899 +J1992.23,B1968.79,312.440281577,-82.909075449,11.3540896839,-34.157150499,310.093208312,-30.498791986 +J1987.90,B1988.21,104.43408064,-66.6447299251,5.45340291535,-4.88740816946,277.212698727,-24.1397093769 +J1989.59,B1992.96,210.664663673,-17.5831928536,86.0165552167,-5.98416607071,326.19870132,41.9722447163 +J2013.49,B1977.29,163.438155327,-54.6954182678,35.0847453795,1.14728902679,286.284423558,4.38048943444 +J1996.22,B1966.19,148.024127582,2.32865180198,61.1653789554,55.4628176364,235.147968548,40.5062869945 
+J1989.43,B1970.29,317.748400264,-34.6457182874,316.831643302,-75.2962612516,9.65732035894,-42.8175894028 +J1988.21,B1955.48,249.374885326,79.5246095403,183.718980722,20.6913682813,112.565520636,32.5641832214 +J1988.85,B1956.86,100.53840787,-27.7507223648,349.546329312,30.9972476643,237.218549213,-14.0500966148 +J2017.12,B1987.27,23.1984832267,21.1208388177,259.952338902,1.35554765265,135.43691251,-40.8550069313 +J1983.48,B1993.82,71.5045009532,3.00896662959,299.603452073,35.2702997539,194.634394328,-25.8112525209 +J1987.60,B1962.95,335.405788093,-6.90098238794,255.324007551,-53.1294372006,55.8410528163,-49.2268304977 +J2004.59,B1984.28,307.588884401,18.8511389183,199.036872823,-43.8235513129,61.4614776755,-11.7984990917 +J2023.77,B1967.96,343.704504442,-46.9224252956,326.142463635,-52.3943400975,344.29956144,-59.491008965 +J1975.21,B1950.30,18.8112053675,35.1485289159,245.286476487,4.93182941858,128.71934377,-27.3093230426 +J1987.00,B1988.06,208.609805013,-46.3894275721,58.5433364885,-16.5301679525,314.238428253,15.0130783715 +J2011.33,B1970.70,172.978655994,15.4172636989,99.5237786852,42.2246688539,240.910040544,68.0540051284 +J1987.54,B1966.69,7.8152324312,-34.9365736294,307.969684049,-34.8499910529,330.146180835,-81.2012202729 +J2020.91,B1963.90,134.503366944,-72.4111269318,16.6485961895,-9.74558853146,287.251430013,-17.0236932061 +J2000.41,B1979.63,149.073048424,14.7065160273,81.1217800627,62.690435672,220.791567295,47.4907829088 +J2000.13,B1966.26,217.406604209,16.5186514295,119.582956349,3.61620654175,12.9778311665,64.930795113 +J2010.62,B1996.84,241.829541848,16.5114334946,131.926345089,-16.4641006337,30.2129120083,43.7266326194 +J2006.99,B1954.80,301.991652158,46.8228690265,192.281654734,-16.0819438067,82.1831278354,7.65942031664 +J1989.22,B1994.16,280.629434995,-19.0017596678,119.763163085,-67.9434807772,15.0327362766,-6.7748272396 +J1975.49,B1978.40,144.252375855,-10.2581330338,44.2663724488,47.1780541535,244.906125318,30.0146889384 
+J2004.74,B1953.10,286.0305233,12.7464714044,168.403291875,-46.9082451034,45.684929198,3.05860565516 +J2017.05,B1993.75,321.524751743,61.8464645226,201.729972147,0.142282640146,100.9998867,8.01195048206 +J1999.33,B1961.24,94.4962887092,-44.0946278203,352.197455758,14.1646347321,251.750996447,-24.2178084719 +J2014.04,B1989.97,356.110922656,-39.1892569317,312.562642311,-44.4976591581,347.918750957,-71.4003276728 +J1995.63,B1990.09,307.190555646,-43.7191034979,2.19409366458,-73.0808365619,356.907677491,-35.5140842361 +J1993.99,B1951.45,263.331776174,25.1917278571,153.169840501,-25.2115873395,48.721484719,27.5451183286 +J2019.92,B1981.35,128.003624894,58.8666544649,188.132407462,57.9560573023,158.151363881,35.6484565698 +J2019.84,B1980.23,317.984216655,-8.89508525523,234.105102561,-66.9359336144,41.2540921899,-35.0019322625 +J2011.02,B1953.91,312.465272698,5.18400310772,210.387316474,-56.4513187131,52.2045687218,-23.2783213403 +J1989.24,B1988.65,344.0759205,-20.8070551085,282.194864755,-52.0563161521,40.4226717361,-63.1239129244 +J1991.99,B1957.17,0.0386123471053,-42.7336081023,317.113101073,-41.6722671173,333.272026627,-71.3154908857 +J1989.26,B1973.18,5.95477509083,23.9728714179,248.645776385,-10.559858406,115.039830262,-38.4244563396 +J2013.98,B1954.86,113.065220613,27.4191705733,278.378621564,81.2392172048,191.670829361,20.3160887911 +J1975.23,B1978.49,358.313822853,67.0446512684,211.780718733,12.9835628445,117.438946252,4.91999007663 +J1979.23,B1970.19,53.5839203362,-15.011852649,305.729478083,10.5974755105,203.68085793,-50.0750506428 +J1997.07,B1979.33,60.2557627351,25.6833225299,269.003573186,34.456942967,167.932574018,-20.1669832622 +J1987.55,B1987.44,273.08593329,76.4393919681,185.756212072,15.2737854446,107.678579719,28.6298222935 +J2020.29,B1994.48,25.0306798156,-51.1202356021,329.572709072,-25.482154153,285.52934787,-64.2572780699 +J2019.04,B1968.97,253.970437895,31.094899255,151.036217235,-15.4523165082,52.7995114065,37.292668285 
+J2010.83,B1964.62,168.89950144,-43.2270950714,44.9384665428,8.00028298712,284.864643848,16.2980080563 +J1986.93,B1975.46,3.66775780511,39.2622225734,235.238676188,-2.72528403932,115.361704733,-23.0060112816 +J2021.26,B1976.64,278.936590632,6.21231840756,154.692659833,-49.4790598243,36.5657932055,6.53011956709 +J2023.48,B1955.27,285.91236301,9.40548699672,166.481412833,-50.0386245303,42.5308356905,1.81955485147 +J2003.91,B1952.30,53.8450026285,60.7259893436,226.147190382,33.5643817186,141.659779689,3.93637165962 +J1988.45,B1981.10,8.53330744443,-7.54498028811,278.255981158,-25.1174817952,110.761077739,-69.9571108312 +J1990.05,B1991.12,274.342957522,-1.24603088049,141.305593094,-52.2551761295,27.9749402015,6.88675256757 +J2006.27,B1952.75,80.5212647616,19.4060625392,284.864123639,50.4777772377,185.129288864,-9.72047187101 +J2013.99,B1989.90,94.3827831954,15.0883386826,301.493234394,60.9106556021,195.582917069,-0.623869405575 +J1996.06,B1962.21,164.473020999,-47.6965440186,39.4946578672,6.36223553605,283.94830707,10.9316648861 +J2007.85,B1990.18,89.9736906625,-16.9964263489,333.217747094,35.5496625256,222.809109826,-18.9265144618 +J1996.18,B1964.91,204.582082173,15.6789515837,113.317697869,14.3475448707,348.941256944,74.1578851882 diff --git a/astropy/coordinates/tests/accuracy/generate_ref_ast.py b/astropy/coordinates/tests/accuracy/generate_ref_ast.py new file mode 100644 index 0000000..23b2ac7 --- /dev/null +++ b/astropy/coordinates/tests/accuracy/generate_ref_ast.py @@ -0,0 +1,255 @@ +""" +This series of functions are used to generate the reference CSV files +used by the accuracy tests. Running this as a comand-line script will +generate them all. 
+""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import os + +import numpy as np + +from astropy.table import Table, Column + + +def ref_fk4_no_e_fk4(fnout='fk4_no_e_fk4.csv'): + """ + Accuracy tests for the FK4 (with no E-terms of aberration) to/from FK4 + conversion, with arbitrary equinoxes and epoch of observation. + """ + + import starlink.Ast as Ast + + np.random.seed(12345) + + N = 200 + + # Sample uniformly on the unit sphere. These will be either the FK4 + # coordinates for the transformation to FK5, or the FK5 coordinates for the + # transformation to FK4. + ra = np.random.uniform(0., 360., N) + dec = np.degrees(np.arcsin(np.random.uniform(-1., 1., N))) + + # Generate random observation epoch and equinoxes + obstime = ["B{0:7.2f}".format(x) for x in np.random.uniform(1950., 2000., N)] + + ra_fk4ne, dec_fk4ne = [], [] + ra_fk4, dec_fk4 = [], [] + + for i in range(N): + + # Set up frames for AST + frame_fk4ne = Ast.SkyFrame('System=FK4-NO-E,Epoch={epoch},Equinox=B1950'.format(epoch=obstime[i])) + frame_fk4 = Ast.SkyFrame('System=FK4,Epoch={epoch},Equinox=B1950'.format(epoch=obstime[i])) + + # FK4 to FK4 (no E-terms) + frameset = frame_fk4.convert(frame_fk4ne) + coords = np.degrees(frameset.tran([[np.radians(ra[i])], [np.radians(dec[i])]])) + ra_fk4ne.append(coords[0, 0]) + dec_fk4ne.append(coords[1, 0]) + + # FK4 (no E-terms) to FK4 + frameset = frame_fk4ne.convert(frame_fk4) + coords = np.degrees(frameset.tran([[np.radians(ra[i])], [np.radians(dec[i])]])) + ra_fk4.append(coords[0, 0]) + dec_fk4.append(coords[1, 0]) + + # Write out table to a CSV file + t = Table() + t.add_column(Column(name='obstime', data=obstime)) + t.add_column(Column(name='ra_in', data=ra)) + t.add_column(Column(name='dec_in', data=dec)) + t.add_column(Column(name='ra_fk4ne', data=ra_fk4ne)) + t.add_column(Column(name='dec_fk4ne', data=dec_fk4ne)) + t.add_column(Column(name='ra_fk4', data=ra_fk4)) + t.add_column(Column(name='dec_fk4', 
data=dec_fk4)) + f = open(fnout, 'wb') + f.write("# This file was generated with the {0} script, and the reference " + "values were computed using AST\n".format(os.path.basename(__file__))) + t.write(f, format='ascii', delimiter=',') + + +def ref_fk4_no_e_fk5(fnout='fk4_no_e_fk5.csv'): + """ + Accuracy tests for the FK4 (with no E-terms of aberration) to/from FK5 + conversion, with arbitrary equinoxes and epoch of observation. + """ + + import starlink.Ast as Ast + + np.random.seed(12345) + + N = 200 + + # Sample uniformly on the unit sphere. These will be either the FK4 + # coordinates for the transformation to FK5, or the FK5 coordinates for the + # transformation to FK4. + ra = np.random.uniform(0., 360., N) + dec = np.degrees(np.arcsin(np.random.uniform(-1., 1., N))) + + # Generate random observation epoch and equinoxes + obstime = ["B{0:7.2f}".format(x) for x in np.random.uniform(1950., 2000., N)] + equinox_fk4 = ["B{0:7.2f}".format(x) for x in np.random.uniform(1925., 1975., N)] + equinox_fk5 = ["J{0:7.2f}".format(x) for x in np.random.uniform(1975., 2025., N)] + + ra_fk4, dec_fk4 = [], [] + ra_fk5, dec_fk5 = [], [] + + for i in range(N): + + # Set up frames for AST + frame_fk4 = Ast.SkyFrame('System=FK4-NO-E,Epoch={epoch},Equinox={equinox_fk4}'.format(epoch=obstime[i], equinox_fk4=equinox_fk4[i])) + frame_fk5 = Ast.SkyFrame('System=FK5,Epoch={epoch},Equinox={equinox_fk5}'.format(epoch=obstime[i], equinox_fk5=equinox_fk5[i])) + + # FK4 to FK5 + frameset = frame_fk4.convert(frame_fk5) + coords = np.degrees(frameset.tran([[np.radians(ra[i])], [np.radians(dec[i])]])) + ra_fk5.append(coords[0, 0]) + dec_fk5.append(coords[1, 0]) + + # FK5 to FK4 + frameset = frame_fk5.convert(frame_fk4) + coords = np.degrees(frameset.tran([[np.radians(ra[i])], [np.radians(dec[i])]])) + ra_fk4.append(coords[0, 0]) + dec_fk4.append(coords[1, 0]) + + # Write out table to a CSV file + t = Table() + t.add_column(Column(name='equinox_fk4', data=equinox_fk4)) + 
t.add_column(Column(name='equinox_fk5', data=equinox_fk5)) + t.add_column(Column(name='obstime', data=obstime)) + t.add_column(Column(name='ra_in', data=ra)) + t.add_column(Column(name='dec_in', data=dec)) + t.add_column(Column(name='ra_fk5', data=ra_fk5)) + t.add_column(Column(name='dec_fk5', data=dec_fk5)) + t.add_column(Column(name='ra_fk4', data=ra_fk4)) + t.add_column(Column(name='dec_fk4', data=dec_fk4)) + f = open(fnout, 'wb') + f.write("# This file was generated with the {0} script, and the reference " + "values were computed using AST\n".format(os.path.basename(__file__))) + t.write(f, format='ascii', delimiter=',') + + +def ref_galactic_fk4(fnout='galactic_fk4.csv'): + """ + Accuracy tests for the ICRS (with no E-terms of aberration) to/from FK5 + conversion, with arbitrary equinoxes and epoch of observation. + """ + + import starlink.Ast as Ast + + np.random.seed(12345) + + N = 200 + + # Sample uniformly on the unit sphere. These will be either the ICRS + # coordinates for the transformation to FK5, or the FK5 coordinates for the + # transformation to ICRS. 
+ lon = np.random.uniform(0., 360., N) + lat = np.degrees(np.arcsin(np.random.uniform(-1., 1., N))) + + # Generate random observation epoch and equinoxes + obstime = ["B{0:7.2f}".format(x) for x in np.random.uniform(1950., 2000., N)] + equinox_fk4 = ["J{0:7.2f}".format(x) for x in np.random.uniform(1975., 2025., N)] + + lon_gal, lat_gal = [], [] + ra_fk4, dec_fk4 = [], [] + + for i in range(N): + + # Set up frames for AST + frame_gal = Ast.SkyFrame('System=Galactic,Epoch={epoch}'.format(epoch=obstime[i])) + frame_fk4 = Ast.SkyFrame('System=FK4,Epoch={epoch},Equinox={equinox_fk4}'.format(epoch=obstime[i], equinox_fk4=equinox_fk4[i])) + + # ICRS to FK5 + frameset = frame_gal.convert(frame_fk4) + coords = np.degrees(frameset.tran([[np.radians(lon[i])], [np.radians(lat[i])]])) + ra_fk4.append(coords[0, 0]) + dec_fk4.append(coords[1, 0]) + + # FK5 to ICRS + frameset = frame_fk4.convert(frame_gal) + coords = np.degrees(frameset.tran([[np.radians(lon[i])], [np.radians(lat[i])]])) + lon_gal.append(coords[0, 0]) + lat_gal.append(coords[1, 0]) + + # Write out table to a CSV file + t = Table() + t.add_column(Column(name='equinox_fk4', data=equinox_fk4)) + t.add_column(Column(name='obstime', data=obstime)) + t.add_column(Column(name='lon_in', data=lon)) + t.add_column(Column(name='lat_in', data=lat)) + t.add_column(Column(name='ra_fk4', data=ra_fk4)) + t.add_column(Column(name='dec_fk4', data=dec_fk4)) + t.add_column(Column(name='lon_gal', data=lon_gal)) + t.add_column(Column(name='lat_gal', data=lat_gal)) + f = open(fnout, 'wb') + f.write("# This file was generated with the {0} script, and the reference " + "values were computed using AST\n".format(os.path.basename(__file__))) + t.write(f, format='ascii', delimiter=',') + + +def ref_icrs_fk5(fnout='icrs_fk5.csv'): + """ + Accuracy tests for the ICRS (with no E-terms of aberration) to/from FK5 + conversion, with arbitrary equinoxes and epoch of observation. 
+ """ + + import starlink.Ast as Ast + + np.random.seed(12345) + + N = 200 + + # Sample uniformly on the unit sphere. These will be either the ICRS + # coordinates for the transformation to FK5, or the FK5 coordinates for the + # transformation to ICRS. + ra = np.random.uniform(0., 360., N) + dec = np.degrees(np.arcsin(np.random.uniform(-1., 1., N))) + + # Generate random observation epoch and equinoxes + obstime = ["B{0:7.2f}".format(x) for x in np.random.uniform(1950., 2000., N)] + equinox_fk5 = ["J{0:7.2f}".format(x) for x in np.random.uniform(1975., 2025., N)] + + ra_icrs, dec_icrs = [], [] + ra_fk5, dec_fk5 = [], [] + + for i in range(N): + + # Set up frames for AST + frame_icrs = Ast.SkyFrame('System=ICRS,Epoch={epoch}'.format(epoch=obstime[i])) + frame_fk5 = Ast.SkyFrame('System=FK5,Epoch={epoch},Equinox={equinox_fk5}'.format(epoch=obstime[i], equinox_fk5=equinox_fk5[i])) + + # ICRS to FK5 + frameset = frame_icrs.convert(frame_fk5) + coords = np.degrees(frameset.tran([[np.radians(ra[i])], [np.radians(dec[i])]])) + ra_fk5.append(coords[0, 0]) + dec_fk5.append(coords[1, 0]) + + # FK5 to ICRS + frameset = frame_fk5.convert(frame_icrs) + coords = np.degrees(frameset.tran([[np.radians(ra[i])], [np.radians(dec[i])]])) + ra_icrs.append(coords[0, 0]) + dec_icrs.append(coords[1, 0]) + + # Write out table to a CSV file + t = Table() + t.add_column(Column(name='equinox_fk5', data=equinox_fk5)) + t.add_column(Column(name='obstime', data=obstime)) + t.add_column(Column(name='ra_in', data=ra)) + t.add_column(Column(name='dec_in', data=dec)) + t.add_column(Column(name='ra_fk5', data=ra_fk5)) + t.add_column(Column(name='dec_fk5', data=dec_fk5)) + t.add_column(Column(name='ra_icrs', data=ra_icrs)) + t.add_column(Column(name='dec_icrs', data=dec_icrs)) + f = open(fnout, 'wb') + f.write("# This file was generated with the {0} script, and the reference " + "values were computed using AST\n".format(os.path.basename(__file__))) + t.write(f, format='ascii', delimiter=',') + +if 
__name__ == '__main__': + ref_fk4_no_e_fk4() + ref_fk4_no_e_fk5() + ref_galactic_fk4() + ref_icrs_fk5() diff --git a/astropy/coordinates/tests/accuracy/icrs_fk5.csv b/astropy/coordinates/tests/accuracy/icrs_fk5.csv new file mode 100644 index 0000000..ed6703c --- /dev/null +++ b/astropy/coordinates/tests/accuracy/icrs_fk5.csv @@ -0,0 +1,202 @@ +# This file was generated with the ref_icrs_fk5.py script, and the reference values were computed using AST +equinox_fk5,obstime,ra_in,dec_in,ra_fk5,dec_fk5,ra_icrs,dec_icrs +J1998.36,B1995.95,334.661793414,43.9385116594,334.644564717,43.9302620645,334.679023415,43.9467624314 +J2021.64,B1954.56,113.895199649,-14.1109832563,114.144749047,-14.1600275394,113.645603942,-14.0624187531 +J2020.49,B1953.55,66.2107722038,-7.76265420193,66.4590983513,-7.71687128381,65.9625042534,-7.80888947142 +J1981.50,B1970.69,73.6417002791,41.7006137481,73.3167722987,41.6713224382,73.9668646614,41.7293444168 +J2001.47,B1960.78,204.381010469,-14.9357743223,204.400749583,-14.9432299686,204.361272512,-14.9283175102 +J2005.96,B1975.98,214.396093073,-66.7648451487,214.51622501,-66.7922023737,214.276152292,-66.7374486425 +J2006.23,B1977.93,347.225227105,6.27744217753,347.304207997,6.31127500827,347.146246763,6.24361991082 +J2007.34,B1973.69,235.143754874,-5.59566003897,235.241093646,-5.61898190462,235.046433786,-5.57228120384 +J1991.60,B1960.79,269.606389512,26.7823112195,269.522379939,26.7826702924,269.690399178,26.7820207078 +J1980.71,B1961.97,235.285153507,-14.0695156888,235.015999226,-14.0081475332,235.554479961,-14.1304690349 +J2003.56,B1960.84,269.177331338,42.9472695107,269.20449399,42.9469939989,269.150168743,42.9475544195 +J1990.10,B1982.78,346.070424986,-3.51848810713,345.942775401,-3.57196685618,346.198054805,-3.46497978924 +J1984.68,B1992.32,3.01978725896,7.19732176646,2.82298721926,7.11213924582,3.21663102538,7.28248887117 +J2003.24,B1996.52,38.3199756112,18.8080489808,38.3653094841,18.8221903901,38.2746486329,18.7938987191 
+J2005.52,B1990.02,107.533336957,-4.33088623215,107.601845445,-4.34016819794,107.464824543,-4.32163930179 +J1977.27,B1984.04,236.30802591,14.3162535375,236.043743614,14.3866995821,236.572362968,14.2462932004 +J2024.27,B1960.36,291.532518915,-33.7960784017,291.927410812,-33.7460496092,291.137240582,-33.8452405537 +J1980.19,B1987.08,313.983328941,27.7572327639,313.771329108,27.6807919311,314.195342452,27.8339672537 +J1995.29,B1984.85,347.273135054,-13.6880685538,347.211387919,-13.7136412695,347.334872743,-13.662489607 +J2008.28,B1969.09,260.526724891,-37.6134342267,260.667857852,-37.6209601213,260.385615908,-37.6057963361 +J1984.85,B1992.51,231.291118043,-27.2371455509,231.063254934,-27.1842630084,231.519165836,-27.2897662439 +J1987.09,B1976.41,258.283303492,-30.1025933842,258.077147166,-30.0878669846,258.489514237,-30.1170665366 +J2006.16,B1994.65,168.335642599,-44.084769302,168.407881134,-44.1183592869,168.263437199,-44.0511880472 +J2014.94,B1991.03,117.210483914,32.8708634152,117.449614999,32.8326715727,116.971180598,32.9087464534 +J2002.23,B1961.43,158.272058119,-29.286471988,158.29805553,-29.2980114305,158.246062428,-29.2749346296 +J1984.88,B1991.03,262.688069789,-48.1516431413,262.401200048,-48.1407150038,262.975034556,-48.1621531697 +J2014.21,B1956.93,357.845250924,19.2890677934,358.026315201,19.3681291925,357.664269464,19.2100157767 +J2015.72,B1974.12,243.674536239,-10.0431678136,243.889881509,-10.0818251308,243.459271586,-10.0042157281 +J2010.54,B1957.44,284.696106425,19.6051067047,284.810926274,19.6200552,284.581280582,19.5902719604 +J2022.20,B1972.41,61.5291328053,18.6403709997,61.8503393647,18.6989763949,61.2081620218,18.581156754 +J2017.75,B1983.30,9.66573928438,-22.9075078717,9.88608757274,-22.8101292831,9.44526590432,-23.0049503113 +J2023.18,B1989.45,288.133287813,-36.6947385674,288.521507272,-36.654154333,287.744731719,-36.7344915409 +J1998.23,B1983.10,325.340113758,-33.7758802174,325.313691637,-33.783980295,325.366532233,-33.7677775537 
+J1999.25,B1985.58,8.88343575454,-49.4693354042,8.87458135076,-49.4734614153,8.89228952149,-49.4652094919 +J2004.32,B1994.40,177.029034641,-67.7755279684,177.081382811,-67.7995455131,176.976736518,-67.7515115552 +J2022.10,B1957.08,189.451860246,-68.7071945134,189.787950236,-68.8284977585,189.117915692,-68.5857730927 +J1993.61,B1957.38,214.691763751,-32.6160600699,214.596970957,-32.5867949166,214.786602083,-32.6452917256 +J2004.91,B1966.30,18.7047162369,-32.9080620608,18.7619437329,-32.8821737407,18.6474776276,-32.9339591431 +J2005.68,B1951.59,322.232230099,14.4669345738,322.300004441,14.4919497078,322.164454374,14.4419423495 +J2003.00,B1984.39,262.175824918,51.7319974933,262.193291036,51.7297325887,262.15835963,51.7342674421 +J1980.93,B1988.24,294.6060041,34.0181871087,294.426858562,33.9741356521,294.78513452,34.0625403768 +J1995.15,B1967.50,180.08019102,26.2892216009,180.018069261,26.3162194666,180.142298341,26.2622237714 +J1986.07,B1980.80,291.668187169,-22.2789167174,291.460165406,-22.3074160406,291.876124294,-22.2501557708 +J2014.41,B1997.92,34.548669268,-15.8924906144,34.7203476357,-15.826491503,34.3769912557,-15.9586260582 +J2013.20,B1964.55,78.8220157436,-37.4332268082,78.9359542832,-37.4190574603,78.7080839461,-37.4475395217 +J1983.72,B1984.33,93.1388621771,60.5731416456,92.7698274429,60.5778081354,93.5078078659,60.5678923219 +J2011.19,B1952.11,168.518071423,7.09229333513,168.662964922,7.03122231792,168.373145295,7.15333299716 +J2021.23,B1953.13,165.374352937,39.3890686842,165.670569356,39.2746286306,165.077550855,39.5033543186 +J1998.80,B1990.72,255.423520875,-17.5881075751,255.406106679,-17.5864187707,255.440935444,-17.5897944148 +J2020.65,B1971.83,64.0990821181,36.8289797648,64.4412908098,36.8788812849,63.757239339,36.77846091 +J1996.87,B1969.60,191.321958369,-52.3532066605,191.277444974,-52.3361209946,191.366491705,-52.3702896721 +J1978.29,B1966.53,60.3872023631,25.1025882655,60.0600049106,25.0425615489,60.7146932542,25.1620146503 
+J1993.19,B1972.88,276.773010626,56.6051138031,276.742873164,56.6006572956,276.803141964,56.6095901107 +J1984.47,B1991.77,334.141397682,37.3852087993,333.971320286,37.3074623211,334.311570487,37.4630672642 +J1982.42,B1973.34,219.417716878,-20.2290328911,219.169713749,-20.1532857902,219.66593381,-20.3045108915 +J1985.55,B1971.06,54.0660580808,-29.3264933861,53.9175360432,-29.3737907652,54.2145819747,-29.2793648485 +J2018.98,B1978.54,176.26561333,-0.572718169429,176.5087243,-0.678171194716,176.022494179,-0.467294315659 +J2015.89,B1986.95,135.84418338,-9.94938261687,136.036951663,-10.0129567306,135.651382202,-9.88601582693 +J2006.58,B1952.75,305.496508312,-8.63421746611,305.585332083,-8.61291748186,305.407668201,-8.65547120765 +J2022.76,B1981.21,327.995002307,-58.3471659896,328.394703325,-58.2394830075,327.593625588,-58.4543795694 +J1980.95,B1981.05,138.185539617,11.9337947187,137.926465957,12.0126777715,138.444435852,11.854592026 +J2005.11,B1950.06,113.578525223,29.6301583121,113.658818144,29.6187548389,113.498216367,29.6415252375 +J1991.57,B1980.14,204.621895006,36.5235009134,204.528365616,36.5661830045,204.715395365,36.4808507277 +J2016.08,B1952.01,67.6144926088,-13.7094836718,67.8003322803,-13.675528411,67.4286781478,-13.7437074086 +J2007.99,B1979.29,45.3029557779,36.4639084123,45.4287375369,36.4951563695,45.1772514486,36.4325910517 +J1996.13,B1972.42,247.534489816,-3.23349952461,247.483791774,-3.22525417405,247.585191141,-3.24172726082 +J2010.80,B1967.69,287.858418461,26.2825631559,287.968526608,26.3010624761,287.748304904,26.2641738179 +J1985.76,B1996.68,206.473163472,-38.4312130715,206.262844929,-38.3601778797,206.683760191,-38.5021184668 +J1975.84,B1963.36,350.362793376,-7.51631961926,350.050245875,-7.64886538089,350.675192428,-7.38365103931 +J1989.04,B1964.06,228.259575769,40.311002157,228.157788783,40.3516658201,228.36135704,40.2704193663 +J2005.09,B1975.25,319.831820932,40.7337792676,319.881302594,40.7554460493,319.782343346,40.712128268 
+J1998.03,B1982.34,178.349313153,-38.3854710615,178.324338212,-38.3745092745,178.374291779,-38.3964329888 +J2010.53,B1998.53,126.58195076,-73.6980337652,126.555725353,-73.7329650434,126.607757619,-73.6630811157 +J1983.23,B1951.79,257.122932676,24.0154376566,256.948650568,24.0363842696,257.297226196,23.9947678892 +J2022.01,B1971.16,181.414481921,-17.7858263698,181.697561318,-17.9083119018,181.131603746,-17.6633258663 +J2022.77,B1979.42,81.2295383474,-9.26450146427,81.5008624611,-9.24547745382,80.9582426792,-9.28411870238 +J2024.04,B1986.59,88.1907984871,32.4238226453,88.5837995469,32.4275810011,87.7978296174,32.4191468321 +J1977.94,B1958.78,285.408252018,67.7826509035,285.415288738,67.7500149744,285.400733562,67.815271794 +J2012.02,B1975.53,178.262069224,51.7327600597,178.418521574,51.6658699581,178.105379001,51.7996446322 +J2005.03,B1975.01,329.433722424,-46.8960749035,329.513358137,-46.8719488299,329.354038052,-46.9201811836 +J1979.45,B1994.64,340.333860195,36.5560891832,340.099269221,36.4484316911,340.568666175,36.6639044187 +J2024.47,B1969.13,191.963602676,21.3572019706,192.265985395,21.2240120738,191.661020584,21.4905409785 +J2002.44,B1983.14,90.8973340407,3.44588414281,90.9294194634,3.44566140242,90.8652485585,3.44609927685 +J2008.72,B1952.34,259.510340943,47.0512387915,259.570777662,47.0424288828,259.449910071,47.060099055 +J2011.24,B1987.56,132.277954966,30.4307232942,132.449103167,30.388553739,132.106687114,30.4727545196 +J2003.42,B1968.44,179.513439448,-54.44865752,179.557050535,-54.4676997913,179.469848483,-54.4296153679 +J2001.37,B1997.40,81.5670170865,-19.9451944488,81.5818413055,-19.9440843678,81.5521929287,-19.9463064817 +J1982.54,B1967.36,127.283632829,-10.0946390302,127.073706282,-10.0359014336,127.493515779,-10.1536599704 +J1987.01,B1984.19,234.306643184,-86.4404274379,233.208246223,-86.397666282,235.429405927,-86.482050156 +J1995.13,B1991.23,112.65584231,11.2521500479,112.588477624,11.262573342,112.723199816,11.2416973345 
+J1978.39,B1974.31,276.744760981,21.4151577082,276.514780435,21.4012711846,276.974729777,21.4295237953 +J2012.92,B1999.21,281.461357214,-15.511897988,281.646447197,-15.4974841762,281.27623546,-15.5260840726 +J1992.13,B1980.19,306.867413859,-11.9467360888,306.759165107,-11.9729853099,306.975635305,-11.9204206469 +J2024.49,B1987.98,341.966066455,-2.82477813631,342.281869892,-2.69502407373,341.650132043,-2.95429956154 +J2019.43,B1984.23,38.6362483924,9.3322810896,38.8963811972,9.41661462037,38.3762808891,9.24764100258 +J2021.93,B1996.62,327.861128148,-46.529254733,328.210157236,-46.4256790337,327.511186361,-46.632434339 +J2011.96,B1997.49,120.979858288,87.22617179,122.295667673,87.1912385961,119.633038513,87.2597786682 +J1976.35,B1999.51,297.496953653,0.839666332936,297.195644583,0.779185153185,297.798143461,0.9007616283 +J1994.12,B1956.31,323.316228643,-0.794522598791,323.240624027,-0.820755621072,323.391823819,-0.768263773348 +J1975.53,B1998.83,15.3775095611,-38.7740290611,15.0928652608,-38.9054807438,15.6617662484,-38.6427567079 +J1978.26,B1961.46,70.486199672,-24.0682131367,70.2586642967,-24.1088709419,70.7137598878,-24.0280083925 +J2009.07,B1959.30,106.020475905,36.6574903487,106.172780811,36.6434848171,105.868125064,36.6713668422 +J2024.33,B1975.46,225.719957006,-24.2326924255,226.075567685,-24.326948892,225.364802775,-24.1378344642 +J2008.31,B1976.52,31.0403178442,23.2187819108,31.1570536178,23.258394038,30.9236362505,23.1791211798 +J1995.76,B1964.13,51.4602071324,-27.0058546166,51.4152973853,-27.0205700299,51.5051169729,-26.991153671 +J1977.06,B1965.51,185.697546923,55.594260797,185.421779304,55.721374348,185.972510783,55.4672081659 +J2019.71,B1965.49,248.162878677,-23.7609450888,248.460344259,-23.8014906584,247.865592952,-23.7198708623 +J2010.34,B1963.32,308.385291884,51.2349043028,308.461574811,51.2706847328,308.308996421,51.1991839517 +J1998.94,B1979.67,233.050205996,63.3093356498,233.046004532,63.3128868847,233.05440839,63.3057847603 
+J1985.78,B1960.86,209.382723191,-41.4659129842,209.166390198,-41.3968581581,209.599369778,-41.5348210618 +J1979.09,B1970.12,256.001743835,-16.3448051664,255.700801743,-16.3163460002,256.302789611,-16.3726709454 +J2008.66,B1964.43,90.8700685367,21.3678694408,90.9998841203,21.3670776114,90.7402515844,21.3685520416 +J2024.74,B1958.69,324.057486054,57.4352750563,324.24791254,57.5469196438,323.867096755,57.3238991167 +J2004.68,B1961.29,159.225729446,-45.2472278228,159.276379685,-45.27159791,159.175093005,-45.2228659014 +J2017.01,B1999.43,7.38749687642,-53.1540997613,7.58899121668,-53.0602158752,7.18561693871,-53.2480265357 +J1982.65,B1971.70,345.477965039,-10.1831007688,345.251295994,-10.2765575516,345.704526845,-10.0895481742 +J2018.81,B1991.41,234.801152081,71.8511934075,234.781598949,71.7908263583,234.821698672,71.9115305128 +J2002.24,B1978.63,184.754250038,-66.4894904918,184.785352293,-66.5019187594,184.723164704,-66.4770616601 +J2024.18,B1982.60,245.64829793,-38.7682176459,246.056856393,-38.8232771335,245.240252475,-38.7122842298 +J2011.79,B1986.49,176.234540627,12.5643501076,176.386539261,12.4988499005,176.082498099,12.6298388968 +J1979.65,B1969.56,333.536461653,-55.645568776,333.201327008,-55.7468423589,333.870449248,-55.544000369 +J1989.61,B1969.64,185.716717981,-21.5568171888,185.58137022,-21.4992561733,185.852126151,-21.6143646005 +J1988.65,B1992.98,25.9775574253,12.7249831044,25.8259425625,12.6681381133,26.129235078,12.7817548524 +J1978.56,B1990.50,204.302987352,-36.6989586206,203.992013028,-36.590035009,204.614547277,-36.8076153687 +J2009.00,B1991.83,221.487546141,22.5689795999,221.589063682,22.531481696,221.386026462,22.6065363227 +J1986.24,B1959.40,338.956666009,-30.7135370512,338.763951811,-30.7849831444,339.149190984,-30.6419984779 +J2002.57,B1967.98,149.5308077,21.1458572723,149.566540611,21.1335179376,149.495070016,21.1581920836 +J2013.49,B1974.10,95.1983908472,-1.61163007915,95.3691226237,-1.61855225484,95.0276507441,-1.6049307767 
+J1985.59,B1998.30,35.0615395317,-28.6207880841,34.9020739253,-28.6865248849,35.2209739544,-28.5551795263 +J1989.64,B1978.17,174.903919876,-25.7547140538,174.773704705,-25.6972724215,175.03419171,-25.8121673429 +J1992.82,B1991.38,167.27863063,54.1842744725,167.174390005,54.2232575861,167.382755502,54.1452753419 +J2022.82,B1953.81,10.7133541168,-26.6356033619,10.9937676648,-26.5108341533,10.4326984418,-26.7604882278 +J2008.01,B1977.66,249.939886269,43.0233288254,250.003422167,43.0080617632,249.876355463,43.0386423389 +J2022.53,B1977.40,258.100960451,-37.3838036503,258.483519166,-37.4092478087,257.718621316,-37.3575402646 +J1979.84,B1995.27,262.732112385,-19.8057986634,262.43378889,-19.7913038792,263.030493701,-19.8197136836 +J1988.23,B1968.47,149.166366188,63.2857703333,148.948549069,63.3419675102,149.383595682,63.2294456404 +J1988.61,B1995.06,5.4355841259,0.695799807062,5.28960631728,0.632663567066,5.5815705005,0.758920757926 +J2016.75,B1957.03,327.231056694,-11.1377396332,327.455533267,-11.0592260753,327.006447871,-11.2160554034 +J2015.64,B1954.96,284.17633852,-71.0631656787,284.622383668,-71.0415076498,283.729510587,-71.08416592 +J1989.69,B1998.66,59.4717008987,14.0960045791,59.3272356842,14.0667790018,59.6162113325,14.1251054777 +J2007.49,B1997.10,112.602946077,-17.7763932222,112.686574032,-17.7924540137,112.519313748,-17.7603886122 +J1996.05,B1979.55,219.940310095,-26.5130440909,219.882677955,-26.4961740042,219.997955886,-26.5298999803 +J1978.73,B1952.60,131.216503219,-60.6790709392,131.102499987,-60.6011373948,131.330276761,-60.7571815411 +J2011.94,B1952.51,56.1738921125,-19.3427782341,56.3074873507,-19.3058404816,56.0403066499,-19.3798447522 +J2004.27,B1966.23,63.8293728328,-59.8347944156,63.8473703919,-59.8243161934,63.8113850715,-59.8452793392 +J1992.23,B1968.79,312.440281577,-82.909075449,312.082844158,-82.9381618829,312.795193361,-82.879790561 +J1987.90,B1988.21,104.43408064,-66.6447299251,104.430099425,-66.6279457743,104.437942894,-66.6615185415 
+J1989.59,B1992.96,210.664663673,-17.5831928536,210.521977043,-17.533300504,210.807417956,-17.6330115873 +J2013.49,B1977.29,163.438155327,-54.6954182678,163.580861698,-54.7674320028,163.295621486,-54.6234578045 +J1996.22,B1966.19,148.024127582,2.32865180198,147.975248991,2.34649291874,148.073002076,2.31080117706 +J1989.43,B1970.29,317.748400264,-34.6457182874,317.585582699,-34.6892153211,317.911087895,-34.6021088555 +J1988.21,B1955.48,249.374885326,79.5246095403,249.556636954,79.5476344368,249.19427904,79.5013904045 +J1988.85,B1956.86,100.53840787,-27.7507223648,100.427671298,-27.7394319384,100.64914055,-27.7621307317 +J2017.12,B1987.27,23.1984832267,21.1208388177,23.4324436323,21.2083599648,22.9647269089,21.0331644062 +J1983.48,B1993.82,71.5045009532,3.00896662959,71.2883142486,2.97961964121,71.7207379936,3.03798447641 +J1987.60,B1962.95,335.405788093,-6.90098238794,335.243429575,-6.9637085665,335.56809315,-6.83817480211 +J2004.59,B1984.28,307.588884401,18.8511389183,307.640784808,18.8667407469,307.536982665,18.8355554286 +J2023.77,B1967.96,343.704504442,-46.9224252956,344.048269178,-46.7952999698,343.359747105,-47.0493275593 +J1975.21,B1950.30,18.8112053675,35.1485289159,18.4626544919,35.0177535414,19.1604681331,35.2790332993 +J1987.00,B1988.06,208.609805013,-46.3894275721,208.40705329,-46.3258250272,208.812873725,-46.4529073994 +J2011.33,B1970.70,172.978655994,15.4172636989,173.125918709,15.3546485543,172.831339838,15.4798590369 +J1987.54,B1966.69,7.8152324312,-34.9365736294,7.662140954,-35.0053080694,7.96821251179,-34.8678643727 +J2020.91,B1963.90,134.503366944,-72.4111269318,134.508752259,-72.4927321248,134.496713839,-72.3295304626 +J2000.41,B1979.63,149.073048424,14.7065160273,149.078614359,14.7045538676,149.067482395,14.7084780734 +J2000.13,B1966.26,217.406604209,16.5186514295,217.408141458,16.5180765377,217.40506696,16.5192263332 +J2010.62,B1996.84,241.829541848,16.5114334946,241.950169443,16.4835846733,241.708924453,16.5393920451 
+J2006.99,B1954.80,301.991652158,46.8228690265,302.04602973,46.8435076393,301.937270072,46.8022617404 +J1989.22,B1994.16,280.629434995,-19.0017596678,280.47101531,-19.0127425519,280.787831403,-18.9906136966 +J1975.49,B1978.40,144.252375855,-10.2581330338,143.952794662,-10.1475953709,144.551902691,-10.3690875087 +J2004.74,B1953.10,286.0305233,12.7464714044,286.085513107,12.7537759609,285.975531683,12.739191194 +J2017.05,B1993.75,321.524751743,61.8464645226,321.632828791,61.9208329855,321.416592726,61.7722074849 +J1999.33,B1961.24,94.4962887092,-44.0946278203,94.4913067992,-44.0943400421,94.5012706073,-44.0949159215 +J2014.04,B1989.97,356.110922656,-39.1892569317,356.295020794,-39.1112673044,355.926608129,-39.2672295394 +J1995.63,B1990.09,307.190555646,-43.7191034979,307.116027145,-43.7337921796,307.265056341,-43.7043896052 +J1993.99,B1951.45,263.331776174,25.1917278571,263.270410907,25.195633174,263.393142235,25.187858127 +J2019.92,B1981.35,128.003624894,58.8666544649,128.402920612,58.7980654005,127.60315064,58.9346336939 +J2019.84,B1980.23,317.984216655,-8.89508525523,318.249905253,-8.81284951457,317.718360008,-8.97697809843 +J2011.02,B1953.91,312.465272698,5.18400310772,312.602344189,5.22548362633,312.328177207,5.1426308705 +J1989.24,B1988.65,344.0759205,-20.8070551085,343.931796087,-20.8646386849,344.219970948,-20.7494301859 +J1991.99,B1957.17,0.0386123471053,-42.7336081023,359.935984167,-42.778197083,0.141166805258,-42.6890191696 +J1989.26,B1973.18,5.95477509083,23.9728714179,5.81446857607,23.9133953285,6.09515408275,24.0323323244 +J2013.98,B1954.86,113.065220613,27.4191705733,113.281430058,27.3885381062,112.848903077,27.4495327526 +J1975.23,B1978.49,358.313822853,67.0446512684,358.006936646,66.906817269,358.62239279,67.1825070772 +J1979.23,B1970.19,53.5839203362,-15.011852649,53.3428201185,-15.0806959511,53.8250625845,-14.9434009383 +J1997.07,B1979.33,60.2557627351,25.6833225299,60.211425166,25.6752201005,60.3001057813,25.6914140019 
+J1987.55,B1987.44,273.08593329,76.4393919681,273.213340941,76.4355890802,272.958409407,76.443040877 +J2020.29,B1994.48,25.0306798156,-51.1202356021,25.2312583612,-51.0179789716,24.8298733815,-51.2226596567 +J2019.04,B1968.97,253.970437895,31.094899255,254.152950904,31.0657978691,253.787939628,31.1243251572 +J2010.83,B1964.62,168.89950144,-43.2270950714,169.027402777,-43.286276106,168.771701635,-43.167939929 +J1986.93,B1975.46,3.66775780511,39.2622225734,3.49661533708,39.1896011422,3.8390874932,39.3348301065 +J2021.26,B1976.64,278.936590632,6.21231840756,279.196246371,6.23097561081,278.676905991,6.19419108431 +J2023.48,B1955.27,285.91236301,9.40548699672,286.192352454,9.44163731007,285.632321786,9.36995103333 +J2003.91,B1952.30,53.8450026285,60.7259893436,53.9264872004,60.7388195386,53.763567111,60.7131341506 +J1988.45,B1981.10,8.53330744443,-7.54498028811,8.38660351469,-7.60858303157,8.6800005788,-7.48140196135 +J1990.05,B1991.12,274.342957522,-1.24603088049,274.214291508,-1.25015780077,274.471619291,-1.24177991998 +J2006.27,B1952.75,80.5212647616,19.4060625392,80.6137303362,19.4117801816,80.4288063349,19.4002893257 +J2013.99,B1989.90,94.3827831954,15.0883386826,94.5829613625,15.0822437507,94.1825907513,15.0941622997 +J1996.06,B1962.21,164.473020999,-47.6965440186,164.429008903,-47.6754169753,164.51704615,-47.7176755752 +J2007.85,B1990.18,89.9736906625,-16.9964263489,90.0609144086,-16.9964467144,89.8864669212,-16.9964725118 +J1996.18,B1964.91,204.582082173,15.6789515837,204.535627332,15.698292886,204.628535832,15.6596174499 diff --git a/astropy/coordinates/tests/accuracy/test_fk4_no_e_fk4.py b/astropy/coordinates/tests/accuracy/test_fk4_no_e_fk4.py new file mode 100644 index 0000000..7c18185 --- /dev/null +++ b/astropy/coordinates/tests/accuracy/test_fk4_no_e_fk4.py @@ -0,0 +1,63 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import os + +import numpy as 
np + +from .... import units as u +from ...builtin_frames import FK4NoETerms, FK4 +from ....time import Time +from ....table import Table +from ...angle_utilities import angular_separation +from ....utils.data import get_pkg_data_contents + +#the number of tests to run +from . import N_ACCURACY_TESTS + +# It looks as though SLALIB, which AST relies on, assumes a simplified version +# of the e-terms corretion, so we have to up the tolerance a bit to get things +# to agree. +TOLERANCE = 1.e-5 # arcseconds + +def test_fk4_no_e_fk4(): + lines = get_pkg_data_contents('fk4_no_e_fk4.csv').split('\n') + t = Table.read(lines, format='ascii', delimiter=',', guess=False) + + if N_ACCURACY_TESTS >= len(t): + idxs = range(len(t)) + else: + idxs = np.random.randint(len(t), size=N_ACCURACY_TESTS) + + diffarcsec1 = [] + diffarcsec2 = [] + for i in idxs: + # Extract row + r = t[int(i)] # int here is to get around a py 3.x astropy.table bug + + # FK4 to FK4NoETerms + c1 = FK4(ra=r['ra_in']*u.deg, dec=r['dec_in']*u.deg, + obstime=Time(r['obstime'], scale='utc')) + c2 = c1.transform_to(FK4NoETerms) + + # Find difference + diff = angular_separation(c2.ra.radian, c2.dec.radian, + np.radians(r['ra_fk4ne']), np.radians(r['dec_fk4ne'])) + + diffarcsec1.append(np.degrees(diff) * 3600.) + + # FK4NoETerms to FK4 + c1 = FK4NoETerms(ra=r['ra_in']*u.deg, dec=r['dec_in']*u.deg, + obstime=Time(r['obstime'], scale='utc')) + c2 = c1.transform_to(FK4) + + # Find difference + diff = angular_separation(c2.ra.radian, c2.dec.radian, + np.radians(r['ra_fk4']), + np.radians(r['dec_fk4'])) + + diffarcsec2.append(np.degrees(diff) * 3600.) 
+ + np.testing.assert_array_less(diffarcsec1, TOLERANCE) + np.testing.assert_array_less(diffarcsec2, TOLERANCE) diff --git a/astropy/coordinates/tests/accuracy/test_fk4_no_e_fk5.py b/astropy/coordinates/tests/accuracy/test_fk4_no_e_fk5.py new file mode 100644 index 0000000..2864777 --- /dev/null +++ b/astropy/coordinates/tests/accuracy/test_fk4_no_e_fk5.py @@ -0,0 +1,65 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import os + +import numpy as np + +from .... import units as u +from ...builtin_frames import FK4NoETerms, FK5 +from ....time import Time +from ....table import Table +from ...angle_utilities import angular_separation +from ....utils.data import get_pkg_data_contents + +#the number of tests to run +from . import N_ACCURACY_TESTS + +TOLERANCE = 0.03 # arcseconds + + +def test_fk4_no_e_fk5(): + lines = get_pkg_data_contents('fk4_no_e_fk5.csv').split('\n') + t = Table.read(lines, format='ascii', delimiter=',', guess=False) + + if N_ACCURACY_TESTS >= len(t): + idxs = range(len(t)) + else: + idxs = np.random.randint(len(t), size=N_ACCURACY_TESTS) + + diffarcsec1 = [] + diffarcsec2 = [] + for i in idxs: + # Extract row + r = t[int(i)] # int here is to get around a py 3.x astropy.table bug + + # FK4NoETerms to FK5 + c1 = FK4NoETerms(ra=r['ra_in']*u.deg, dec=r['dec_in']*u.deg, + obstime=Time(r['obstime'], scale='utc'), + equinox=Time(r['equinox_fk4'], scale='utc')) + c2 = c1.transform_to(FK5(equinox=Time(r['equinox_fk5'], scale='utc'))) + + # Find difference + diff = angular_separation(c2.ra.radian, c2.dec.radian, + np.radians(r['ra_fk5']), + np.radians(r['dec_fk5'])) + + diffarcsec1.append(np.degrees(diff) * 3600.) 
+ + # FK5 to FK4NoETerms + c1 = FK5(ra=r['ra_in']*u.deg, dec=r['dec_in']*u.deg, + equinox=Time(r['equinox_fk5'], scale='utc')) + fk4neframe = FK4NoETerms(obstime=Time(r['obstime'], scale='utc'), + equinox=Time(r['equinox_fk4'], scale='utc')) + c2 = c1.transform_to(fk4neframe) + + # Find difference + diff = angular_separation(c2.ra.radian, c2.dec.radian, + np.radians(r['ra_fk4']), + np.radians(r['dec_fk4'])) + + diffarcsec2.append(np.degrees(diff) * 3600.) + + np.testing.assert_array_less(diffarcsec1, TOLERANCE) + np.testing.assert_array_less(diffarcsec2, TOLERANCE) diff --git a/astropy/coordinates/tests/accuracy/test_galactic_fk4.py b/astropy/coordinates/tests/accuracy/test_galactic_fk4.py new file mode 100644 index 0000000..ceb5eae --- /dev/null +++ b/astropy/coordinates/tests/accuracy/test_galactic_fk4.py @@ -0,0 +1,61 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import os + +import numpy as np + +from .... import units as u +from ...builtin_frames import Galactic, FK4 +from ....time import Time +from ....table import Table +from ...angle_utilities import angular_separation +from ....utils.data import get_pkg_data_contents + +#the number of tests to run +from . 
import N_ACCURACY_TESTS + +TOLERANCE = 0.3 # arcseconds + +def test_galactic_fk4(): + lines = get_pkg_data_contents('galactic_fk4.csv').split('\n') + t = Table.read(lines, format='ascii', delimiter=',', guess=False) + + if N_ACCURACY_TESTS >= len(t): + idxs = range(len(t)) + else: + idxs = np.random.randint(len(t), size=N_ACCURACY_TESTS) + + diffarcsec1 = [] + diffarcsec2 = [] + for i in idxs: + # Extract row + r = t[int(i)] # int here is to get around a py 3.x astropy.table bug + + # Galactic to FK4 + c1 = Galactic(l=r['lon_in']*u.deg, b=r['lat_in']*u.deg) + c2 = c1.transform_to(FK4(equinox=Time(r['equinox_fk4'], scale='utc'))) + + # Find difference + diff = angular_separation(c2.ra.radian, c2.dec.radian, + np.radians(r['ra_fk4']), + np.radians(r['dec_fk4'])) + + diffarcsec1.append(np.degrees(diff) * 3600.) + + # FK4 to Galactic + c1 = FK4(ra=r['lon_in']*u.deg, dec=r['lat_in']*u.deg, + obstime=Time(r['obstime'], scale='utc'), + equinox=Time(r['equinox_fk4'], scale='utc')) + c2 = c1.transform_to(Galactic) + + # Find difference + diff = angular_separation(c2.l.radian, c2.b.radian, + np.radians(r['lon_gal']), + np.radians(r['lat_gal'])) + + diffarcsec2.append(np.degrees(diff) * 3600.) + + np.testing.assert_array_less(diffarcsec1, TOLERANCE) + np.testing.assert_array_less(diffarcsec2, TOLERANCE) diff --git a/astropy/coordinates/tests/accuracy/test_icrs_fk5.py b/astropy/coordinates/tests/accuracy/test_icrs_fk5.py new file mode 100644 index 0000000..5ef86d2 --- /dev/null +++ b/astropy/coordinates/tests/accuracy/test_icrs_fk5.py @@ -0,0 +1,61 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import os + +import numpy as np + +from .... 
import units as u +from ...builtin_frames import ICRS, FK5 +from ....time import Time +from ....table import Table +from ...angle_utilities import angular_separation +from ....utils.data import get_pkg_data_contents + +#the number of tests to run +from . import N_ACCURACY_TESTS + +TOLERANCE = 0.03 # arcseconds + + +def test_icrs_fk5(): + lines = get_pkg_data_contents('icrs_fk5.csv').split('\n') + t = Table.read(lines, format='ascii', delimiter=',', guess=False) + + if N_ACCURACY_TESTS >= len(t): + idxs = range(len(t)) + else: + idxs = np.random.randint(len(t), size=N_ACCURACY_TESTS) + + diffarcsec1 = [] + diffarcsec2 = [] + for i in idxs: + # Extract row + r = t[int(i)] # int here is to get around a py 3.x astropy.table bug + + # ICRS to FK5 + c1 = ICRS(ra=r['ra_in']*u.deg, dec=r['dec_in']*u.deg) + c2 = c1.transform_to(FK5(equinox=Time(r['equinox_fk5'], scale='utc'))) + + # Find difference + diff = angular_separation(c2.ra.radian, c2.dec.radian, + np.radians(r['ra_fk5']), + np.radians(r['dec_fk5'])) + + diffarcsec1.append(np.degrees(diff) * 3600.) + + # FK5 to ICRS + c1 = FK5(ra=r['ra_in']*u.deg, dec=r['dec_in']*u.deg, + equinox=Time(r['equinox_fk5'], scale='utc')) + c2 = c1.transform_to(ICRS) + + # Find difference + diff = angular_separation(c2.ra.radian, c2.dec.radian, + np.radians(r['ra_icrs']), + np.radians(r['dec_icrs'])) + + diffarcsec2.append(np.degrees(diff) * 3600.) 
+ + np.testing.assert_array_less(diffarcsec1, TOLERANCE) + np.testing.assert_array_less(diffarcsec2, TOLERANCE) diff --git a/astropy/coordinates/tests/test_angles.py b/astropy/coordinates/tests/test_angles.py new file mode 100644 index 0000000..e03bd72 --- /dev/null +++ b/astropy/coordinates/tests/test_angles.py @@ -0,0 +1,817 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +# TEST_UNICODE_LITERALS + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +"""Test initialization and other aspects of Angle and subclasses""" + +import numpy as np +from numpy.testing.utils import assert_allclose, assert_array_equal + +from ..angles import Longitude, Latitude, Angle +from ...tests.helper import pytest +from ... import units as u +from ..errors import IllegalSecondError, IllegalMinuteError, IllegalHourError + + +def test_create_angles(): + """ + Tests creating and accessing Angle objects + """ + + ''' The "angle" is a fundamental object. The internal + representation is stored in radians, but this is transparent to the user. + Units *must* be specified rather than a default value be assumed. This is + as much for self-documenting code as anything else. + + Angle objects simply represent a single angular coordinate. More specific + angular coordinates (e.g.
Longitude, Latitude) are subclasses of Angle.''' + + a1 = Angle(54.12412, unit=u.degree) + a2 = Angle("54.12412", unit=u.degree) + a3 = Angle("54:07:26.832", unit=u.degree) + a4 = Angle("54.12412 deg") + a5 = Angle("54.12412 degrees") + a6 = Angle("54.12412°") # because we like Unicode + a7 = Angle((54, 7, 26.832), unit=u.degree) + a8 = Angle("54°07'26.832\"") + # (deg,min,sec) *tuples* are acceptable, but lists/arrays are *not* + # because of the need to eventually support arrays of coordinates + a9 = Angle([54, 7, 26.832], unit=u.degree) + assert_allclose(a9.value, [54, 7, 26.832]) + assert a9.unit is u.degree + + a10 = Angle(3.60827466667, unit=u.hour) + a11 = Angle("3:36:29.7888000120", unit=u.hour) + a12 = Angle((3, 36, 29.7888000120), unit=u.hour) # *must* be a tuple + + Angle(0.944644098745, unit=u.radian) + + with pytest.raises(u.UnitsError): + Angle(54.12412) + #raises an exception because this is ambiguous + + with pytest.raises(ValueError): + a13 = Angle(12.34, unit="not a unit") + + a14 = Angle("12h43m32") # no trailing 's', but unambiguous + + a15 = Angle("5h4m3s") # single digits, no decimal + + a16 = Angle("1 d") + a17 = Angle("1 degree") + assert a16.degree == 1 + assert a17.degree == 1 + + #ensure the above angles that should match do + assert a1 == a2 == a3 == a4 == a5 == a6 == a7 + assert_allclose(a1.radian, a2.radian) + assert_allclose(a2.degree, a3.degree) + assert_allclose(a3.radian, a4.radian) + assert_allclose(a4.radian, a5.radian) + assert_allclose(a5.radian, a6.radian) + assert_allclose(a6.radian, a7.radian) + #assert a10 == a11 == a12 + + # check for illegal ranges / values + with pytest.raises(IllegalSecondError): + a = Angle("12 32 99", unit=u.degree) + + with pytest.raises(IllegalMinuteError): + a = Angle("12 99 23", unit=u.degree) + + with pytest.raises(IllegalSecondError): + a = Angle("12 32 99", unit=u.hour) + + with pytest.raises(IllegalMinuteError): + a = Angle("12 99 23", unit=u.hour) + + with pytest.raises(IllegalHourError): + a 
= Angle("99 25 51.0", unit=u.hour) + + with pytest.raises(ValueError): + a = Angle("12 25 51.0xxx", unit=u.hour) + + with pytest.raises(ValueError): + a = Angle("12h34321m32.2s") + + assert a1 is not None + +def test_angle_ops(): + """ + Tests operations on Angle objects + """ + + # Angles can be added and subtracted. Multiplication and division by a + # scalar is also permitted. A negative operator is also valid. All of + # these operate in a single dimension. Attempting to multiply or divide two + # Angle objects will return a quantity. An exception will be raised if it + # is attempted to store output with a non-angular unit in an Angle [#2718]. + + a1 = Angle(3.60827466667, unit=u.hour) + a2 = Angle("54:07:26.832", unit=u.degree) + a1 + a2 # creates new Angle object + a1 - a2 + -a1 + + assert_allclose((a1 * 2).hour, 2 * 3.6082746666700003) + assert abs((a1 / 3.123456).hour - 3.60827466667 / 3.123456) < 1e-10 + + # commutativity + assert (2 * a1).hour == (a1 * 2).hour + + a3 = Angle(a1) # makes a *copy* of the object, but identical content as a1 + assert_allclose(a1.radian, a3.radian) + assert a1 is not a3 + + a4 = abs(-a1) + assert a4.radian == a1.radian + + a5 = Angle(5.0, unit=u.hour) + assert a5 > a1 + assert a5 >= a1 + assert a1 < a5 + assert a1 <= a5 + + a6 = Angle(45., u.degree) + a7 = a6 * a5 + assert type(a7) is u.Quantity + + with pytest.raises(TypeError): + a6 *= a5 + + with pytest.raises(TypeError): + np.sin(a6, out=a6) + + +def test_angle_convert(): + """ + Test unit conversion of Angle objects + """ + angle = Angle("54.12412", unit=u.degree) + + assert_allclose(angle.hour, 3.60827466667) + assert_allclose(angle.radian, 0.944644098745) + assert_allclose(angle.degree, 54.12412) + + assert len(angle.hms) == 3 + assert isinstance(angle.hms, tuple) + assert angle.hms[0] == 3 + assert angle.hms[1] == 36 + assert_allclose(angle.hms[2], 29.78879999999947) + #also check that the namedtuple attribute-style access works: + assert angle.hms.h == 3 + assert 
angle.hms.m == 36 + assert_allclose(angle.hms.s, 29.78879999999947) + + assert len(angle.dms) == 3 + assert isinstance(angle.dms, tuple) + assert angle.dms[0] == 54 + assert angle.dms[1] == 7 + assert_allclose(angle.dms[2], 26.831999999992036) + #also check that the namedtuple attribute-style access works: + assert angle.dms.d == 54 + assert angle.dms.m == 7 + assert_allclose(angle.dms.s, 26.831999999992036) + + assert isinstance(angle.dms[0], float) + assert isinstance(angle.hms[0], float) + + #now make sure dms and signed_dms work right for negative angles + negangle = Angle("-54.12412", unit=u.degree) + + assert negangle.dms.d == -54 + assert negangle.dms.m == -7 + assert_allclose(negangle.dms.s, -26.831999999992036) + assert negangle.signed_dms.sign == -1 + assert negangle.signed_dms.d == 54 + assert negangle.signed_dms.m == 7 + assert_allclose(negangle.signed_dms.s, 26.831999999992036) + + +def test_angle_formatting(): + """ + Tests string formatting for Angle objects + """ + + ''' + The string method of Angle has this signature: + def string(self, unit=DEGREE, decimal=False, sep=" ", precision=5, + pad=False): + + The "decimal" parameter defaults to False since if you need to print the + Angle as a decimal, there's no need to use the "format" method (see + above). 
+ ''' + + angle = Angle("54.12412", unit=u.degree) + + #__str__ is the default `format` + assert str(angle) == angle.to_string() + + res = 'Angle as HMS: 3h36m29.7888s' + assert "Angle as HMS: {0}".format(angle.to_string(unit=u.hour)) == res + + res = 'Angle as HMS: 3:36:29.7888' + assert "Angle as HMS: {0}".format(angle.to_string(unit=u.hour, sep=":")) == res + + res = 'Angle as HMS: 3:36:29.79' + assert "Angle as HMS: {0}".format(angle.to_string(unit=u.hour, sep=":", + precision=2)) == res + + # Note that you can provide one, two, or three separators passed as a + # tuple or list + + res = 'Angle as HMS: 3h36m29.7888s' + assert "Angle as HMS: {0}".format(angle.to_string(unit=u.hour, + sep=("h", "m", "s"), + precision=4)) == res + + res = 'Angle as HMS: 3-36|29.7888' + assert "Angle as HMS: {0}".format(angle.to_string(unit=u.hour, sep=["-", "|"], + precision=4)) == res + + res = 'Angle as HMS: 3-36-29.7888' + assert "Angle as HMS: {0}".format(angle.to_string(unit=u.hour, sep="-", + precision=4)) == res + + res = 'Angle as HMS: 03h36m29.7888s' + assert "Angle as HMS: {0}".format(angle.to_string(unit=u.hour, precision=4, + pad=True)) == res + + # Same as above, in degrees + + angle = Angle("3 36 29.78880", unit=u.degree) + + res = 'Angle as DMS: 3d36m29.7888s' + assert "Angle as DMS: {0}".format(angle.to_string(unit=u.degree)) == res + + res = 'Angle as DMS: 3:36:29.7888' + assert "Angle as DMS: {0}".format(angle.to_string(unit=u.degree, sep=":")) == res + + res = 'Angle as DMS: 3:36:29.79' + assert "Angle as DMS: {0}".format(angle.to_string(unit=u.degree, sep=":", + precision=2)) == res + + # Note that you can provide one, two, or three separators passed as a + # tuple or list + + res = 'Angle as DMS: 3d36m29.7888s' + assert "Angle as DMS: {0}".format(angle.to_string(unit=u.degree, + sep=("d", "m", "s"), + precision=4)) == res + + res = 'Angle as DMS: 3-36|29.7888' + assert "Angle as DMS: {0}".format(angle.to_string(unit=u.degree, sep=["-", "|"], + precision=4)) == 
res + + res = 'Angle as DMS: 3-36-29.7888' + assert "Angle as DMS: {0}".format(angle.to_string(unit=u.degree, sep="-", + precision=4)) == res + + res = 'Angle as DMS: 03d36m29.7888s' + assert "Angle as DMS: {0}".format(angle.to_string(unit=u.degree, precision=4, + pad=True)) == res + + res = 'Angle as rad: 0.0629763rad' + assert "Angle as rad: {0}".format(angle.to_string(unit=u.radian)) == res + + res = 'Angle as rad decimal: 0.0629763' + assert "Angle as rad decimal: {0}".format(angle.to_string(unit=u.radian, decimal=True)) == res + + + # check negative angles + + angle = Angle(-1.23456789, unit=u.degree) + angle2 = Angle(-1.23456789, unit=u.hour) + + assert angle.to_string() == '-1d14m04.4444s' + assert angle.to_string(pad=True) == '-01d14m04.4444s' + assert angle.to_string(unit=u.hour) == '-0h04m56.2963s' + assert angle2.to_string(unit=u.hour, pad=True) == '-01h14m04.4444s' + assert angle.to_string(unit=u.radian, decimal=True) == '-0.0215473' + +def test_angle_format_roundtripping(): + """ + Ensures that the string representation of an angle can be used to create a + new valid Angle. + """ + + a1 = Angle(0, unit=u.radian) + a2 = Angle(10, unit=u.degree) + a3 = Angle(0.543, unit=u.degree) + a4 = Angle('1d2m3.4s') + + assert Angle(str(a1)).degree == a1.degree + assert Angle(str(a2)).degree == a2.degree + assert Angle(str(a3)).degree == a3.degree + assert Angle(str(a4)).degree == a4.degree + + #also check Longitude/Latitude + ra = Longitude('1h2m3.4s') + dec = Latitude('1d2m3.4s') + + assert_allclose(Angle(str(ra)).degree, ra.degree) + assert_allclose(Angle(str(dec)).degree, dec.degree) + + +def test_radec(): + """ + Tests creation/operations of Longitude and Latitude objects + """ + + ''' + Longitude and Latitude are objects that are subclassed from Angle. As with Angle, Longitude + and Latitude can parse any unambiguous format (tuples, formatted strings, etc.). + + The intention is not to create an Angle subclass for every possible + coordinate object (e.g.
galactic l, galactic b). However, equatorial Longitude/Latitude + are so prevalent in astronomy that it's worth creating ones for these + units. They will be noted as "special" in the docs and use of just the + Angle class is to be used for other coordinate systems. + ''' + + with pytest.raises(u.UnitsError): + ra = Longitude("4:08:15.162342") # error - hours or degrees? + with pytest.raises(u.UnitsError): + ra = Longitude("-4:08:15.162342") + + # the "smart" initializer allows >24 to automatically do degrees, but the + #Angle-based one does not + #TODO: adjust in 0.3 for whatever behavior is decided on + + #ra = Longitude("26:34:15.345634") # unambiguous b/c hours don't go past 24 + #assert_allclose(ra.degree, 26.570929342) + with pytest.raises(u.UnitsError): + ra = Longitude("26:34:15.345634") + + #ra = Longitude(68) + with pytest.raises(u.UnitsError): + ra = Longitude(68) + + with pytest.raises(u.UnitsError): + ra = Longitude(12) + + with pytest.raises(ValueError): + ra = Longitude("garbage containing a d and no units") + + ra = Longitude("12h43m23s") + assert_allclose(ra.hour, 12.7230555556) + + ra = Longitude((56, 14, 52.52), unit=u.degree) # can accept tuples + #TODO: again, fix based on >24 behavior + #ra = Longitude((56,14,52.52)) + with pytest.raises(u.UnitsError): + ra = Longitude((56, 14, 52.52)) + with pytest.raises(u.UnitsError): + ra = Longitude((12, 14, 52)) # ambiguous w/o units + ra = Longitude((12, 14, 52), unit=u.hour) + + ra = Longitude([56, 64, 52.2], unit=u.degree) # ...but not arrays (yet) + + # Units can be specified + ra = Longitude("4:08:15.162342", unit=u.hour) + + #TODO: this was the "smart" initializer behavior - adjust in 0.3 appropriately + ## Where Longitude values are commonly found in hours or degrees, declination is + ## nearly always specified in degrees, so this is the default.
+ #dec = Latitude("-41:08:15.162342") + with pytest.raises(u.UnitsError): + dec = Latitude("-41:08:15.162342") + dec = Latitude("-41:08:15.162342", unit=u.degree) # same as above + + +def test_negative_zero_dms(): + # Test for DMS parser + a = Angle('-00:00:10', u.deg) + assert_allclose(a.degree, -10. / 3600.) + + # Unicode minus + a = Angle('−00:00:10', u.deg) + assert_allclose(a.degree, -10. / 3600.) + + +def test_negative_zero_dm(): + # Test for DM parser + a = Angle('-00:10', u.deg) + assert_allclose(a.degree, -10. / 60.) + + +def test_negative_zero_hms(): + # Test for HMS parser + a = Angle('-00:00:10', u.hour) + assert_allclose(a.hour, -10. / 3600.) + + +def test_negative_zero_hm(): + # Test for HM parser + a = Angle('-00:10', u.hour) + assert_allclose(a.hour, -10. / 60.) + + +def test_negative_sixty_hm(): + # Test for HM parser + a = Angle('-00:60', u.hour) + assert_allclose(a.hour, -1.) + + +def test_plus_sixty_hm(): + # Test for HM parser + a = Angle('00:60', u.hour) + assert_allclose(a.hour, 1.) + + +def test_negative_fifty_nine_sixty_dms(): + # Test for DMS parser + a = Angle('-00:59:60', u.deg) + assert_allclose(a.degree, -1.) + + +def test_plus_fifty_nine_sixty_dms(): + # Test for DMS parser + a = Angle('+00:59:60', u.deg) + assert_allclose(a.degree, 1.) + + +def test_negative_sixty_dms(): + # Test for DMS parser + a = Angle('-00:00:60', u.deg) + assert_allclose(a.degree, -1. / 60.) + + +def test_plus_sixty_dms(): + # Test for DMS parser + a = Angle('+00:00:60', u.deg) + assert_allclose(a.degree, 1. / 60.) 
+ + +def test_angle_to_is_angle(): + a = Angle('00:00:60', u.deg) + assert isinstance(a, Angle) + assert isinstance(a.to(u.rad), Angle) + + +def test_angle_to_quantity(): + a = Angle('00:00:60', u.deg) + q = u.Quantity(a) + assert isinstance(q, u.Quantity) + assert q.unit is u.deg + + +def test_quantity_to_angle(): + a = Angle(1.0*u.deg) + assert isinstance(a, Angle) + with pytest.raises(u.UnitsError): + Angle(1.0*u.meter) + a = Angle(1.0*u.hour) + assert isinstance(a, Angle) + assert a.unit is u.hourangle + with pytest.raises(u.UnitsError): + Angle(1.0*u.min) + + +def test_angle_string(): + a = Angle('00:00:60', u.deg) + assert str(a) == '0d01m00s' + a = Angle('-00:00:10', u.hour) + assert str(a) == '-0h00m10s' + a = Angle(3.2, u.radian) + assert str(a) == '3.2rad' + a = Angle(4.2, u.microarcsecond) + assert str(a) == '4.2uarcsec' + a = Angle('1.0uarcsec') + assert a.value == 1.0 + assert a.unit == u.microarcsecond + a = Angle("3d") + assert_allclose(a.value, 3.0) + assert a.unit == u.degree + a = Angle('10"') + assert_allclose(a.value, 10.0) + assert a.unit == u.arcsecond + a = Angle("10'") + assert_allclose(a.value, 10.0) + assert a.unit == u.arcminute + + +def test_angle_repr(): + assert 'Angle' in repr(Angle(0, u.deg)) + assert 'Longitude' in repr(Longitude(0, u.deg)) + assert 'Latitude' in repr(Latitude(0, u.deg)) + + a = Angle(0, u.deg) + repr(a) + + +def test_large_angle_representation(): + """Test that angles above 360 degrees can be output as strings, + in repr, str, and to_string. 
(regression test for #1413)""" + a = Angle(350, u.deg) + Angle(350, u.deg) + a.to_string() + a.to_string(u.hourangle) + repr(a) + repr(a.to(u.hourangle)) + str(a) + str(a.to(u.hourangle)) + + +def test_wrap_at_inplace(): + a = Angle([-20, 150, 350, 360] * u.deg) + out = a.wrap_at('180d', inplace=True) + assert out is None + assert np.all(a.degree == np.array([-20., 150., -10., 0.])) + + +def test_latitude(): + with pytest.raises(ValueError): + lat = Latitude(['91d', '89d']) + with pytest.raises(ValueError): + lat = Latitude('-91d') + + lat = Latitude(['90d', '89d']) + # check that one can get items + assert lat[0] == 90 * u.deg + assert lat[1] == 89 * u.deg + # and that comparison with angles works + assert np.all(lat == Angle(['90d', '89d'])) + # check setitem works + lat[1] = 45. * u.deg + assert np.all(lat == Angle(['90d', '45d'])) + # but not with values out of range + with pytest.raises(ValueError): + lat[0] = 90.001 * u.deg + with pytest.raises(ValueError): + lat[0] = -90.001 * u.deg + # these should also not destroy input (#1851) + assert np.all(lat == Angle(['90d', '45d'])) + + # conserve type on unit change (closes #1423) + angle = lat.to('radian') + assert type(angle) is Latitude + # but not on calculations + angle = lat - 190 * u.deg + assert type(angle) is Angle + assert angle[0] == -100 * u.deg + + lat = Latitude('80d') + angle = lat / 2. + assert type(angle) is Angle + assert angle == 40 * u.deg + + angle = lat * 2. + assert type(angle) is Angle + assert angle == 160 * u.deg + + angle = -lat + assert type(angle) is Angle + assert angle == -80 * u.deg + + # Test errors when trying to interoperate with longitudes. 
+ with pytest.raises(TypeError) as excinfo: + lon = Longitude(10, 'deg') + lat = Latitude(lon) + assert "A Latitude angle cannot be created from a Longitude angle" in str(excinfo) + + with pytest.raises(TypeError) as excinfo: + lon = Longitude(10, 'deg') + lat = Latitude([20], 'deg') + lat[0] = lon + assert "A Longitude angle cannot be assigned to a Latitude angle" in str(excinfo) + + # Check we can work around the Lat vs Long checks by casting explicitly to Angle. + lon = Longitude(10,'deg') + lat = Latitude(Angle(lon)) + assert lat.value == 10.0 + # Check setitem. + lon = Longitude(10,'deg') + lat = Latitude([20], 'deg') + lat[0] = Angle(lon) + assert lat.value[0] == 10.0 + +def test_longitude(): + # Default wrapping at 360d with an array input + lon = Longitude(['370d', '88d']) + assert np.all(lon == Longitude(['10d', '88d'])) + assert np.all(lon == Angle(['10d', '88d'])) + + # conserve type on unit change and keep wrap_angle (closes #1423) + angle = lon.to('hourangle') + assert type(angle) is Longitude + assert angle.wrap_angle == lon.wrap_angle + angle = lon[0] + assert type(angle) is Longitude + assert angle.wrap_angle == lon.wrap_angle + angle = lon[1:] + assert type(angle) is Longitude + assert angle.wrap_angle == lon.wrap_angle + + # but not on calculations + angle = lon / 2. + assert np.all(angle == Angle(['5d', '44d'])) + assert type(angle) is Angle + assert not hasattr(angle, 'wrap_angle') + + angle = lon * 2. 
+ 400 * u.deg + assert np.all(angle == Angle(['420d', '576d'])) + assert type(angle) is Angle + + # Test setting a mutable value and having it wrap + lon[1] = -10 * u.deg + assert np.all(lon == Angle(['10d', '350d'])) + + # Test wrapping and try hitting some edge cases + lon = Longitude(np.array([0, 0.5, 1.0, 1.5, 2.0]) * np.pi, unit=u.radian) + assert np.all(lon.degree == np.array([0., 90, 180, 270, 0])) + + lon = Longitude(np.array([0, 0.5, 1.0, 1.5, 2.0]) * np.pi, unit=u.radian, wrap_angle='180d') + assert np.all(lon.degree == np.array([0., 90, -180, -90, 0])) + + # Wrap on setting wrap_angle property (also test auto-conversion of wrap_angle to an Angle) + lon = Longitude(np.array([0, 0.5, 1.0, 1.5, 2.0]) * np.pi, unit=u.radian) + lon.wrap_angle = '180d' + assert np.all(lon.degree == np.array([0., 90, -180, -90, 0])) + + lon = Longitude('460d') + assert lon == Angle('100d') + lon.wrap_angle = '90d' + assert lon == Angle('-260d') + + #check that if we initialize a longitude with another longitude, + #wrap_angle is kept by default + lon2 = Longitude(lon) + assert lon2.wrap_angle == lon.wrap_angle + #but not if we explicitly set it + lon3 = Longitude(lon, wrap_angle='180d') + assert lon3.wrap_angle == 180 * u.deg + + #check for problem reported in #2037 about Longitude initializing to -0 + lon = Longitude(0, u.deg) + lonstr = lon.to_string() + assert not lonstr.startswith('-') + + #also make sure dtype is correctly conserved + assert Longitude(0, u.deg, dtype=float).dtype == np.dtype(float) + assert Longitude(0, u.deg, dtype=int).dtype == np.dtype(int) + + # Test errors when trying to interoperate with latitudes. 
+ with pytest.raises(TypeError) as excinfo: + lat = Latitude(10, 'deg') + lon = Longitude(lat) + assert "A Longitude angle cannot be created from a Latitude angle" in str(excinfo) + + with pytest.raises(TypeError) as excinfo: + lat = Latitude(10, 'deg') + lon = Longitude([20], 'deg') + lon[0] = lat + assert "A Latitude angle cannot be assigned to a Longitude angle" in str(excinfo) + + # Check we can work around the Lat vs Long checks by casting explicitly to Angle. + lat = Latitude(10,'deg') + lon = Longitude(Angle(lat)) + assert lon.value == 10.0 + # Check setitem. + lat = Latitude(10,'deg') + lon = Longitude([20], 'deg') + lon[0] = Angle(lat) + assert lon.value[0] == 10.0 + + +def test_wrap_at(): + a = Angle([-20, 150, 350, 360] * u.deg) + assert np.all(a.wrap_at(360 * u.deg).degree == np.array([340., 150., 350., 0.])) + assert np.all(a.wrap_at(Angle(360, unit=u.deg)).degree == np.array([340., 150., 350., 0.])) + assert np.all(a.wrap_at('360d').degree == np.array([340., 150., 350., 0.])) + assert np.all(a.wrap_at('180d').degree == np.array([-20., 150., -10., 0.])) + assert np.all(a.wrap_at(np.pi * u.rad).degree == np.array([-20., 150., -10., 0.])) + + # Test wrapping a scalar Angle + a = Angle('190d') + assert a.wrap_at('180d') == Angle('-170d') + + a = Angle(np.arange(-1000.0, 1000.0, 0.125), unit=u.deg) + for wrap_angle in (270, 0.2, 0.0, 360.0, 500, -2000.125): + aw = a.wrap_at(wrap_angle * u.deg) + assert np.all(aw.degree >= wrap_angle - 360.0) + assert np.all(aw.degree < wrap_angle) + + aw = a.to(u.rad).wrap_at(wrap_angle * u.deg) + assert np.all(aw.degree >= wrap_angle - 360.0) + assert np.all(aw.degree < wrap_angle) + + +def test_is_within_bounds(): + a = Angle([-20, 150, 350] * u.deg) + assert a.is_within_bounds('0d', '360d') is False + assert a.is_within_bounds(None, '360d') is True + assert a.is_within_bounds(-30 * u.deg, None) is True + + a = Angle('-20d') + assert a.is_within_bounds('0d', '360d') is False + assert a.is_within_bounds(None, '360d') is 
True + assert a.is_within_bounds(-30 * u.deg, None) is True + + +def test_angle_mismatched_unit(): + a = Angle('+6h7m8s', unit=u.degree) + assert_allclose(a.value, 91.78333333333332) + + +def test_regression_formatting_negative(): + # Regression test for a bug that caused: + # + # >>> Angle(-1., unit='deg').to_string() + # '-1d00m-0s' + assert Angle(-0., unit='deg').to_string() == '-0d00m00s' + assert Angle(-1., unit='deg').to_string() == '-1d00m00s' + assert Angle(-0., unit='hour').to_string() == '-0h00m00s' + assert Angle(-1., unit='hour').to_string() == '-1h00m00s' + +def test_empty_sep(): + a = Angle('05h04m31.93830s') + + assert a.to_string(sep='', precision=2, pad=True) == '050431.94' + +def test_create_tuple(): + """ + Tests creation of an angle with a (d,m,s) or (h,m,s) tuple + """ + a1 = Angle((1, 30, 0), unit=u.degree) + assert a1.value == 1.5 + + a1 = Angle((1, 30, 0), unit=u.hourangle) + assert a1.value == 1.5 + +def test_list_of_quantities(): + a1 = Angle([1*u.deg, 1*u.hourangle]) + assert a1.unit == u.deg + assert_allclose(a1.value, [1, 15]) + + a2 = Angle([1*u.hourangle, 1*u.deg], u.deg) + assert a2.unit == u.deg + assert_allclose(a2.value, [15, 1]) + +def test_multiply_divide(): + # Issue #2273 + a1 = Angle([1, 2, 3], u.deg) + a2 = Angle([4, 5, 6], u.deg) + a3 = a1 * a2 + assert_allclose(a3.value, [4, 10, 18]) + assert a3.unit == (u.deg * u.deg) + + a3 = a1 / a2 + assert_allclose(a3.value, [.25, .4, .5]) + assert a3.unit == u.dimensionless_unscaled + +def test_mixed_string_and_quantity(): + a1 = Angle(['1d', 1. 
* u.deg]) + assert_array_equal(a1.value, [1., 1.]) + assert a1.unit == u.deg + + a2 = Angle(['1d', 1 * u.rad * np.pi, '3d']) + assert_array_equal(a2.value, [1., 180., 3.]) + assert a2.unit == u.deg + +def test_rotation_matrix(): + from ..angles import rotation_matrix + + assert_array_equal(rotation_matrix(0*u.deg, 'x'), np.eye(3)) + + assert_allclose(rotation_matrix(90*u.deg, 'y'), [[ 0, 0,-1], + [ 0, 1, 0], + [ 1, 0, 0]], atol=1e-12) + + assert_allclose(rotation_matrix(-90*u.deg, 'z'), [[ 0,-1, 0], + [ 1, 0, 0], + [ 0, 0, 1]], atol=1e-12) + + assert_allclose(rotation_matrix(45*u.deg, 'x'), + rotation_matrix(45*u.deg, [1, 0, 0])) + assert_allclose(rotation_matrix(125*u.deg, 'y'), + rotation_matrix(125*u.deg, [0, 1, 0])) + assert_allclose(rotation_matrix(-30*u.deg, 'z'), + rotation_matrix(-30*u.deg, [0, 0, 1])) + + assert_allclose(np.dot(rotation_matrix(180*u.deg, [1, 1, 0]).A, [1, 0, 0]), + [0, 1, 0], atol=1e-12) + + #make sure it also works for very small angles + assert_allclose(rotation_matrix(0.000001*u.deg, 'x'), + rotation_matrix(0.000001*u.deg, [1, 0, 0])) + +def test_angle_axis(): + from ..angles import rotation_matrix, angle_axis + + m1 = rotation_matrix(35*u.deg, 'x') + an1, ax1 = angle_axis(m1) + + assert an1 - 35*u.deg < 1e-10*u.deg + assert_allclose(ax1, [1, 0, 0]) + + + m2 = rotation_matrix(-89*u.deg, [1, 1, 0]) + an2, ax2 = angle_axis(m2) + + assert an2 - 89*u.deg < 1e-10*u.deg + assert_allclose(ax2, [-2**-0.5, -2**-0.5, 0]) diff --git a/astropy/coordinates/tests/test_angular_separation.py b/astropy/coordinates/tests/test_angular_separation.py new file mode 100644 index 0000000..d7bbdff --- /dev/null +++ b/astropy/coordinates/tests/test_angular_separation.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +""" +Tests for the projected separation stuff +""" + +import numpy as np +from numpy import 
testing as npt + +from ...tests.helper import pytest +from ... import units as u +from ..builtin_frames import ICRS, FK5, Galactic +from .. import Angle, Distance + +# lon1, lat1, lon2, lat2 in degrees +coords = [(1, 0, 0, 0), + (0, 1, 0, 0), + (0, 0, 1, 0), + (0, 0, 0, 1), + (0, 0, 10, 0), + (0, 0, 90, 0), + (0, 0, 180, 0), + (0, 45, 0, -45), + (0, 60, 0, -30), + (-135, -15, 45, 15), + (100, -89, -80, 89), + (0, 0, 0, 0), + (0, 0, 1. / 60., 1. / 60.)] +correct_seps = [1, 1, 1, 1, 10, 90, 180, 90, 90, 180, 180, 0, + 0.023570225877234643] +correctness_margin = 2e-10 + + +def test_angsep(): + """ + Tests that the angular separation object also behaves correctly. + """ + from ..angle_utilities import angular_separation + + # check it both works with floats in radians, Quantities, or Angles + for conv in (np.deg2rad, + lambda x: u.Quantity(x, "deg"), + lambda x: Angle(x, "deg")): + for (lon1, lat1, lon2, lat2), corrsep in zip(coords, correct_seps): + angsep = angular_separation(conv(lon1), conv(lat1), + conv(lon2), conv(lat2)) + assert np.fabs(angsep - conv(corrsep)) < conv(correctness_margin) + + +def test_fk5_seps(): + """ + This tests if `separation` works for FK5 objects. + + This is a regression test for github issue #891 + """ + a = FK5(1.*u.deg, 1.*u.deg) + b = FK5(2.*u.deg, 2.*u.deg) + a.separation(b) + + +def test_proj_separations(): + """ + Test angular separation functionality + """ + c1 = ICRS(ra=0*u.deg, dec=0*u.deg) + c2 = ICRS(ra=0*u.deg, dec=1*u.deg) + + sep = c2.separation(c1) + #returns an Angle object + assert isinstance(sep, Angle) + + assert sep.degree == 1 + npt.assert_allclose(sep.arcminute, 60.) 
+ + # these operations have ambiguous interpretations for points on a sphere + with pytest.raises(TypeError): + c1 + c2 + with pytest.raises(TypeError): + c1 - c2 + + ngp = Galactic(l=0*u.degree, b=90*u.degree) + ncp = ICRS(ra=0*u.degree, dec=90*u.degree) + + # if there is a defined conversion between the relevant coordinate systems, + # it will be automatically performed to get the right angular separation + npt.assert_allclose(ncp.separation(ngp.transform_to(ICRS)).degree, + ncp.separation(ngp).degree) + + # distance from the north galactic pole to celestial pole + npt.assert_allclose(ncp.separation(ngp.transform_to(ICRS)).degree, + 62.8716627659) + + +def test_3d_separations(): + """ + Test 3D separation functionality + """ + c1 = ICRS(ra=1*u.deg, dec=1*u.deg, distance=9*u.kpc) + c2 = ICRS(ra=1*u.deg, dec=1*u.deg, distance=10*u.kpc) + + sep3d = c2.separation_3d(c1) + + assert isinstance(sep3d, Distance) + npt.assert_allclose(sep3d - 1*u.kpc, 0, atol=1e-12) diff --git a/astropy/coordinates/tests/test_api_ape5.py b/astropy/coordinates/tests/test_api_ape5.py new file mode 100644 index 0000000..2181470 --- /dev/null +++ b/astropy/coordinates/tests/test_api_ape5.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +""" +This is the APE5 coordinates API document re-written to work as a series of test +functions. + +Note that new tests for coordinates functionality should generally *not* be +added to this file - instead, add them to other appropriate test modules in +this package, like ``test_sky_coord.py``, ``test_frames.py``, or +``test_representation.py``. This file is instead meant mainly to keep track of +deviations from the original APE5 plan. 
+""" + +import numpy as np +from numpy.random import randn +from numpy import testing as npt +from ...tests.helper import pytest +raises = pytest.raises + +from ...extern import six + +from ... import units as u +from ... import time +from ... import coordinates as coords +from ..errors import * + +try: + import scipy +except ImportError: + HAS_SCIPY = False +else: + HAS_SCIPY = True + + +def test_representations_api(): + from ..representation import SphericalRepresentation, \ + UnitSphericalRepresentation, PhysicsSphericalRepresentation, \ + CartesianRepresentation + from ... coordinates import Angle, Longitude, Latitude, Distance + + #<-----------------Classes for representation of coordinate data---------------> + # These classes inherit from a common base class and internally contain Quantity + # objects, which are arrays (although they may act as scalars, like numpy's + # length-0 "arrays") + + # They can be initialized with a variety of ways that make intuitive sense. + # Distance is optional. + UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg) + UnitSphericalRepresentation(lon=8*u.hourangle, lat=5*u.deg) + SphericalRepresentation(lon=8*u.hourangle, lat=5*u.deg, distance=10*u.kpc) + + # In the initial implementation, the lat/lon/distance arguments to the + # initializer must be in order. A *possible* future change will be to allow + # smarter guessing of the order. E.g. `Latitude` and `Longitude` objects can be + # given in any order. 
+ UnitSphericalRepresentation(Longitude(8, u.hour), Latitude(5, u.deg)) + SphericalRepresentation(Longitude(8, u.hour), Latitude(5, u.deg), Distance(10, u.kpc)) + + # Arrays of any of the inputs are fine + UnitSphericalRepresentation(lon=[8, 9]*u.hourangle, lat=[5, 6]*u.deg) + + # Default is to copy arrays, but optionally, it can be a reference + UnitSphericalRepresentation(lon=[8, 9]*u.hourangle, lat=[5, 6]*u.deg, copy=False) + + # strings are parsed by `Latitude` and `Longitude` constructors, so no need to + # implement parsing in the Representation classes + UnitSphericalRepresentation(lon=Angle('2h6m3.3s'), lat=Angle('0.1rad')) + + # Or, you can give `Quantity`s with keywords, and they will be internally + # converted to Angle/Distance + c1 = SphericalRepresentation(lon=8*u.hourangle, lat=5*u.deg, distance=10*u.kpc) + + # Can also give another representation object with the `reprobj` keyword. + c2 = SphericalRepresentation.from_representation(c1) + + # distance, lat, and lon typically will just match in shape + SphericalRepresentation(lon=[8, 9]*u.hourangle, lat=[5, 6]*u.deg, distance=[10, 11]*u.kpc) + # if the inputs are not the same, if possible they will be broadcast following + # numpy's standard broadcasting rules. + c2 = SphericalRepresentation(lon=[8, 9]*u.hourangle, lat=[5, 6]*u.deg, distance=10*u.kpc) + assert len(c2.distance) == 2 + #when they can't be broadcast, it is a ValueError (same as Numpy) + with raises(ValueError): + c2 = UnitSphericalRepresentation(lon=[8, 9, 10]*u.hourangle, lat=[5, 6]*u.deg) + + # It's also possible to pass in scalar quantity lists with mixed units. These + # are converted to array quantities following the same rule as `Quantity`: all + # elements are converted to match the first element's units. 
+ c2 = UnitSphericalRepresentation(lon=Angle([8*u.hourangle, 135*u.deg]), + lat=Angle([5*u.deg, (6*np.pi/180)*u.rad])) + assert c2.lat.unit == u.deg and c2.lon.unit == u.hourangle + npt.assert_almost_equal(c2.lon[1].value, 9) + + # The Quantity initializer itself can also be used to force the unit even if the + # first element doesn't have the right unit + lon = u.Quantity([120*u.deg, 135*u.deg], u.hourangle) + lat = u.Quantity([(5*np.pi/180)*u.rad, 0.4*u.hourangle], u.deg) + c2 = UnitSphericalRepresentation(lon, lat) + + # regardless of how input, the `lat` and `lon` come out as angle/distance + assert isinstance(c1.lat, Angle) + assert isinstance(c1.lat, Latitude) # `Latitude` is an `Angle` subclass + assert isinstance(c1.distance, Distance) + + # but they are read-only, as representations are immutable once created + with raises(AttributeError): + c1.lat = Latitude(5, u.deg) + # Note that it is still possible to modify the array in-place, but this is not + # sanctioned by the API, as this would prevent things like caching. + c2.lat[:] = [0] * u.deg # possible, but NOT SUPPORTED + + # To address the fact that there are various other conventions for how spherical + # coordinates are defined, other conventions can be included as new classes. + # Later there may be other conventions that we implement - for now just the + # physics convention, as it is one of the most common cases. + c3 = PhysicsSphericalRepresentation(phi=120*u.deg, theta=85*u.deg, r=3*u.kpc) + + # first dimension must be length-3 if a lone `Quantity` is passed in. 
+ c1 = CartesianRepresentation(randn(3, 100) * u.kpc) + assert c1.xyz.shape[0] == 3 + assert c1.xyz.unit == u.kpc + assert c1.x.shape[0] == 100 + assert c1.y.shape[0] == 100 + assert c1.z.shape[0] == 100 + # can also give each as separate keywords + CartesianRepresentation(x=randn(100)*u.kpc, y=randn(100)*u.kpc, z=randn(100)*u.kpc) + # if the units don't match but are all distances, they will automatically be + # converted to match `x` + xarr, yarr, zarr = randn(3, 100) + c1 = CartesianRepresentation(x=xarr*u.kpc, y=yarr*u.kpc, z=zarr*u.kpc) + c2 = CartesianRepresentation(x=xarr*u.kpc, y=yarr*u.kpc, z=zarr*u.pc) + assert c1.xyz.unit == c2.xyz.unit == u.kpc + npt.assert_allclose((c1.z / 1000) - c2.z, 0, atol=1e-10) + + # representations convert into other representations via `represent_as` + srep = SphericalRepresentation(lon=90*u.deg, lat=0*u.deg, distance=1*u.pc) + crep = srep.represent_as(CartesianRepresentation) + npt.assert_allclose(crep.x.value, 0, atol=1e-10) + npt.assert_allclose(crep.y.value, 1, atol=1e-10) + npt.assert_allclose(crep.z.value, 0, atol=1e-10) + # The functions that actually do the conversion are defined via methods on the + # representation classes. This may later be expanded into a full registerable + # transform graph like the coordinate frames, but initially it will be a simpler + # method system + + +def test_frame_api(): + from ..representation import SphericalRepresentation, \ + UnitSphericalRepresentation + from ..builtin_frames import ICRS, FK5 + #<---------------------Reference Frame/"Low-level" classes---------------------> + # The low-level classes have a dual role: they act as specifiers of coordinate + # frames and they *may* also contain data as one of the representation objects, + # in which case they are the actual coordinate objects themselves. 
+ + # They can always accept a representation as a first argument + icrs = ICRS(UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg)) + + # which is stored as the `data` attribute + assert icrs.data.lat == 5*u.deg + assert icrs.data.lon == 8*u.hourangle + + # Frames that require additional information like equinoxs or obstimes get them + # as keyword parameters to the frame constructor. Where sensible, defaults are + # used. E.g., FK5 is almost always J2000 equinox + fk5 = FK5(UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg)) + J2000 = time.Time('J2000', scale='utc') + fk5_2000 = FK5(UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg), equinox=J2000) + assert fk5.equinox == fk5_2000.equinox + + # the information required to specify the frame is immutable + J2001 = time.Time('J2001', scale='utc') + with raises(AttributeError): + fk5.equinox = J2001 + + # Similar for the representation data. + with raises(AttributeError): + fk5.data = UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg) + + # There is also a class-level attribute that lists the attributes needed to + # identify the frame. These include attributes like `equinox` shown above. + assert all([nm in ('equinox', 'obstime') for nm in fk5.get_frame_attr_names()]) + + # the result of `get_frame_attr_names` is called for particularly in the + # high-level class (discussed below) to allow round-tripping between various + # frames. It is also part of the public API for other similar developer / + # advanced users' use. + + # The actual position information is accessed via the representation objects + npt.assert_allclose(icrs.represent_as(SphericalRepresentation).lat.to(u.deg), 5*u.deg) + npt.assert_allclose(icrs.spherical.lat.to(u.deg), 5*u.deg) # shorthand for the above + assert icrs.cartesian.z.value > 0 + + # Many frames have a "default" representation, the one in which they are + # conventionally described, often with a special name for some of the + # coordinates. 
E.g., most equatorial coordinate systems are spherical with RA and + # Dec. This works simply as a shorthand for the longer form above + + npt.assert_allclose(icrs.dec.to(u.deg), 5*u.deg) + npt.assert_allclose(fk5.ra.to(u.hourangle), 8*u.hourangle) + + assert icrs.representation == SphericalRepresentation + + # low-level classes can also be initialized with names valid for that representation + # and frame: + icrs_2 = ICRS(ra=8*u.hour, dec=5*u.deg, distance=1*u.kpc) + npt.assert_allclose(icrs.ra.to(u.deg), icrs_2.ra.to(u.deg)) + + # and these are taken as the default if keywords are not given: + #icrs_nokwarg = ICRS(8*u.hour, 5*u.deg, distance=1*u.kpc) + #assert icrs_nokwarg.ra == icrs_2.ra and icrs_nokwarg.dec == icrs_2.dec + + # they also are capable of computing on-sky or 3d separations from each other, + # which will be a direct port of the existing methods: + coo1 = ICRS(ra=0*u.hour, dec=0*u.deg) + coo2 = ICRS(ra=0*u.hour, dec=1*u.deg) + # `separation` is the on-sky separation + assert coo1.separation(coo2).degree == 1.0 + + # while `separation_3d` includes the 3D distance information + coo3 = ICRS(ra=0*u.hour, dec=0*u.deg, distance=1*u.kpc) + coo4 = ICRS(ra=0*u.hour, dec=0*u.deg, distance=2*u.kpc) + assert coo3.separation_3d(coo4).kpc == 1.0 + + # The next example fails because `coo1` and `coo2` don't have distances + with raises(ValueError): + assert coo1.separation_3d(coo2).kpc == 1.0 + + # repr/str also shows info, with frame and data + #assert repr(fk5) == '' + + +def test_transform_api(): + from ..representation import UnitSphericalRepresentation + from ..builtin_frames import ICRS, FK5 + from ..baseframe import frame_transform_graph, BaseCoordinateFrame + from ..transformations import DynamicMatrixTransform + #<-------------------------Transformations-------------------------------------> + # Transformation functionality is the key to the whole scheme: they transform + # low-level classes from one frame to another. 
+ + #(used below but defined above in the API) + fk5 = FK5(ra=8*u.hour, dec=5*u.deg) + + # If no data (or `None`) is given, the class acts as a specifier of a frame, but + # without any stored data. + J2001 = time.Time('J2001', scale='utc') + fk5_J2001_frame = FK5(equinox=J2001) + + # if they do not have data, the string instead is the frame specification + assert repr(fk5_J2001_frame) == "" + + # Note that, although a frame object is immutable and can't have data added, it + # can be used to create a new object that does have data by giving the + # `realize_frame` method a representation: + srep = UnitSphericalRepresentation(lon=8*u.hour, lat=5*u.deg) + fk5_j2001_with_data = fk5_J2001_frame.realize_frame(srep) + assert fk5_j2001_with_data.data is not None + # Now `fk5_j2001_with_data` is in the same frame as `fk5_J2001_frame`, but it + # is an actual low-level coordinate, rather than a frame without data. + + # These frames are primarily useful for specifying what a coordinate should be + # transformed *into*, as they are used by the `transform_to` method + # E.g., this snippet precesses the point to the new equinox + newfk5 = fk5.transform_to(fk5_J2001_frame) + assert newfk5.equinox == J2001 + + # classes can also be given to `transform_to`, which then uses the defaults for + # the frame information: + samefk5 = fk5.transform_to(FK5) + # `fk5` was initialized using default `obstime` and `equinox`, so: + npt.assert_allclose(samefk5.ra - fk5.ra, 0, atol=1e-10) + npt.assert_allclose(samefk5.dec - fk5.dec, 0, atol=1e-10) + + # transforming to a new frame necessarily loses framespec information if that + # information is not applicable to the new frame. 
This means transforms are not + # always round-trippable: + fk5_2 = FK5(ra=8*u.hour, dec=5*u.deg, equinox=J2001) + ic_trans = fk5_2.transform_to(ICRS) + + # `ic_trans` does not have an `equinox`, so now when we transform back to FK5, + # it's a *different* RA and Dec + fk5_trans = ic_trans.transform_to(FK5) + assert not np.allclose(fk5_2.ra.to(u.deg), fk5_trans.ra.to(u.deg), rtol=0, atol=1e-10) + + # But if you explicitly give the right equinox, all is fine + fk5_trans_2 = fk5_2.transform_to(FK5(equinox=J2001)) + npt.assert_allclose(fk5_2.ra.to(u.deg), fk5_trans_2.ra.to(u.deg), rtol=0, atol=1e-10) + + # Trying to tansforming a frame with no data is of course an error: + with raises(ValueError): + FK5(equinox=J2001).transform_to(ICRS) + + + # To actually define a new transformation, the same scheme as in the + # 0.2/0.3 coordinates framework can be re-used - a graph of transform functions + # connecting various coordinate classes together. The main changes are: + # 1) The transform functions now get the frame object they are transforming the + # current data into. + # 2) Frames with additional information need to have a way to transform between + # objects of the same class, but with different framespecinfo values + + # An example transform function: + class SomeNewSystem(BaseCoordinateFrame): + pass + @frame_transform_graph.transform(DynamicMatrixTransform, SomeNewSystem, FK5) + def new_to_fk5(newobj, fk5frame): + ot = newobj.obstime + eq = fk5frame.equinox + # ... build a *cartesian* transform matrix using `eq` that transforms from + # the `newobj` frame as observed at `ot` to FK5 an equinox `eq` + matrix = np.eye(3) + return matrix + + # Other options for transform functions include one that simply returns the new + # coordinate object, and one that returns a cartesian matrix but does *not* + # require `newobj` or `fk5frame` - this allows optimization of the transform. 
+ + +def test_highlevel_api(): + J2001 = time.Time('J2001', scale='utc') + + #<---------------------------"High-level" class--------------------------------> + # The "high-level" class is intended to wrap the lower-level classes in such a + # way that they can be round-tripped, as well as providing a variety of + # convenience functionality. This document is not intended to show *all* of the + # possible high-level functionality, rather how the high-level classes are + # initialized and interact with the low-level classes + + # this creates an object that contains an `ICRS` low-level class, initialized + # identically to the first ICRS example further up. + + sc = coords.SkyCoord(coords.SphericalRepresentation(lon=8 * u.hour, + lat=5 * u.deg, distance=1 * u.kpc), frame='icrs') + + # Other representations and `system` keywords delegate to the appropriate + # low-level class. The already-existing registry for user-defined coordinates + # will be used by `SkyCoordinate` to figure out what various the `system` + # keyword actually means. + + sc = coords.SkyCoord(ra=8 * u.hour, dec=5 * u.deg, frame='icrs') + sc = coords.SkyCoord(l=120 * u.deg, b=5 * u.deg, frame='galactic') + + # High-level classes can also be initialized directly from low-level objects + sc = coords.SkyCoord(coords.ICRS(ra=8 * u.hour, dec=5 * u.deg)) + + # The next example raises an error because the high-level class must always + # have position data. + with pytest.raises(ValueError): + sc = coords.SkyCoord(coords.FK5(equinox=J2001)) # raises ValueError + + # similarly, the low-level object can always be accessed + + #this is how it's supposed to look, but sometimes the numbers get rounded in + #funny ways + #assert repr(sc.frame) == '' + rscf = repr(sc.frame) + assert '' in rscf + + # and the string representation will be inherited from the low-level class. 
+ + # same deal, should loook like this, but different archituectures/ python + # versions may round the numbers differently + #assert repr(sc) == '' + rsc = repr(sc) + assert '' in rsc + + # Supports a variety of possible complex string formats + sc = coords.SkyCoord('8h00m00s +5d00m00.0s', frame='icrs') + + # In the next example, the unit is only needed b/c units are ambiguous. In + # general, we *never* accept ambiguity + sc = coords.SkyCoord('8:00:00 +5:00:00.0', unit=(u.hour, u.deg), frame='icrs') + + # The next one would yield length-2 array coordinates, because of the comma + + sc = coords.SkyCoord(['8h 5d', '2°2′3″ 0.3rad'], frame='icrs') + + # It should also interpret common designation styles as a coordinate + # NOT YET + # sc = coords.SkyCoord('SDSS J123456.89-012345.6', frame='icrs') + + # but it should also be possible to provide formats for outputting to strings, + # similar to `Time`. This can be added right away or at a later date. + + # transformation is done the same as for low-level classes, which it delegates to + + sc_fk5_j2001 = sc.transform_to(coords.FK5(equinox=J2001)) + assert sc_fk5_j2001.equinox == J2001 + + # The key difference is that the high-level class remembers frame information + # necessary for round-tripping, unlike the low-level classes: + sc1 = coords.SkyCoord(ra=8 * u.hour, dec=5 * u.deg, equinox=J2001, frame='fk5') + sc2 = sc1.transform_to('icrs') + + # The next assertion succeeds, but it doesn't mean anything for ICRS, as ICRS + # isn't defined in terms of an equinox + assert sc2.equinox == J2001 + + # But it *is* necessary once we transform to FK5 + sc3 = sc2.transform_to('fk5') + assert sc3.equinox == J2001 + npt.assert_allclose(sc1.ra, sc3.ra) + + # `SkyCoord` will also include the attribute-style access that is in the + # v0.2/0.3 coordinate objects. 
This will *not* be in the low-level classes + sc = coords.SkyCoord(ra=8 * u.hour, dec=5 * u.deg, frame='icrs') + scgal = sc.galactic + assert str(scgal).startswith('' + + if HAS_SCIPY: + cat1 = coords.SkyCoord(ra=[1, 2]*u.hr, dec=[3, 4.01]*u.deg, distance=[5, 6]*u.kpc, frame='icrs') + cat2 = coords.SkyCoord(ra=[1, 2, 2.01]*u.hr, dec=[3, 4, 5]*u.deg, distance=[5, 200, 6]*u.kpc, frame='icrs') + idx1, sep2d1, dist3d1 = cat1.match_to_catalog_sky(cat2) + idx2, sep2d2, dist3d2 = cat1.match_to_catalog_3d(cat2) + + assert np.any(idx1 != idx2) + + # additional convenience functionality for the future should be added as methods + # on `SkyCoord`, *not* the low-level classes. + + +@pytest.mark.remote_data +def test_highlevel_api_remote(): + m31icrs = coords.SkyCoord.from_name('M31', frame='icrs') + + assert str(m31icrs) == '' + + m31fk4 = coords.SkyCoord.from_name('M31', frame='fk4') + + assert m31icrs.frame != m31fk4.frame + assert np.abs(m31icrs.ra - m31fk4.ra) > .5*u.deg diff --git a/astropy/coordinates/tests/test_arrays.py b/astropy/coordinates/tests/test_arrays.py new file mode 100644 index 0000000..28e1f4e --- /dev/null +++ b/astropy/coordinates/tests/test_arrays.py @@ -0,0 +1,291 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +# TEST_UNICODE_LITERALS + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from ...extern import six +from ...tests.helper import pytest + +import numpy as np +from numpy import testing as npt + +from ... import units as u + + +def test_angle_arrays(): + """ + Test arrays values with Angle objects. + """ + from .. 
import Angle + + # Tests incomplete + a1 = Angle([0, 45, 90, 180, 270, 360, 720.], unit=u.degree) + npt.assert_almost_equal([0., 45., 90., 180., 270., 360., 720.], a1.value) + + a2 = Angle(np.array([-90, -45, 0, 45, 90, 180, 270, 360]), unit=u.degree) + npt.assert_almost_equal([-90, -45, 0, 45, 90, 180, 270, 360], + a2.value) + + a3 = Angle(["12 degrees", "3 hours", "5 deg", "4rad"]) + npt.assert_almost_equal([12., 45., 5., 229.18311805], + a3.value) + assert a3.unit == u.degree + + a4 = Angle(["12 degrees", "3 hours", "5 deg", "4rad"], u.radian) + npt.assert_almost_equal(a4.degree, a3.value) + assert a4.unit == u.radian + + a5 = Angle([0, 45, 90, 180, 270, 360], unit=u.degree) + a6 = a5.sum() + npt.assert_almost_equal(a6.value, 945.0) + assert a6.unit is u.degree + + with pytest.raises((TypeError, ValueError)): # ValueError for numpy 1.5.x + # Arrays where the elements are Angle objects are not supported -- it's + # really tricky to do correctly, if at all, due to the possibility of + # nesting. + a7 = Angle([a1, a2, a3], unit=u.degree) + + a8 = Angle(["04:02:02", "03:02:01", "06:02:01"], unit=u.degree) + npt.assert_almost_equal(a8.value, [4.03388889, 3.03361111, 6.03361111]) + + a9 = Angle(np.array(["04:02:02", "03:02:01", "06:02:01"]), unit=u.degree) + npt.assert_almost_equal(a9.value, a8.value) + + with pytest.raises(u.UnitsError): + a10 = Angle(["04:02:02", "03:02:01", "06:02:01"]) + + +def test_dms(): + from .. import Angle + from ..angle_utilities import dms_to_degrees + + a1 = Angle([0, 45.5, -45.5], unit=u.degree) + d, m, s = a1.dms + npt.assert_almost_equal(d, [0, 45, -45]) + npt.assert_almost_equal(m, [0, 30, -30]) + npt.assert_almost_equal(s, [0, 0, -0]) + + dms = a1.dms + degrees = dms_to_degrees(*dms) + npt.assert_almost_equal(a1.degree, degrees) + + a2 = Angle(dms, unit=u.degree) + + npt.assert_almost_equal(a2.radian, a1.radian) + + +def test_hms(): + from .. 
import Angle + from ..angle_utilities import hms_to_hours + + a1 = Angle([0, 11.5, -11.5], unit=u.hour) + h, m, s = a1.hms + npt.assert_almost_equal(h, [0, 11, -11]) + npt.assert_almost_equal(m, [0, 30, -30]) + npt.assert_almost_equal(s, [0, 0, -0]) + + hms = a1.hms + hours = hms_to_hours(*hms) + npt.assert_almost_equal(a1.hour, hours) + + a2 = Angle(hms, unit=u.hour) + + npt.assert_almost_equal(a2.radian, a1.radian) + + +def test_array_coordinates_creation(): + """ + Test creating coordinates from arrays. + """ + from .. import Angle + from .. import ICRS, CartesianRepresentation + + c = ICRS(np.array([1, 2])*u.deg, np.array([3, 4])*u.deg) + assert not c.ra.isscalar + + with pytest.raises(ValueError): + c = ICRS(np.array([1, 2])*u.deg, np.array([3, 4, 5])*u.deg) + with pytest.raises(ValueError): + c = ICRS(np.array([1, 2, 4, 5])*u.deg, np.array([[3, 4], [5, 6]])*u.deg) + + #make sure cartesian initialization also works + cart = CartesianRepresentation(x=[1., 2.]*u.kpc, y=[3., 4.]*u.kpc, z=[5., 6.]*u.kpc) + c = ICRS(cart) + + #also ensure strings can be arrays + c = ICRS(['1d0m0s', '2h02m00.3s'], ['3d', '4d']) + + #but invalid strings cannot + with pytest.raises(ValueError): + c = ICRS(Angle(['10m0s', '2h02m00.3s']), Angle(['3d', '4d'])) + with pytest.raises(ValueError): + c = ICRS(Angle(['1d0m0s', '2h02m00.3s']), Angle(['3x', '4d'])) + + +def test_array_coordinates_distances(): + """ + Test creating coordinates from arrays and distances. + """ + from .. import ICRS + + #correct way + ICRS(ra=np.array([1, 2])*u.deg, dec=np.array([3, 4])*u.deg, distance= [.1, .2] * u.kpc) + + with pytest.raises(ValueError): + #scalar distance and mismatched array coordinates + ICRS(ra=np.array([1, 2, 3])*u.deg, dec=np.array([[3, 4], [5, 6]])*u.deg, distance= 2. * u.kpc) + with pytest.raises(ValueError): + #more distance values than coordinates + ICRS(ra=np.array([1, 2])*u.deg, dec=np.array([3, 4])*u.deg, distance= [.1, .2, 3.] 
* u.kpc) + + +@pytest.mark.parametrize(('arrshape', 'distance'), [((2, ), None), ((4, 2, 5), None), ((4, 2, 5), 2 * u.kpc)]) +def test_array_coordinates_transformations(arrshape, distance): + """ + Test transformation on coordinates with array content (first length-2 1D, then a 3D array) + """ + from .. import ICRS, FK4, FK5, Galactic + + #M31 coordinates from test_transformations + raarr = np.ones(arrshape) * 10.6847929 + decarr = np.ones(arrshape) * 41.2690650 + if distance is not None: + distance = np.ones(arrshape) * distance + + print(raarr, decarr,distance) + c = ICRS(ra=raarr*u.deg, dec=decarr*u.deg, distance=distance) + g = c.transform_to(Galactic) + + assert g.l.shape == arrshape + + npt.assert_array_almost_equal(g.l.degree, 121.17447049007306) + npt.assert_array_almost_equal(g.b.degree, -21.57291080408368) + + if distance is not None: + assert g.distance.unit == c.distance.unit + + #now make sure round-tripping works through FK5 + c2 = c.transform_to(FK5).transform_to(ICRS) + npt.assert_array_almost_equal(c.ra.radian, c2.ra.radian) + npt.assert_array_almost_equal(c.dec.radian, c2.dec.radian) + + assert c2.ra.shape == arrshape + + if distance is not None: + assert c2.distance.unit == c.distance.unit + + #also make sure it's possible to get to FK4, which uses a direct transform function. + fk4 = c.transform_to(FK4) + + npt.assert_array_almost_equal(fk4.ra.degree, 10.0004, decimal=4) + npt.assert_array_almost_equal(fk4.dec.degree, 40.9953, decimal=4) + + assert fk4.ra.shape == arrshape + if distance is not None: + assert fk4.distance.unit == c.distance.unit + + #now check the reverse transforms run + cfk4 = fk4.transform_to(ICRS) + assert cfk4.ra.shape == arrshape + + +def test_array_precession(): + """ + Ensures that FK5 coordinates as arrays precess their equinoxes + """ + from ...time import Time + from .. 
import FK5 + + j2000 = Time('J2000', scale='utc') + j1975 = Time('J1975', scale='utc') + + fk5 = FK5([1, 1.1]*u.radian, [0.5, 0.6]*u.radian) + assert fk5.equinox.jyear == j2000.jyear + fk5_2 = fk5.transform_to(FK5(equinox=j1975)) + assert fk5_2.equinox.jyear == j1975.jyear + + npt.assert_array_less(0.05, np.abs(fk5.ra.degree - fk5_2.ra.degree)) + npt.assert_array_less(0.05, np.abs(fk5.dec.degree - fk5_2.dec.degree)) + +def test_array_separation(): + from .. import ICRS + + c1 = ICRS([0 , 0]*u.deg, [0, 0]*u.deg) + c2 = ICRS([1, 2]*u.deg, [0, 0]*u.deg) + + npt.assert_array_almost_equal(c1.separation(c2).degree, [1, 2]) + + c3 = ICRS([0 , 3.]*u.deg, [0., 0]*u.deg, distance=[1 ,1.] * u.kpc) + c4 = ICRS([1, 1.]*u.deg, [0., 0]*u.deg, distance=[1 ,1.] * u.kpc) + + #the 3-1 separation should be twice the 0-1 separation, but not *exactly* the same + sep = c3.separation_3d(c4) + sepdiff = sep[1] - (2 * sep[0]) + + assert abs(sepdiff.value) < 1e-5 + assert sepdiff != 0 + +def test_array_indexing(): + from .. import FK5 + from ...time import Time + + ra = np.linspace(0, 360, 10) + dec = np.linspace(-90, 90, 10) + j1975 = Time(1975, format='jyear', scale='utc') + + c1 = FK5(ra*u.deg, dec*u.deg, equinox=j1975) + + c2 = c1[4] + assert c2.ra.degree == 160 + assert c2.dec.degree == -10 + + c3 = c1[2:5] + npt.assert_allclose(c3.ra.degree, [80, 120, 160]) + npt.assert_allclose(c3.dec.degree, [-50, -30, -10]) + + c4 = c1[np.array([2, 5, 8])] + + npt.assert_allclose(c4.ra.degree, [80, 200, 320]) + npt.assert_allclose(c4.dec.degree, [-50, 10, 70]) + + #now make sure the equinox is preserved + assert c2.equinox == c1.equinox + assert c3.equinox == c1.equinox + assert c4.equinox == c1.equinox + +def test_array_len(): + from .. 
import ICRS + + input_length = [1, 5] + for length in input_length: + ra = np.linspace(0, 360, length) + dec = np.linspace(0, 90, length) + + c = ICRS(ra*u.deg, dec*u.deg) + + assert len(c) == length + + assert c.shape == (length,) + + with pytest.raises(TypeError): + c = ICRS(0*u.deg, 0*u.deg) + len(c) + + assert c.shape == tuple() + +def test_array_eq(): + from .. import ICRS + + c1 = ICRS([1, 2]*u.deg, [3, 4]*u.deg) + c2 = ICRS([1, 2]*u.deg, [3, 5]*u.deg) + c3 = ICRS([1, 3]*u.deg, [3, 4]*u.deg) + c4 = ICRS([1, 2]*u.deg, [3, 4.2]*u.deg) + + assert c1 == c1 + assert c1 != c2 + assert c1 != c3 + assert c1 != c4 diff --git a/astropy/coordinates/tests/test_distance.py b/astropy/coordinates/tests/test_distance.py new file mode 100644 index 0000000..40de8ae --- /dev/null +++ b/astropy/coordinates/tests/test_distance.py @@ -0,0 +1,258 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +""" +This includes tests for the Distance class and related calculations +""" + +import numpy as np +from numpy import testing as npt + +from ...tests.helper import pytest +from ... import units as u +from .. import Longitude, Latitude, Distance, CartesianRepresentation +from ..builtin_frames import ICRS, Galactic + +try: + import scipy # pylint: disable=W0611 +except ImportError: + HAS_SCIPY = False +else: + HAS_SCIPY = True + + +def test_distances(): + """ + Tests functionality for Coordinate class distances and cartesian + transformations. + """ + + ''' + Distances can also be specified, and allow for a full 3D definition of a + coordinate. 
+ ''' + + #try all the different ways to initialize a Distance + distance = Distance(12, u.parsec) + Distance(40, unit=u.au) + Distance(value=5, unit=u.kpc) + + # need to provide a unit + with pytest.raises(u.UnitsError): + Distance(12) + + # standard units are pre-defined + npt.assert_allclose(distance.lyr, 39.138765325702551) + npt.assert_allclose(distance.km, 370281309776063.0) + + # Coordinate objects can be assigned a distance object, giving them a full + # 3D position + c = Galactic(l=158.558650*u.degree, b=-43.350066*u.degree, + distance=Distance(12, u.parsec)) + + #or initialize distances via redshifts - this is actually tested in the + #function below that checks for scipy. This is kept here as an example + #c.distance = Distance(z=0.2) # uses current cosmology + #with whatever your preferred cosmology may be + #c.distance = Distance(z=0.2, cosmology=WMAP5) + + + # Coordinate objects can be initialized with a distance using special + # syntax + c1 = Galactic(l=158.558650*u.deg, b=-43.350066*u.deg, distance=12 * u.kpc) + + # Coordinate objects can be instantiated with cartesian coordinates + # Internally they will immediately be converted to two angles + a distance + cart = CartesianRepresentation(x=2 * u.pc, y=4 * u.pc, z=8 * u.pc) + c2 = Galactic(cart) + + sep12 = c1.separation_3d(c2) + # returns a *3d* distance between the c1 and c2 coordinates + # not that this does *not* + assert isinstance(sep12, Distance) + npt.assert_allclose(sep12.pc, 12005.784163916317, 10) + + ''' + All spherical coordinate systems with distances can be converted to + cartesian coordinates. 
+ ''' + + cartrep2 = c2.cartesian + assert isinstance(cartrep2.x, u.Quantity) + npt.assert_allclose(cartrep2.x.value, 2) + npt.assert_allclose(cartrep2.y.value, 4) + npt.assert_allclose(cartrep2.z.value, 8) + + # with no distance, the unit sphere is assumed when converting to cartesian + c3 = Galactic(l=158.558650*u.degree, b=-43.350066*u.degree, distance=None) + unitcart = c3.cartesian + npt.assert_allclose(((unitcart.x**2 + unitcart.y**2 + + unitcart.z**2)**0.5).value, 1.0) + + # TODO: choose between these when CartesianRepresentation gets a definite + # decision on whether or not it gets __add__ + # + # CartesianRepresentation objects can be added and subtracted, which are + # vector/elementwise they can also be given as arguments to a coordinate + # system + #csum = ICRS(c1.cartesian + c2.cartesian) + csumrep = CartesianRepresentation(c1.cartesian.xyz + c2.cartesian.xyz) + csum = ICRS(csumrep) + + npt.assert_allclose(csumrep.x.value, -8.12016610185) + npt.assert_allclose(csumrep.y.value, 3.19380597435) + npt.assert_allclose(csumrep.z.value, -8.2294483707) + npt.assert_allclose(csum.ra.degree, 158.529401774) + npt.assert_allclose(csum.dec.degree, -43.3235825777) + npt.assert_allclose(csum.distance.kpc, 11.9942200501) + + +@pytest.mark.skipif(str('not HAS_SCIPY')) +def test_distances_scipy(): + """ + The distance-related tests that require scipy due to the cosmology + module needing scipy integration routines + """ + from ...cosmology import WMAP5 + + #try different ways to initialize a Distance + d4 = Distance(z=0.23) # uses default cosmology - as of writing, WMAP7 + npt.assert_allclose(d4.z, 0.23, rtol=1e-8) + + d5 = Distance(z=0.23, cosmology=WMAP5) + npt.assert_allclose(d5.compute_z(WMAP5), 0.23, rtol=1e-8) + + d6 = Distance(z=0.23, cosmology=WMAP5, unit=u.km) + npt.assert_allclose(d6.value, 3.5417046898762366e+22) + + +def test_distance_change(): + + ra = Longitude("4:08:15.162342", unit=u.hour) + dec = Latitude("-41:08:15.162342", unit=u.degree) + c1 = 
ICRS(ra, dec, Distance(1, unit=u.kpc)) + + oldx = c1.cartesian.x.value + assert (oldx - 0.35284083171901953) < 1e-10 + + #first make sure distances are immutible + with pytest.raises(AttributeError): + c1.distance = Distance(2, unit=u.kpc) + + #now x should increase with a bigger distance increases + c2 = ICRS(ra, dec, Distance(2, unit=u.kpc)) + assert c2.cartesian.x.value == oldx * 2 + + +def test_distance_is_quantity(): + """ + test that distance behaves like a proper quantity + """ + + Distance(2 * u.kpc) + + d = Distance([2, 3.1], u.kpc) + + assert d.shape == (2,) + + a = d.view(np.ndarray) + q = d.view(u.Quantity) + a[0] = 1.2 + q.value[1] = 5.4 + + assert d[0].value == 1.2 + assert d[1].value == 5.4 + + q = u.Quantity(d, copy=True) + q.value[1] = 0 + assert q.value[1] == 0 + assert d.value[1] != 0 + + # regression test against #2261 + d = Distance([2 * u.kpc, 250. * u.pc]) + assert d.unit is u.kpc + assert np.all(d.value == np.array([2., 0.25])) + + +def test_distmod(): + + d = Distance(10, u.pc) + assert d.distmod.value == 0 + + d = Distance(distmod=20) + assert d.distmod.value == 20 + assert d.kpc == 100 + + d = Distance(distmod=-1., unit=u.au) + npt.assert_allclose(d.value, 1301442.9440836983) + + with pytest.raises(ValueError): + d = Distance(value=d, distmod=20) + + with pytest.raises(ValueError): + d = Distance(z=.23, distmod=20) + + #check the Mpc/kpc/pc behavior + assert Distance(distmod=1).unit == u.pc + assert Distance(distmod=11).unit == u.kpc + assert Distance(distmod=26).unit == u.Mpc + assert Distance(distmod=-21).unit == u.AU + + #if an array, uses the mean of the log of the distances + assert Distance(distmod=[1, 11, 26]).unit == u.kpc + + + +def test_distance_in_coordinates(): + """ + test that distances can be created from quantities and that cartesian + representations come out right + """ + + ra = Longitude("4:08:15.162342", unit=u.hour) + dec = Latitude("-41:08:15.162342", unit=u.degree) + coo = ICRS(ra, dec, distance=2*u.kpc) + + cart = 
coo.cartesian + + assert isinstance(cart.xyz, u.Quantity) + + +def test_negative_distance(): + """ Test optional kwarg allow_negative """ + + with pytest.raises(ValueError): + Distance([-2, 3.1], u.kpc) + + with pytest.raises(ValueError): + Distance([-2, -3.1], u.kpc) + + with pytest.raises(ValueError): + Distance(-2, u.kpc) + + d = Distance(-2, u.kpc, allow_negative=True) + assert d.value == -2 + + +def test_distance_comparison(): + """Ensure comparisons of distances work (#2206, #2250)""" + a = Distance(15*u.kpc) + b = Distance(15*u.kpc) + assert a == b + c = Distance(1.*u.Mpc) + assert a < c + + +def test_distance_to_quantity_when_not_units_of_length(): + """Any operatation that leaves units other than those of length + should turn a distance into a quantity (#2206, #2250)""" + d = Distance(15*u.kpc) + twice = 2.*d + assert isinstance(twice, Distance) + area = 4.*np.pi*d**2 + assert area.unit.is_equivalent(u.m**2) + assert not isinstance(area, Distance) + assert type(area) is u.Quantity diff --git a/astropy/coordinates/tests/test_earth.py b/astropy/coordinates/tests/test_earth.py new file mode 100644 index 0000000..f617fa4 --- /dev/null +++ b/astropy/coordinates/tests/test_earth.py @@ -0,0 +1,252 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +# TEST_UNICODE_LITERALS + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +"""Test initalization of angles not already covered by the API tests""" + +import functools +from distutils import version +import numpy as np + +from ..earth import EarthLocation, ELLIPSOIDS +from ..angles import Longitude, Latitude +from ...tests.helper import pytest +from ... 
import units as u + +NUMPY_VERSION = version.LooseVersion(np.__version__) + +allclose_m14 = functools.partial(np.allclose, rtol=1.e-14, atol=1.e-14) +allclose_m8 = functools.partial(np.allclose, rtol=1.e-8, atol=1.e-8) + +if NUMPY_VERSION >= version.LooseVersion('1.7.0'): + isclose_m14 = functools.partial(np.isclose, rtol=1.e-14, atol=1.e-14) + isclose_m8 = functools.partial(np.isclose, rtol=1.e-8, atol=1.e-8) +else: + def isclose_m14(val, ref): + return np.array([allclose_m14(v, r, rtol=1.e-14, atol=1.e-14) + for (v, r) in zip(val, ref)]) + + def isclose_m8(val, ref): + return np.array([allclose_m8(v, r, rtol=1.e-8, atol=1.e-8) + for (v, r) in zip(val, ref)]) + + +def vvd(val, valok, dval, func, test, status): + """Mimic routine of erfa/src/t_erfa_c.c (to help copy & paste)""" + assert np.allclose(val, valok, atol=dval) + + +def test_gc2gd(): + """Test that we reproduce erfa/src/t_erfa_c.c t_gc2gd""" + x, y, z = (2e6, 3e6, 5.244e6) + + status = 0 # help for copy & paste of vvd + + location = EarthLocation.from_geocentric(x, y, z, u.m) + e, p, h = location.to_geodetic('WGS84') + e, p, h = e.to(u.radian).value, p.to(u.radian).value, h.to(u.m).value + vvd(e, 0.98279372324732907, 1e-14, "eraGc2gd", "e2", status) + vvd(p, 0.97160184820607853, 1e-14, "eraGc2gd", "p2", status) + vvd(h, 331.41731754844348, 1e-8, "eraGc2gd", "h2", status) + + e, p, h = location.to_geodetic('GRS80') + e, p, h = e.to(u.radian).value, p.to(u.radian).value, h.to(u.m).value + vvd(e, 0.98279372324732907, 1e-14, "eraGc2gd", "e2", status) + vvd(p, 0.97160184820607853, 1e-14, "eraGc2gd", "p2", status) + vvd(h, 331.41731754844348, 1e-8, "eraGc2gd", "h2", status) + + e, p, h = location.to_geodetic('WGS72') + e, p, h = e.to(u.radian).value, p.to(u.radian).value, h.to(u.m).value + vvd(e, 0.98279372324732907, 1e-14, "eraGc2gd", "e3", status) + vvd(p, 0.97160181811015119, 1e-14, "eraGc2gd", "p3", status) + vvd(h, 333.27707261303181, 1e-8, "eraGc2gd", "h3", status) + + +def test_gd2gc(): + """Test that we 
reproduce erfa/src/t_erfa_c.c t_gd2gc""" + e = 3.1 * u.rad + p = -0.5 * u.rad + h = 2500.0 * u.m + + status = 0 # help for copy & paste of vvd + + location = EarthLocation.from_geodetic(e, p, h, ellipsoid='WGS84') + xyz = location.to_geocentric() + vvd(xyz[0], -5599000.5577049947, 1e-7, "eraGd2gc", "0/1", status) + vvd(xyz[1], 233011.67223479203, 1e-7, "eraGd2gc", "1/1", status) + vvd(xyz[2], -3040909.4706983363, 1e-7, "eraGd2gc", "2/1", status) + + location = EarthLocation.from_geodetic(e, p, h, ellipsoid='GRS80') + xyz = location.to_geocentric() + vvd(xyz[0], -5599000.5577260984, 1e-7, "eraGd2gc", "0/2", status) + vvd(xyz[1], 233011.6722356703, 1e-7, "eraGd2gc", "1/2", status) + vvd(xyz[2], -3040909.4706095476, 1e-7, "eraGd2gc", "2/2", status) + + location = EarthLocation.from_geodetic(e, p, h, ellipsoid='WGS72') + xyz = location.to_geocentric() + vvd(xyz[0], -5598998.7626301490, 1e-7, "eraGd2gc", "0/3", status) + vvd(xyz[1], 233011.5975297822, 1e-7, "eraGd2gc", "1/3", status) + vvd(xyz[2], -3040908.6861467111, 1e-7, "eraGd2gc", "2/3", status) + + +class TestInput(): + def setup(self): + self.lon = Longitude([0., 45., 90., 135., 180., -180, -90, -45], u.deg, + wrap_angle=180*u.deg) + self.lat = Latitude([+0., 30., 60., +90., -90., -60., -30., 0.], u.deg) + self.h = u.Quantity([0.1, 0.5, 1.0, -0.5, -1.0, +4.2, -11.,-.1], u.m) + self.location = EarthLocation.from_geodetic(self.lon, self.lat, self.h) + self.x, self.y, self.z = self.location.to_geocentric() + + def test_default_ellipsoid(self): + assert self.location.ellipsoid == EarthLocation._ellipsoid + + def test_geo_attributes(self): + assert all([np.all(_1 == _2) + for _1, _2 in zip(self.location.geodetic, + self.location.to_geodetic())]) + assert all([np.all(_1 == _2) + for _1, _2 in zip(self.location.geocentric, + self.location.to_geocentric())]) + + def test_attribute_classes(self): + """Test that attribute classes are correct (and not EarthLocation)""" + assert type(self.location.x) is u.Quantity + assert 
type(self.location.y) is u.Quantity + assert type(self.location.z) is u.Quantity + assert type(self.location.longitude) is Longitude + assert type(self.location.latitude) is Latitude + assert type(self.location.height) is u.Quantity + + def test_input(self): + """Check input is parsed correctly""" + + # units of length should be assumed geocentric + geocentric = EarthLocation(self.x, self.y, self.z) + assert np.all(geocentric == self.location) + geocentric2 = EarthLocation(self.x.value, self.y.value, self.z.value, + self.x.unit) + assert np.all(geocentric2 == self.location) + geodetic = EarthLocation(self.lon, self.lat, self.h) + assert np.all(geodetic == self.location) + geodetic2 = EarthLocation(self.lon.to(u.degree).value, + self.lat.to(u.degree).value, + self.h.to(u.m).value) + assert np.all(geodetic2 == self.location) + geodetic3 = EarthLocation(self.lon, self.lat) + assert allclose_m14(geodetic3.longitude.value, + self.location.longitude.value) + assert allclose_m14(geodetic3.latitude.value, + self.location.latitude.value) + assert not np.any(isclose_m14(geodetic3.height.value, + self.location.height.value)) + geodetic4 = EarthLocation(self.lon, self.lat, self.h[-1]) + assert allclose_m14(geodetic4.longitude.value, + self.location.longitude.value) + assert allclose_m14(geodetic4.latitude.value, + self.location.latitude.value) + assert allclose_m14(geodetic4.height[-1].value, + self.location.height[-1].value) + assert not np.any(isclose_m14(geodetic4.height[:-1].value, + self.location.height[:-1].value)) + # check length unit preservation + geocentric5 = EarthLocation(self.x, self.y, self.z, u.pc) + assert geocentric5.unit is u.pc + assert geocentric5.x.unit is u.pc + assert geocentric5.height.unit is u.pc + assert allclose_m14(geocentric5.x.to(self.x.unit).value, self.x.value) + geodetic5 = EarthLocation(self.lon, self.lat, self.h.to(u.pc)) + assert geodetic5.unit is u.pc + assert geodetic5.x.unit is u.pc + assert geodetic5.height.unit is u.pc + assert 
allclose_m14(geodetic5.x.to(self.x.unit).value, self.x.value) + + def test_invalid_input(self): + """Check invalid input raises exception""" + # incomprehensible by either raises TypeError + with pytest.raises(TypeError): + EarthLocation(self.lon, self.y, self.z) + + # wrong units + with pytest.raises(u.UnitsError): + EarthLocation.from_geocentric(self.lon, self.lat, self.lat) + # inconsistent units + with pytest.raises(u.UnitsError): + EarthLocation.from_geocentric(self.h, self.lon, self.lat) + # floats without a unit + with pytest.raises(TypeError): + EarthLocation.from_geocentric(self.x.value, self.y.value, + self.z.value) + # inconsistent shape + with pytest.raises(ValueError): + EarthLocation.from_geocentric(self.x, self.y, self.z[:5]) + + # inconsistent units + with pytest.raises(u.UnitsError): + EarthLocation.from_geodetic(self.x, self.y, self.z) + # inconsistent shape + with pytest.raises(ValueError): + EarthLocation.from_geodetic(self.lon, self.lat, self.h[:5]) + + def test_slicing(self): + # test on WGS72 location, so we can check the ellipsoid is passed on + locwgs72 = EarthLocation.from_geodetic(self.lon, self.lat, self.h, + ellipsoid='WGS72') + loc_slice1 = locwgs72[4] + assert isinstance(loc_slice1, EarthLocation) + assert loc_slice1.unit is locwgs72.unit + assert loc_slice1.ellipsoid == locwgs72.ellipsoid == 'WGS72' + assert not loc_slice1.shape + with pytest.raises(IndexError): + loc_slice1[0] + with pytest.raises(IndexError): + len(loc_slice1) + + loc_slice2 = locwgs72[4:6] + assert isinstance(loc_slice2, EarthLocation) + assert len(loc_slice2) == 2 + assert loc_slice2.unit is locwgs72.unit + assert loc_slice2.ellipsoid == locwgs72.ellipsoid + assert loc_slice2.shape == (2,) + loc_x = locwgs72['x'] + assert type(loc_x) is u.Quantity + assert loc_x.shape == locwgs72.shape + assert loc_x.unit is locwgs72.unit + + def test_invalid_ellipsoid(self): + # unknown ellipsoid + with pytest.raises(ValueError): + EarthLocation.from_geodetic(self.lon, self.lat, 
self.h, + ellipsoid='foo') + with pytest.raises(TypeError): + EarthLocation(self.lon, self.lat, self.h, ellipsoid='foo') + + with pytest.raises(ValueError): + self.location.ellipsoid = 'foo' + + with pytest.raises(ValueError): + self.location.to_geodetic('foo') + + @pytest.mark.parametrize('ellipsoid', ELLIPSOIDS.keys()) + def test_ellipsoid(self, ellipsoid): + """Test that different ellipsoids are understood, and differ""" + # check that heights differ for different ellipsoids + # need different tolerance, since heights are relative to ~6000 km + lon, lat, h = self.location.to_geodetic(ellipsoid) + if ellipsoid == self.location.ellipsoid: + assert allclose_m8(h.value, self.h.value) + else: + # Some heights are very similar for some; some lon, lat identical. + assert not np.all(isclose_m8(h.value, self.h.value)) + + # given lon, lat, height, check that x,y,z differ + location = EarthLocation.from_geodetic(self.lon, self.lat, self.h, + ellipsoid=ellipsoid) + if ellipsoid == self.location.ellipsoid: + assert allclose_m14(location.z.value, self.z.value) + else: + assert not np.all(isclose_m14(location.z.value, self.z.value)) diff --git a/astropy/coordinates/tests/test_formatting.py b/astropy/coordinates/tests/test_formatting.py new file mode 100644 index 0000000..2bf23ae --- /dev/null +++ b/astropy/coordinates/tests/test_formatting.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- + +# TEST_UNICODE_LITERALS +""" +Tests the Angle string formatting capabilities. SkyCoord formatting is in +test_sky_coord +""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from ...extern import six +from ...tests.helper import pytest + +from ..angles import Angle +from ... 
import units as u + + +def test_to_string_precision(): + # There are already some tests in test_api.py, but this is a regression + # test for the bug in issue #1319 which caused incorrect formatting of the + # seconds for precision=0 + + angle = Angle(-1.23456789, unit=u.degree) + + assert angle.to_string(precision=3) == '-1d14m04.444s' + assert angle.to_string(precision=1) == '-1d14m04.4s' + assert angle.to_string(precision=0) == '-1d14m04s' + + angle2 = Angle(-1.23456789, unit=u.hourangle) + + assert angle2.to_string(precision=3, unit=u.hour) == '-1h14m04.444s' + assert angle2.to_string(precision=1, unit=u.hour) == '-1h14m04.4s' + assert angle2.to_string(precision=0, unit=u.hour) == '-1h14m04s' + + +def test_to_string_decimal(): + + # There are already some tests in test_api.py, but this is a regression + # test for the bug in issue #1323 which caused decimal formatting to not + # work + + angle1 = Angle(2., unit=u.degree) + + assert angle1.to_string(decimal=True, precision=3) == '2.000' + assert angle1.to_string(decimal=True, precision=1) == '2.0' + assert angle1.to_string(decimal=True, precision=0) == '2' + + angle2 = Angle(3., unit=u.hourangle) + + assert angle2.to_string(decimal=True, precision=3) == '3.000' + assert angle2.to_string(decimal=True, precision=1) == '3.0' + assert angle2.to_string(decimal=True, precision=0) == '3' + + angle3 = Angle(4., unit=u.radian) + + assert angle3.to_string(decimal=True, precision=3) == '4.000' + assert angle3.to_string(decimal=True, precision=1) == '4.0' + assert angle3.to_string(decimal=True, precision=0) == '4' + + +def test_to_string_formats(): + a = Angle(1.113355, unit=u.deg) + assert a.to_string(format='latex') == r'$1^\circ06{}^\prime48.078{}^{\prime\prime}$' + assert a.to_string(format='unicode') == '1°06′48.078″' + + a = Angle(1.113355, unit=u.hour) + assert a.to_string(format='latex') == r'$1^\mathrm{h}06^\mathrm{m}48.078^\mathrm{s}$' + assert a.to_string(format='unicode') == '1ʰ06ᵐ48.078ˢ' + + a = 
Angle(1.113355, unit=u.radian) + assert a.to_string(format='latex') == r'$1.11336\mathrm{rad}$' + assert a.to_string(format='unicode') == '1.11336rad' + + +def test_to_string_fields(): + a = Angle(1.113355, unit=u.deg) + assert a.to_string(fields=1) == r'1d' + assert a.to_string(fields=2) == r'1d07m' + assert a.to_string(fields=3) == r'1d06m48.078s' + + +def test_to_string_padding(): + a = Angle(0.5653, unit=u.deg) + assert a.to_string(unit='deg', sep=':', pad=True) == r'00:33:55.08' + + # Test to make sure negative angles are padded correctly + a = Angle(-0.5653, unit=u.deg) + assert a.to_string(unit='deg', sep=':', pad=True) == r'-00:33:55.08' + + +def test_sexagesimal_rounding_up(): + a = Angle(359.9999999999, unit=u.deg) + + assert a.to_string(precision=None) == '360d00m00s' + assert a.to_string(precision=4) == '360d00m00.0000s' + assert a.to_string(precision=5) == '360d00m00.00000s' + assert a.to_string(precision=6) == '360d00m00.000000s' + assert a.to_string(precision=7) == '359d59m59.9999996s' + + a = Angle(3.999999, unit=u.deg) + assert a.to_string(fields=2, precision=None) == '4d00m' + assert a.to_string(fields=2, precision=1) == '4d00m' + assert a.to_string(fields=2, precision=5) == '4d00m' + assert a.to_string(fields=1, precision=1) == '4d' + assert a.to_string(fields=1, precision=5) == '4d' + + +def test_to_string_scalar(): + a = Angle(1.113355, unit=u.deg) + assert isinstance(a.to_string(), six.text_type) + + +def test_to_string_radian_with_precision(): + """ + Regression test for a bug that caused ``to_string`` to crash for angles in + radians when specifying the precision. 
+ """ + + # Check that specifying the precision works + a = Angle(3., unit=u.rad) + assert a.to_string(precision=3, sep='fromunit') == '3.000rad' + + +def test_sexagesimal_round_down(): + a1 = Angle(1, u.deg).to(u.hourangle) + a2 = Angle(2, u.deg) + assert a1.to_string() == '0h04m00s' + assert a2.to_string() == '2d00m00s' + + +def test_to_string_fields_colon(): + a = Angle(1.113355, unit=u.deg) + assert a.to_string(fields=2, sep=':') == '1:07' + assert a.to_string(fields=3, sep=':') == '1:06:48.078' + assert a.to_string(fields=1, sep=':') == '1' diff --git a/astropy/coordinates/tests/test_frames.py b/astropy/coordinates/tests/test_frames.py new file mode 100644 index 0000000..3d26c8a --- /dev/null +++ b/astropy/coordinates/tests/test_frames.py @@ -0,0 +1,497 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import numpy as np +from numpy.testing import assert_allclose + +from ... import units as u +from ...tests.helper import pytest +from .. 
import representation + + +def test_frame_attribute_descriptor(): + """ Unit tests of the FrameAttribute descriptor """ + from ..baseframe import FrameAttribute + + class TestFrameAttributes(object): + attr_none = FrameAttribute() + attr_2 = FrameAttribute(default=2) + attr_3_attr2 = FrameAttribute(default=3, secondary_attribute='attr_2') + attr_none_attr2 = FrameAttribute(default=None, secondary_attribute='attr_2') + attr_none_nonexist = FrameAttribute(default=None, secondary_attribute='nonexist') + + t = TestFrameAttributes() + + # Defaults + assert t.attr_none is None + assert t.attr_2 == 2 + assert t.attr_3_attr2 == 3 + assert t.attr_none_attr2 == t.attr_2 + assert t.attr_none_nonexist is None # No default and non-existent secondary attr + + # Setting values via '_'-prefixed internal vars (as would normally done in __init__) + t._attr_none = 10 + assert t.attr_none == 10 + + t._attr_2 = 20 + assert t.attr_2 == 20 + assert t.attr_3_attr2 == 3 + assert t.attr_none_attr2 == t.attr_2 + + t._attr_none_attr2 = 40 + assert t.attr_none_attr2 == 40 + + # Make sure setting values via public attribute fails + with pytest.raises(AttributeError) as err: + t.attr_none = 5 + assert 'Cannot set frame attribute' in str(err) + + +def test_frame_subclass_attribute_descriptor(): + from ..builtin_frames import FK4 + from ..baseframe import FrameAttribute, TimeFrameAttribute + from astropy.time import Time + + _EQUINOX_B1980 = Time('B1980', scale='tai') + + class MyFK4(FK4): + # equinox inherited from FK4, obstime overridden, and newattr is new + obstime = TimeFrameAttribute(default=_EQUINOX_B1980) + newattr = FrameAttribute(default='newattr') + + mfk4 = MyFK4() + assert mfk4.equinox.value == 'B1950.000' + assert mfk4.obstime.value == 'B1980.000' + assert mfk4.newattr == 'newattr' + assert set(mfk4.get_frame_attr_names()) == set(['equinox', 'obstime', 'newattr']) + + mfk4 = MyFK4(equinox='J1980.0', obstime='J1990.0', newattr='world') + assert mfk4.equinox.value == 'J1980.000' + 
assert mfk4.obstime.value == 'J1990.000' + assert mfk4.newattr == 'world' + + +def test_create_data_frames(): + from ..builtin_frames import ICRS + + #from repr + i1 = ICRS(representation.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc)) + i2 = ICRS(representation.UnitSphericalRepresentation(lon=1*u.deg, lat=2*u.deg)) + + #from preferred name + i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc) + i4 = ICRS(ra=1*u.deg, dec=2*u.deg) + + assert i1.data.lat == i3.data.lat + assert i1.data.lon == i3.data.lon + assert i1.data.distance == i3.data.distance + + assert i2.data.lat == i4.data.lat + assert i2.data.lon == i4.data.lon + + #now make sure the preferred names work as properties + assert_allclose(i1.ra, i3.ra) + assert_allclose(i2.ra, i4.ra) + assert_allclose(i1.distance, i3.distance) + + with pytest.raises(AttributeError): + i1.ra = [11.]*u.deg + + +def test_create_orderered_data(): + from ..builtin_frames import ICRS, Galactic, AltAz + + TOL = 1e-10*u.deg + + i = ICRS(1*u.deg, 2*u.deg) + assert (i.ra - 1*u.deg) < TOL + assert (i.dec - 2*u.deg) < TOL + + g = Galactic(1*u.deg, 2*u.deg) + assert (g.l - 1*u.deg) < TOL + assert (g.b - 2*u.deg) < TOL + + a = AltAz(1*u.deg, 2*u.deg) + assert (a.az - 1*u.deg) < TOL + assert (a.alt - 2*u.deg) < TOL + + with pytest.raises(TypeError): + ICRS(1*u.deg, 2*u.deg, 1*u.deg, 2*u.deg) + + with pytest.raises(TypeError): + sph = representation.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc) + ICRS(sph, 1*u.deg, 2*u.deg) + + +def test_create_nodata_frames(): + from ..builtin_frames import ICRS, FK4, FK5 + + i = ICRS() + assert len(i.get_frame_attr_names()) == 0 + + f5 = FK5() + assert f5.equinox == FK5.get_frame_attr_names()['equinox'] + + f4 = FK4() + assert f4.equinox == FK4.get_frame_attr_names()['equinox'] + + #obstime is special because it's a property that uses equinox if obstime is not set + assert f4.obstime in (FK4.get_frame_attr_names()['obstime'], + FK4.get_frame_attr_names()['equinox']) + + +def test_frame_repr(): + 
from ..builtin_frames import ICRS, FK5 + + i = ICRS() + assert repr(i) == '' + + f5 = FK5() + assert repr(f5).startswith('' + assert repr(i3) == ('') + + +def test_converting_units(): + import re + from ..baseframe import RepresentationMapping + from ..builtin_frames import ICRS, FK5 + from ..representation import SphericalRepresentation + + # this is a regular expression that with split (see below) removes what's + # the decimal point to fix rounding problems + rexrepr = re.compile(r'(.*?=\d\.).*?( .*?=\d\.).*?( .*)') + + # Use values that aren't subject to rounding down to X.9999... + i2 = ICRS(ra=1.1*u.deg, dec=2.1*u.deg) + + #converting from FK5 to ICRS and back changes the *internal* representation, + # but it should still come out in the preferred form + + i4 = i2.transform_to(FK5).transform_to(ICRS) + ri2 = ''.join(rexrepr.split(repr(i2))) + ri4 = ''.join(rexrepr.split(repr(i4))) + assert ri2 == ri4 + assert i2.data.lon.unit != i4.data.lon.unit # Internal repr changed + + #but that *shouldn't* hold if we turn off units for the representation + class FakeICRS(ICRS): + frame_specific_representation_info = { + 'spherical': {'names': ('ra', 'dec', 'distance'), + 'units': (None, None, None)}, + 'unitspherical': {'names': ('ra', 'dec'), + 'units': (None, None)} + } + + frame_specific_representation_info = { + 'spherical': [RepresentationMapping('lon', 'ra', u.hourangle), + RepresentationMapping('lat', 'dec', None), + RepresentationMapping('distance', 'distance')] # should fall back to default of None unit + } + frame_specific_representation_info['unitspherical'] = \ + frame_specific_representation_info['spherical'] + + fi = FakeICRS(i4.data) + ri2 = ''.join(rexrepr.split(repr(i2))) + rfi = ''.join(rexrepr.split(repr(fi))) + rfi = re.sub('FakeICRS', 'ICRS', rfi) # Force frame name to match + assert ri2 != rfi + + # the attributes should also get the right units + assert i2.dec.unit == i4.dec.unit + # unless no/explicitly given units + assert i2.dec.unit != 
fi.dec.unit + assert i2.ra.unit != fi.ra.unit + assert fi.ra.unit == u.hourangle + + +def test_realizing(): + from ..builtin_frames import ICRS, FK5 + from ...time import Time + + rep = representation.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc) + + i = ICRS() + i2 = i.realize_frame(rep) + + assert not i.has_data + assert i2.has_data + + f = FK5(equinox=Time('J2001', scale='utc')) + f2 = f.realize_frame(rep) + + assert not f.has_data + assert f2.has_data + + assert f2.equinox == f.equinox + assert f2.equinox != FK5.get_frame_attr_names()['equinox'] + + +def test_getitem(): + from ..builtin_frames import ICRS + + rep = representation.SphericalRepresentation( + [1, 2, 3]*u.deg, [4, 5, 6]*u.deg, [7, 8, 9]*u.kpc) + + i = ICRS(rep) + assert len(i.ra) == 3 + + iidx = i[1:] + assert len(iidx.ra) == 2 + + iidx2 = i[0] + assert iidx2.ra.isscalar + +def test_transform(): + """ + This test just makes sure the transform architecture works, but does *not* + actually test all the builtin transforms themselves are accurate + """ + from ..builtin_frames import ICRS, FK4, FK5, Galactic + from ...time import Time + + i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) + f = i.transform_to(FK5) + i2 = f.transform_to(ICRS) + + assert i2.data.__class__ == representation.UnitSphericalRepresentation + + assert_allclose(i.ra, i2.ra) + assert_allclose(i.dec, i2.dec) + + + i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc) + f = i.transform_to(FK5) + i2 = f.transform_to(ICRS) + + assert i2.data.__class__ != representation.UnitSphericalRepresentation + + + f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001', scale='utc')) + f4 = f.transform_to(FK4) + f4_2 = f.transform_to(FK4(equinox=f.equinox)) + + #make sure attributes are copied over correctly + assert f4.equinox == FK4.get_frame_attr_names()['equinox'] + assert f4_2.equinox == f.equinox + + + #make sure self-transforms also work + i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) + i2 = i.transform_to(ICRS) + + 
assert_allclose(i.ra, i2.ra) + assert_allclose(i.dec, i2.dec) + + f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001', scale='utc')) + f2 = f.transform_to(FK5) # default equinox, so should be *different* + assert f2.equinox == FK5().equinox + with pytest.raises(AssertionError): + assert_allclose(f.ra, f2.ra) + with pytest.raises(AssertionError): + assert_allclose(f.dec, f2.dec) + + + #finally, check Galactic round-tripping + i1 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg) + i2 = i1.transform_to(Galactic).transform_to(ICRS) + + assert_allclose(i1.ra, i2.ra) + assert_allclose(i1.dec, i2.dec) + +def test_sep(): + from ..builtin_frames import ICRS + + i1 = ICRS(ra=0*u.deg, dec=1*u.deg) + i2 = ICRS(ra=0*u.deg, dec=2*u.deg) + + sep = i1.separation(i2) + assert sep.deg == 1 + + i3 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc) + i4 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[4, 5]*u.kpc) + + sep3d = i3.separation_3d(i4) + assert_allclose(sep3d.to(u.kpc).value, np.array([1, 1])) + + +def test_time_inputs(): + """ + Test validation and conversion of inputs for equinox and obstime attributes. 
+ """ + from ...time import Time + from ..builtin_frames import FK4 + + c = FK4(1 * u.deg, 2 * u.deg, equinox='J2001.5', obstime='2000-01-01 12:00:00') + assert c.equinox == Time('J2001.5') + assert c.obstime == Time('2000-01-01 12:00:00') + + with pytest.raises(ValueError) as err: + c = FK4(1 * u.deg, 2 * u.deg, equinox=1.5) + assert 'Invalid time input' in str(err) + + with pytest.raises(ValueError) as err: + c = FK4(1 * u.deg, 2 * u.deg, obstime='hello') + assert 'Invalid time input' in str(err) + + with pytest.raises(ValueError) as err: + c = FK4(1 * u.deg, 2 * u.deg, obstime=['J2000', 'J2001']) + assert "must be a single (scalar) value" in str(err) + + +def test_is_frame_attr_default(): + """ + Check that the `is_frame_attr_default` machinery works as expected + """ + from ...time import Time + from ..builtin_frames import FK5 + + c1 = FK5(ra=1*u.deg, dec=1*u.deg) + c2 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=FK5.get_frame_attr_names()['equinox']) + c3 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=Time('J2001.5')) + + assert c1.equinox == c2.equinox + assert c1.equinox != c3.equinox + + assert c1.is_frame_attr_default('equinox') + assert not c2.is_frame_attr_default('equinox') + assert not c3.is_frame_attr_default('equinox') + + c4 = c1.realize_frame(representation.UnitSphericalRepresentation(3*u.deg, 4*u.deg)) + c5 = c2.realize_frame(representation.UnitSphericalRepresentation(3*u.deg, 4*u.deg)) + + assert c4.is_frame_attr_default('equinox') + assert not c5.is_frame_attr_default('equinox') + + +def test_altaz_attributes(): + from ...time import Time + from .. 
import EarthLocation, AltAz + + aa = AltAz(1*u.deg, 2*u.deg) + assert aa.obstime is None + assert aa.location is None + + aa2 = AltAz(1*u.deg, 2*u.deg, obstime='J2000') + assert aa2.obstime == Time('J2000') + + aa3 = AltAz(1*u.deg, 2*u.deg, location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m)) + assert isinstance(aa3.location, EarthLocation) + + +def test_representation(): + """ + Test the getter and setter properties for `representation` + """ + from ..builtin_frames import ICRS + + # Create the frame object. + icrs = ICRS(ra=1*u.deg, dec=1*u.deg) + data = icrs.data + + # Create some representation objects. + icrs_cart = icrs.cartesian + icrs_spher = icrs.spherical + + # Testing when `_representation` set to `CartesianRepresentation`. + icrs.representation = representation.CartesianRepresentation + + assert icrs.representation == representation.CartesianRepresentation + assert icrs_cart.x == icrs.x + assert icrs_cart.y == icrs.y + assert icrs_cart.z == icrs.z + assert icrs.data == data + + # Testing that an ICRS object in CartesianRepresentation must not have spherical attributes. + for attr in ('ra', 'dec', 'distance'): + with pytest.raises(AttributeError) as err: + getattr(icrs, attr) + assert 'object has no attribute' in str(err) + + # Testing when `_representation` set to `CylindricalRepresentation`. + icrs.representation = representation.CylindricalRepresentation + + assert icrs.representation == representation.CylindricalRepresentation + assert icrs.data == data + + # Testing setter input using text argument for spherical. + icrs.representation = 'spherical' + + assert icrs.representation is representation.SphericalRepresentation + assert icrs_spher.lat == icrs.dec + assert icrs_spher.lon == icrs.ra + assert icrs_spher.distance == icrs.distance + assert icrs.data == data + + # Testing that an ICRS object in SphericalRepresentation must not have cartesian attributes. 
+ for attr in ('x', 'y', 'z'): + with pytest.raises(AttributeError) as err: + getattr(icrs, attr) + assert 'object has no attribute' in str(err) + + # Testing setter input using text argument for cylindrical. + icrs.representation = 'cylindrical' + + assert icrs.representation is representation.CylindricalRepresentation + assert icrs.data == data + + with pytest.raises(ValueError) as err: + icrs.representation = 'WRONG' + assert 'but must be a BaseRepresentation class' in str(err) + + with pytest.raises(ValueError) as err: + icrs.representation = ICRS + assert 'but must be a BaseRepresentation class' in str(err) + + +def test_represent_as(): + from ..builtin_frames import ICRS + + icrs = ICRS(ra=1*u.deg, dec=1*u.deg) + + cart1 = icrs.represent_as('cartesian') + cart2 = icrs.represent_as(representation.CartesianRepresentation) + + cart1.x == cart2.x + cart1.y == cart2.y + cart1.z == cart2.z + + +def test_dynamic_attrs(): + from ..builtin_frames import ICRS + c = ICRS(1*u.deg, 2*u.deg) + assert 'ra' in dir(c) + assert 'dec' in dir(c) + + with pytest.raises(AttributeError) as err: + c.blahblah + assert "object has no attribute 'blahblah'" in str(err) + + with pytest.raises(AttributeError) as err: + c.ra = 1 + assert "Cannot set any frame attribute" in str(err) + + c.blahblah = 1 + assert c.blahblah == 1 + +def test_nodata_error(): + from ..builtin_frames import ICRS + + i = ICRS() + with pytest.raises(ValueError): + i.data + +def test_len0_data(): + from ..builtin_frames import ICRS + + i = ICRS([]*u.deg, []*u.deg) + assert i.has_data + repr(i) diff --git a/astropy/coordinates/tests/test_matching.py b/astropy/coordinates/tests/test_matching.py new file mode 100644 index 0000000..eef51d0 --- /dev/null +++ b/astropy/coordinates/tests/test_matching.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import numpy as np +from 
numpy import testing as npt +from ...tests.helper import pytest + +from ... import units as u + + +""" +These are the tests for coordinate matching. + +Note that this requires scipy. +""" + +try: + import scipy + HAS_SCIPY = True +except ImportError: + HAS_SCIPY = False + +@pytest.mark.skipif(str('not HAS_SCIPY')) +def test_matching_function(): + from .. import ICRS + from ..matching import match_coordinates_3d + #this only uses match_coordinates_3d because that's the actual implementation + + cmatch = ICRS([4, 2.1]*u.degree, [0, 0]*u.degree) + ccatalog = ICRS([1, 2, 3, 4]*u.degree, [0, 0, 0, 0]*u.degree) + + idx, d2d, d3d = match_coordinates_3d(cmatch, ccatalog) + npt.assert_array_equal(idx, [3, 1]) + npt.assert_array_almost_equal(d2d.degree, [0, 0.1]) + assert d3d.value[0] == 0 + + idx, d2d, d3d = match_coordinates_3d(cmatch, ccatalog, nthneighbor=2) + assert np.all(idx == 2) + npt.assert_array_almost_equal(d2d.degree, [1, 0.9]) + npt.assert_array_less(d3d.value, 0.02) + + +@pytest.mark.skipif(str('not HAS_SCIPY')) +def test_matching_function_3d_and_sky(): + from .. import ICRS + from ..matching import match_coordinates_3d, match_coordinates_sky + + cmatch = ICRS([4, 2.1]*u.degree, [0, 0]*u.degree, distance=[1, 5] * u.kpc) + ccatalog = ICRS([1, 2, 3, 4]*u.degree, [0, 0, 0, 0]*u.degree, distance=[1, 1, 1, 5] * u.kpc) + + idx, d2d, d3d = match_coordinates_3d(cmatch, ccatalog) + npt.assert_array_equal(idx, [2, 3]) + + + npt.assert_allclose(d2d.degree, [1, 1.9]) + assert np.abs(d3d[0].to(u.kpc).value - np.radians(1)) < 1e-6 + assert np.abs(d3d[1].to(u.kpc).value - 5*np.radians(1.9)) < 1e-5 + + idx, d2d, d3d = match_coordinates_sky(cmatch, ccatalog) + npt.assert_array_equal(idx, [3, 1]) + + npt.assert_allclose(d2d.degree, [0, 0.1]) + npt.assert_allclose(d3d.to(u.kpc).value, [4, 4.0000019]) + + +@pytest.mark.skipif(str('not HAS_SCIPY')) +def test_kdtree_storage(): + from .. 
import ICRS + from ..matching import match_coordinates_3d + + cmatch = ICRS([4, 2.1]*u.degree, [0, 0]*u.degree) + ccatalog = ICRS([1, 2, 3, 4]*u.degree, [0, 0, 0, 0]*u.degree) + + idx, d2d, d3d = match_coordinates_3d(cmatch, ccatalog, storekdtree=False) + assert not hasattr(ccatalog, '_kdtree') + + idx, d2d, d3d = match_coordinates_3d(cmatch, ccatalog, storekdtree=True) + assert hasattr(ccatalog, '_kdtree') + + assert not hasattr(ccatalog, 'tislit_cheese') + idx, d2d, d3d = match_coordinates_3d(cmatch, ccatalog, storekdtree='tislit_cheese') + assert hasattr(ccatalog, 'tislit_cheese') + assert not hasattr(cmatch, 'tislit_cheese') + + +@pytest.mark.skipif(str('not HAS_SCIPY')) +def test_matching_method(): + from .. import ICRS, SkyCoord + from ...utils import NumpyRNGContext + from ..matching import match_coordinates_3d, match_coordinates_sky + + with NumpyRNGContext(987654321): + cmatch = ICRS(np.random.rand(20) * 360.*u.degree, + (np.random.rand(20) * 180. - 90.)*u.degree) + ccatalog = ICRS(np.random.rand(100) * 360. * u.degree, + (np.random.rand(100) * 180. 
- 90.)*u.degree) + + idx1, d2d1, d3d1 = SkyCoord(cmatch).match_to_catalog_3d(ccatalog) + idx2, d2d2, d3d2 = match_coordinates_3d(cmatch, ccatalog) + + npt.assert_array_equal(idx1, idx2) + npt.assert_allclose(d2d1, d2d2) + npt.assert_allclose(d3d1, d3d2) + + #should be the same as above because there's no distance, but just make sure this method works + idx1, d2d1, d3d1 = SkyCoord(cmatch).match_to_catalog_sky(ccatalog) + idx2, d2d2, d3d2 = match_coordinates_sky(cmatch, ccatalog) + + npt.assert_array_equal(idx1, idx2) + npt.assert_allclose(d2d1, d2d2) + npt.assert_allclose(d3d1, d3d2) + + + assert len(idx1) == len(d2d1) == len(d3d1) == 20 diff --git a/astropy/coordinates/tests/test_name_resolve.py b/astropy/coordinates/tests/test_name_resolve.py new file mode 100644 index 0000000..2add0e7 --- /dev/null +++ b/astropy/coordinates/tests/test_name_resolve.py @@ -0,0 +1,160 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +# TEST_UNICODE_LITERALS + +""" +This module contains tests for the name resolve convenience module. +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + + +import time + +import numpy as np + +from ..name_resolve import (get_icrs_coordinates, NameResolveError, + sesame_database, _parse_response, sesame_url) +from ..sky_coordinate import SkyCoord +from ...extern.six.moves import urllib +from ...tests.helper import remote_data, pytest +from ... 
import units as u + +_cached_ngc3642 = dict() +_cached_ngc3642["simbad"] = """# ngc 3642 #Q22523669 +#=S=Simbad (via url): 1 +%@ 503952 +%I.0 NGC 3642 +%C.0 LIN +%C.N0 15.15.01.00 +%J 170.5750583 +59.0742417 = 11:22:18.01 +59:04:27.2 +%V z 1593 0.005327 [0.000060] D 2002LEDA.........0P +%D 1.673 1.657 75 (32767) (I) C 2006AJ....131.1163S +%T 5 =32800000 D 2011A&A...532A..74B +%#B 140 + + +#====Done (2013-Feb-12,16:37:11z)====""" + +_cached_ngc3642["vizier"] = """# ngc 3642 #Q22523677 +#=V=VizieR (local): 1 +%J 170.56 +59.08 = 11:22.2 +59:05 +%I.0 {NGC} 3642 + + + +#====Done (2013-Feb-12,16:37:42z)====""" + +_cached_ngc3642["all"] = """# ngc3642 #Q22523722 +#=S=Simbad (via url): 1 +%@ 503952 +%I.0 NGC 3642 +%C.0 LIN +%C.N0 15.15.01.00 +%J 170.5750583 +59.0742417 = 11:22:18.01 +59:04:27.2 +%V z 1593 0.005327 [0.000060] D 2002LEDA.........0P +%D 1.673 1.657 75 (32767) (I) C 2006AJ....131.1163S +%T 5 =32800000 D 2011A&A...532A..74B +%#B 140 + + +#=V=VizieR (local): 1 +%J 170.56 +59.08 = 11:22.2 +59:05 +%I.0 {NGC} 3642 + + +#!N=NED : *** Could not access the server *** + +#====Done (2013-Feb-12,16:39:48z)====""" + +_cached_castor = dict() +_cached_castor["all"] = """# castor #Q22524249 +#=S=Simbad (via url): 1 +%@ 983633 +%I.0 NAME CASTOR +%C.0 ** +%C.N0 12.13.00.00 +%J 113.649471640 +31.888282216 = 07:34:35.87 +31:53:17.8 +%J.E [34.72 25.95 0] A 2007A&A...474..653V +%P -191.45 -145.19 [3.95 2.95 0] A 2007A&A...474..653V +%X 64.12 [3.75] A 2007A&A...474..653V +%S A1V+A2Vm =0.0000D200.0030.0110000000100000 C 2001AJ....122.3466M +%#B 179 + +#!V=VizieR (local): No table found for: castor + +#!N=NED: ****object name not recognized by NED name interpreter +#!N=NED: ***Not recognized by NED: castor + + + +#====Done (2013-Feb-12,16:52:02z)====""" + +_cached_castor["simbad"] = """# castor #Q22524495 +#=S=Simbad (via url): 1 +%@ 983633 +%I.0 NAME CASTOR +%C.0 ** +%C.N0 12.13.00.00 +%J 113.649471640 +31.888282216 = 07:34:35.87 +31:53:17.8 +%J.E [34.72 25.95 0] A 
2007A&A...474..653V +%P -191.45 -145.19 [3.95 2.95 0] A 2007A&A...474..653V +%X 64.12 [3.75] A 2007A&A...474..653V +%S A1V+A2Vm =0.0000D200.0030.0110000000100000 C 2001AJ....122.3466M +%#B 179 + + +#====Done (2013-Feb-12,17:00:39z)====""" + +@remote_data +def test_names(): + + # First check that sesame is up + if urllib.request.urlopen("http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame").getcode() != 200: + pytest.skip("SESAME appears to be down, skipping test_name_resolve.py:test_names()...") + + with pytest.raises(NameResolveError): + get_icrs_coordinates("m87h34hhh") + + try: + icrs = get_icrs_coordinates("ngc 3642") + except NameResolveError: + ra, dec = _parse_response(_cached_ngc3642["all"]) + icrs = SkyCoord(ra=float(ra)*u.degree, dec=float(dec)*u.degree) + + icrs_true = SkyCoord(ra="11h 22m 18.014s", dec="59d 04m 27.27s") + np.testing.assert_almost_equal(icrs.ra.degree, icrs_true.ra.degree, 3) + np.testing.assert_almost_equal(icrs.dec.degree, icrs_true.dec.degree, 3) + + try: + icrs = get_icrs_coordinates("castor") + except NameResolveError: + ra,dec = _parse_response(_cached_castor["all"]) + icrs = SkyCoord(ra=float(ra)*u.degree, dec=float(dec)*u.degree) + + icrs_true = SkyCoord(ra="07h 34m 35.87s", dec="+31d 53m 17.8s") + np.testing.assert_almost_equal(icrs.ra.degree, icrs_true.ra.degree, 3) + np.testing.assert_almost_equal(icrs.dec.degree, icrs_true.dec.degree, 3) + + +@remote_data +@pytest.mark.parametrize(("name", "db_dict"), [('ngc 3642', _cached_ngc3642), + ('castor', _cached_castor)]) +def test_database_specify(name, db_dict): + # First check that at least some sesame mirror is up + for url in sesame_url.get(): + if urllib.request.urlopen(url).getcode() == 200: + break + else: + pytest.skip("All SESAME mirrors appear to be down, skipping " + "test_name_resolve.py:test_database_specify()...") + + for db in db_dict.keys(): + with sesame_database.set(db): + try: + icrs = SkyCoord.from_name(name) + except NameResolveError: + ra, dec = db_dict[db] + icrs = 
SkyCoord(ra=float(ra)*u.degree, dec=float(dec)*u.degree) + + time.sleep(1) diff --git a/astropy/coordinates/tests/test_pickle.py b/astropy/coordinates/tests/test_pickle.py new file mode 100644 index 0000000..77b526d --- /dev/null +++ b/astropy/coordinates/tests/test_pickle.py @@ -0,0 +1,15 @@ +from ...extern.six.moves import cPickle as pickle +from ...coordinates import Longitude + +def test_basic(): + lon1 = Longitude(1.23, "radian", wrap_angle='180d') + s = pickle.dumps(lon1) + lon2 = pickle.loads(s) + +def test_pickle_longitude_wrap_angle(): + a = Longitude(1.23, "radian", wrap_angle='180d') + s = pickle.dumps(a) + b = pickle.loads(s) + + assert a.rad == b.rad + assert a.wrap_angle == b.wrap_angle diff --git a/astropy/coordinates/tests/test_representation.py b/astropy/coordinates/tests/test_representation.py new file mode 100644 index 0000000..b138006 --- /dev/null +++ b/astropy/coordinates/tests/test_representation.py @@ -0,0 +1,933 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import numpy as np +from numpy.testing import assert_allclose + +from ... import units as u +from ...tests.helper import pytest +from ..angles import Longitude, Latitude, Angle +from ..distances import Distance +from ..representation import (SphericalRepresentation, + UnitSphericalRepresentation, + CartesianRepresentation, + CylindricalRepresentation, + PhysicsSphericalRepresentation) + + +def assert_allclose_quantity(q1, q2): + assert_allclose(q1.value, q2.to(q1.unit).value) + + +class TestSphericalRepresentation(object): + + def test_empty_init(self): + with pytest.raises(TypeError) as exc: + s = SphericalRepresentation() + + def test_init_quantity(self): + + s3 = SphericalRepresentation(lon=8 * u.hourangle, lat=5 * u.deg, distance=10 * u.kpc) + assert s3.lon == 8. * u.hourangle + assert s3.lat == 5. 
* u.deg + assert s3.distance == 10 * u.kpc + + assert isinstance(s3.lon, Longitude) + assert isinstance(s3.lat, Latitude) + assert isinstance(s3.distance, Distance) + + def test_init_lonlat(self): + + s2 = SphericalRepresentation(Longitude(8, u.hour), + Latitude(5, u.deg), + Distance(10, u.kpc)) + + assert s2.lon == 8. * u.hourangle + assert s2.lat == 5. * u.deg + assert s2.distance == 10. * u.kpc + + assert isinstance(s2.lon, Longitude) + assert isinstance(s2.lat, Latitude) + assert isinstance(s2.distance, Distance) + + # also test that wrap_angle is preserved + s3 = SphericalRepresentation(Longitude(-90, u.degree, + wrap_angle=180*u.degree), + Latitude(-45, u.degree), + Distance(1., u.Rsun)) + assert s3.lon == -90. * u.degree + assert s3.lon.wrap_angle == 180 * u.degree + + def test_init_array(self): + + s1 = SphericalRepresentation(lon=[8, 9] * u.hourangle, + lat=[5, 6] * u.deg, + distance=[1, 2] * u.kpc) + + assert_allclose(s1.lon.degree, [120, 135]) + assert_allclose(s1.lat.degree, [5, 6]) + assert_allclose(s1.distance.kpc, [1, 2]) + + assert isinstance(s1.lon, Longitude) + assert isinstance(s1.lat, Latitude) + assert isinstance(s1.distance, Distance) + + def test_init_array_nocopy(self): + + lon = Longitude([8, 9] * u.hourangle) + lat = Latitude([5, 6] * u.deg) + distance = Distance([1, 2] * u.kpc) + + s1 = SphericalRepresentation(lon=lon, lat=lat, distance=distance, copy=False) + + lon[:] = [1, 2] * u.rad + lat[:] = [3, 4] * u.arcmin + distance[:] = [8, 9] * u.Mpc + + assert_allclose_quantity(lon, s1.lon) + assert_allclose_quantity(lat, s1.lat) + assert_allclose_quantity(distance, s1.distance) + + def test_reprobj(self): + + s1 = SphericalRepresentation(lon=8 * u.hourangle, lat=5 * u.deg, distance=10 * u.kpc) + + s2 = SphericalRepresentation.from_representation(s1) + + assert_allclose_quantity(s2.lon, 8. * u.hourangle) + assert_allclose_quantity(s2.lat, 5. 
* u.deg) + assert_allclose_quantity(s2.distance, 10 * u.kpc) + + def test_broadcasting(self): + + s1 = SphericalRepresentation(lon=[8, 9] * u.hourangle, + lat=[5, 6] * u.deg, + distance=10 * u.kpc) + + assert_allclose_quantity(s1.lon, [120, 135] * u.degree) + assert_allclose_quantity(s1.lat, [5, 6] * u.degree) + assert_allclose_quantity(s1.distance, [10, 10] * u.kpc) + + def test_broadcasting_mismatch(self): + + with pytest.raises(ValueError) as exc: + s1 = SphericalRepresentation(lon=[8, 9, 10] * u.hourangle, + lat=[5, 6] * u.deg, + distance=[1, 2] * u.kpc) + assert exc.value.args[0] == "Input parameters lon, lat, and distance cannot be broadcast" + + # We deliberately disallow anything that is not directly a Quantity in + # these low-level classes, so we now check that initializing from a + # string or mixed unit lists raises a TypeError. + + def test_init_str(self): + + with pytest.raises(TypeError) as exc: + s1 = SphericalRepresentation(lon='2h6m3.3s', + lat='0.1rad', + distance=1 * u.kpc) + assert exc.value.args[0] == "lon should be a Quantity, Angle, or Longitude" + + def test_mixed_units(self): + + with pytest.raises(TypeError) as exc: + s1 = SphericalRepresentation(lon=[8 * u.hourangle, 135 * u.deg], + lat=[5 * u.deg, (6 * np.pi / 180) * u.rad], + distance=1 * u.kpc) + assert exc.value.args[0] == "lon should be a Quantity, Angle, or Longitude" + + def test_readonly(self): + + s1 = SphericalRepresentation(lon=8 * u.hourangle, + lat=5 * u.deg, + distance=1. * u.kpc) + + with pytest.raises(AttributeError): + s1.lon = 1. * u.deg + + with pytest.raises(AttributeError): + s1.lat = 1. * u.deg + + with pytest.raises(AttributeError): + s1.distance = 1. 
* u.kpc + + def test_getitem(self): + + s = SphericalRepresentation(lon=np.arange(10) * u.deg, + lat=-np.arange(10) * u.deg, + distance=1 * u.kpc) + + s_slc = s[2:8:2] + + assert_allclose_quantity(s_slc.lon, [2, 4, 6] * u.deg) + assert_allclose_quantity(s_slc.lat, [-2, -4, -6] * u.deg) + assert_allclose_quantity(s_slc.distance, [1, 1, 1] * u.kpc) + + def test_getitem_scalar(self): + + s = SphericalRepresentation(lon=1 * u.deg, + lat=-2 * u.deg, + distance=3 * u.kpc) + + with pytest.raises(TypeError): + s_slc = s[0] + + +class TestUnitSphericalRepresentation(object): + + def test_empty_init(self): + with pytest.raises(TypeError) as exc: + s = UnitSphericalRepresentation() + + def test_init_quantity(self): + + s3 = UnitSphericalRepresentation(lon=8 * u.hourangle, lat=5 * u.deg) + assert s3.lon == 8. * u.hourangle + assert s3.lat == 5. * u.deg + + assert isinstance(s3.lon, Longitude) + assert isinstance(s3.lat, Latitude) + + def test_init_lonlat(self): + + s2 = UnitSphericalRepresentation(Longitude(8, u.hour), + Latitude(5, u.deg)) + + assert s2.lon == 8. * u.hourangle + assert s2.lat == 5. * u.deg + + assert isinstance(s2.lon, Longitude) + assert isinstance(s2.lat, Latitude) + + def test_init_array(self): + + s1 = UnitSphericalRepresentation(lon=[8, 9] * u.hourangle, + lat=[5, 6] * u.deg) + + assert_allclose(s1.lon.degree, [120, 135]) + assert_allclose(s1.lat.degree, [5, 6]) + + assert isinstance(s1.lon, Longitude) + assert isinstance(s1.lat, Latitude) + + def test_init_array_nocopy(self): + + lon = Longitude([8, 9] * u.hourangle) + lat = Latitude([5, 6] * u.deg) + + s1 = UnitSphericalRepresentation(lon=lon, lat=lat, copy=False) + + lon[:] = [1, 2] * u.rad + lat[:] = [3, 4] * u.arcmin + + assert_allclose_quantity(lon, s1.lon) + assert_allclose_quantity(lat, s1.lat) + + def test_reprobj(self): + + s1 = UnitSphericalRepresentation(lon=8 * u.hourangle, lat=5 * u.deg) + + s2 = UnitSphericalRepresentation.from_representation(s1) + + assert_allclose_quantity(s2.lon, 8. 
* u.hourangle) + assert_allclose_quantity(s2.lat, 5. * u.deg) + + def test_broadcasting(self): + + s1 = UnitSphericalRepresentation(lon=[8, 9] * u.hourangle, + lat=[5, 6] * u.deg) + + assert_allclose_quantity(s1.lon, [120, 135] * u.degree) + assert_allclose_quantity(s1.lat, [5, 6] * u.degree) + + def test_broadcasting_mismatch(self): + + with pytest.raises(ValueError) as exc: + s1 = UnitSphericalRepresentation(lon=[8, 9, 10] * u.hourangle, + lat=[5, 6] * u.deg) + assert exc.value.args[0] == "Input parameters lon and lat cannot be broadcast" + + # We deliberately disallow anything that is not directly a Quantity in + # these low-level classes, so we now check that initializing from a + # string or mixed unit lists raises a TypeError. + + def test_init_str(self): + + with pytest.raises(TypeError) as exc: + s1 = UnitSphericalRepresentation(lon='2h6m3.3s', lat='0.1rad') + assert exc.value.args[0] == "lon should be a Quantity, Angle, or Longitude" + + def test_mixed_units(self): + + with pytest.raises(TypeError) as exc: + s1 = UnitSphericalRepresentation(lon=[8 * u.hourangle, 135 * u.deg], + lat=[5 * u.deg, (6 * np.pi / 180) * u.rad]) + assert exc.value.args[0] == "lon should be a Quantity, Angle, or Longitude" + + def test_readonly(self): + + s1 = UnitSphericalRepresentation(lon=8 * u.hourangle, + lat=5 * u.deg) + + with pytest.raises(AttributeError): + s1.lon = 1. * u.deg + + with pytest.raises(AttributeError): + s1.lat = 1. 
* u.deg + + def test_getitem(self): + + s = UnitSphericalRepresentation(lon=np.arange(10) * u.deg, + lat=-np.arange(10) * u.deg) + + s_slc = s[2:8:2] + + assert_allclose_quantity(s_slc.lon, [2, 4, 6] * u.deg) + assert_allclose_quantity(s_slc.lat, [-2, -4, -6] * u.deg) + + def test_getitem_scalar(self): + + s = UnitSphericalRepresentation(lon=1 * u.deg, + lat=-2 * u.deg) + + with pytest.raises(TypeError): + s_slc = s[0] + + +class TestPhysicsSphericalRepresentation(object): + + def test_empty_init(self): + with pytest.raises(TypeError) as exc: + s = PhysicsSphericalRepresentation() + + def test_init_quantity(self): + + s3 = PhysicsSphericalRepresentation(phi=8 * u.hourangle, theta=5 * u.deg, r=10 * u.kpc) + assert s3.phi == 8. * u.hourangle + assert s3.theta == 5. * u.deg + assert s3.r == 10 * u.kpc + + assert isinstance(s3.phi, Angle) + assert isinstance(s3.theta, Angle) + assert isinstance(s3.r, Distance) + + def test_init_phitheta(self): + + s2 = PhysicsSphericalRepresentation(Angle(8, u.hour), + Angle(5, u.deg), + Distance(10, u.kpc)) + + assert s2.phi == 8. * u.hourangle + assert s2.theta == 5. * u.deg + assert s2.r == 10. 
* u.kpc + + assert isinstance(s2.phi, Angle) + assert isinstance(s2.theta, Angle) + assert isinstance(s2.r, Distance) + + def test_init_array(self): + + s1 = PhysicsSphericalRepresentation(phi=[8, 9] * u.hourangle, + theta=[5, 6] * u.deg, + r=[1, 2] * u.kpc) + + assert_allclose(s1.phi.degree, [120, 135]) + assert_allclose(s1.theta.degree, [5, 6]) + assert_allclose(s1.r.kpc, [1, 2]) + + assert isinstance(s1.phi, Angle) + assert isinstance(s1.theta, Angle) + assert isinstance(s1.r, Distance) + + def test_init_array_nocopy(self): + + phi = Angle([8, 9] * u.hourangle) + theta = Angle([5, 6] * u.deg) + r = Distance([1, 2] * u.kpc) + + s1 = PhysicsSphericalRepresentation(phi=phi, theta=theta, r=r, copy=False) + + phi[:] = [1, 2] * u.rad + theta[:] = [3, 4] * u.arcmin + r[:] = [8, 9] * u.Mpc + + assert_allclose_quantity(phi, s1.phi) + assert_allclose_quantity(theta, s1.theta) + assert_allclose_quantity(r, s1.r) + + def test_reprobj(self): + + s1 = PhysicsSphericalRepresentation(phi=8 * u.hourangle, theta=5 * u.deg, r=10 * u.kpc) + + s2 = PhysicsSphericalRepresentation.from_representation(s1) + + assert_allclose_quantity(s2.phi, 8. * u.hourangle) + assert_allclose_quantity(s2.theta, 5. 
* u.deg) + assert_allclose_quantity(s2.r, 10 * u.kpc) + + def test_broadcasting(self): + + s1 = PhysicsSphericalRepresentation(phi=[8, 9] * u.hourangle, + theta=[5, 6] * u.deg, + r=10 * u.kpc) + + assert_allclose_quantity(s1.phi, [120, 135] * u.degree) + assert_allclose_quantity(s1.theta, [5, 6] * u.degree) + assert_allclose_quantity(s1.r, [10, 10] * u.kpc) + + def test_broadcasting_mismatch(self): + + with pytest.raises(ValueError) as exc: + s1 = PhysicsSphericalRepresentation(phi=[8, 9, 10] * u.hourangle, + theta=[5, 6] * u.deg, + r=[1, 2] * u.kpc) + assert exc.value.args[0] == "Input parameters phi, theta, and r cannot be broadcast" + + # We deliberately disallow anything that is not directly a Quantity in + # these low-level classes, so we now check that initializing from a + # string or mixed unit lists raises a TypeError. + + def test_init_str(self): + + with pytest.raises(TypeError) as exc: + s1 = PhysicsSphericalRepresentation(phi='2h6m3.3s', theta='0.1rad', r=1 * u.kpc) + assert exc.value.args[0] == "phi should be a Quantity or Angle" + + def test_mixed_units(self): + + with pytest.raises(TypeError) as exc: + s1 = PhysicsSphericalRepresentation(phi=[8 * u.hourangle, 135 * u.deg], + theta=[5 * u.deg, (6 * np.pi / 180) * u.rad], + r=[1. * u.kpc, 500 * u.pc]) + assert exc.value.args[0] == "phi should be a Quantity or Angle" + + def test_readonly(self): + + s1 = PhysicsSphericalRepresentation(phi=[8, 9] * u.hourangle, + theta=[5, 6] * u.deg, + r=[10, 20] * u.kpc) + + with pytest.raises(AttributeError): + s1.phi = 1. * u.deg + + with pytest.raises(AttributeError): + s1.theta = 1. * u.deg + + with pytest.raises(AttributeError): + s1.r = 1. 
* u.kpc + + def test_getitem(self): + + s = PhysicsSphericalRepresentation(phi=np.arange(10) * u.deg, + theta=np.arange(5, 15) * u.deg, + r=1 * u.kpc) + + s_slc = s[2:8:2] + + assert_allclose_quantity(s_slc.phi, [2, 4, 6] * u.deg) + assert_allclose_quantity(s_slc.theta, [7, 9, 11] * u.deg) + assert_allclose_quantity(s_slc.r, [1, 1, 1] * u.kpc) + + def test_getitem_scalar(self): + + s = PhysicsSphericalRepresentation(phi=1 * u.deg, + theta=2 * u.deg, + r=3 * u.kpc) + + with pytest.raises(TypeError): + s_slc = s[0] + + +class TestCartesianRepresentation(object): + + def test_empty_init(self): + with pytest.raises(TypeError) as exc: + s = CartesianRepresentation() + + def test_init_quantity(self): + + s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc) + + assert s1.x.unit is u.kpc + assert s1.y.unit is u.kpc + assert s1.z.unit is u.kpc + + assert_allclose(s1.x.value, 1) + assert_allclose(s1.y.value, 2) + assert_allclose(s1.z.value, 3) + + def test_init_singleunit(self): + + s1 = CartesianRepresentation(x=1 * u.kpc, y=2* u.kpc, z=3* u.kpc) + + assert s1.x.unit is u.kpc + assert s1.y.unit is u.kpc + assert s1.z.unit is u.kpc + + assert_allclose(s1.x.value, 1) + assert_allclose(s1.y.value, 2) + assert_allclose(s1.z.value, 3) + + def test_init_array(self): + + s1 = CartesianRepresentation(x=[1, 2, 3] * u.pc, + y=[2, 3, 4] * u.Mpc, + z=[3, 4, 5] * u.kpc) + + assert s1.x.unit is u.pc + assert s1.y.unit is u.Mpc + assert s1.z.unit is u.kpc + + assert_allclose(s1.x.value, [1, 2, 3]) + assert_allclose(s1.y.value, [2, 3, 4]) + assert_allclose(s1.z.value, [3, 4, 5]) + + def test_init_one_array(self): + + s1 = CartesianRepresentation(x=[1, 2, 3] * u.pc) + + assert s1.x.unit is u.pc + assert s1.y.unit is u.pc + assert s1.z.unit is u.pc + + assert_allclose(s1.x.value, 1) + assert_allclose(s1.y.value, 2) + assert_allclose(s1.z.value, 3) + + def test_init_one_array_size_fail(self): + + with pytest.raises(ValueError) as exc: + s1 = CartesianRepresentation(x=[1, 2, 3, 
4] * u.pc) + + # exception text differs on Python 2 and Python 3 + if hasattr(exc.value, 'args'): + assert exc.value.args[0].startswith("too many values to unpack") + else: + #py 2.6 doesn't have `args` + assert exc.value == 'too many values to unpack' + + def test_init_one_array_yz_fail(self): + + with pytest.raises(ValueError) as exc: + s1 = CartesianRepresentation(x=[1, 2, 3, 4] * u.pc, y=[1, 2] * u.pc) + + assert exc.value.args[0] == "x, y, and z are required to instantiate CartesianRepresentation" + + def test_init_array_nocopy(self): + + x = [8, 9, 10] * u.pc + y = [5, 6, 7] * u.Mpc + z = [2, 3, 4] * u.kpc + + s1 = CartesianRepresentation(x=x, y=y, z=z, copy=False) + + x[:] = [1, 2, 3] * u.kpc + y[:] = [9, 9, 8] * u.kpc + z[:] = [1, 2, 1] * u.kpc + + assert_allclose_quantity(x, s1.x) + assert_allclose_quantity(y, s1.y) + assert_allclose_quantity(z, s1.z) + + def test_reprobj(self): + + s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc) + + s2 = CartesianRepresentation.from_representation(s1) + + assert s2.x == 1 * u.kpc + assert s2.y == 2 * u.kpc + assert s2.z == 3 * u.kpc + + def test_broadcasting(self): + + s1 = CartesianRepresentation(x=[1, 2] * u.kpc, y=[3, 4] * u.kpc, z=5 * u.kpc) + + assert s1.x.unit == u.kpc + assert s1.y.unit == u.kpc + assert s1.z.unit == u.kpc + + assert_allclose(s1.x.value, [1, 2]) + assert_allclose(s1.y.value, [3, 4]) + assert_allclose(s1.z.value, [5, 5]) + + def test_broadcasting_mismatch(self): + + with pytest.raises(ValueError) as exc: + s1 = CartesianRepresentation(x=[1, 2] * u.kpc, y=[3, 4] * u.kpc, z=[5, 6, 7] * u.kpc) + assert exc.value.args[0] == "Input parameters x, y, and z cannot be broadcast" + + # We deliberately disallow anything that is not directly a Quantity in + # these low-level classes, so we now check that initializing from a + # string or mixed unit lists raises a TypeError. 
+ + def test_mixed_units(self): + + with pytest.raises(TypeError) as exc: + s1 = CartesianRepresentation(x=[1 * u.kpc, 2 * u.Mpc], + y=[3 * u.kpc, 4 * u.pc], + z=[5. * u.cm, 6 * u.m]) + assert exc.value.args[0] == "x should be a Quantity" + + def test_readonly(self): + + s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc) + + with pytest.raises(AttributeError): + s1.x = 1. * u.kpc + + with pytest.raises(AttributeError): + s1.y = 1. * u.kpc + + with pytest.raises(AttributeError): + s1.z = 1. * u.kpc + + def test_xyz(self): + + s1 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc) + + assert isinstance(s1.xyz, u.Quantity) + assert s1.xyz.unit is u.kpc + + assert_allclose(s1.xyz.value, [1, 2, 3]) + + def test_unit_mismatch(self): + + q_len = u.Quantity([1], u.km) + q_nonlen = u.Quantity([1], u.kg) + + with pytest.raises(u.UnitsError) as exc: + s1 = CartesianRepresentation(x=q_nonlen, y=q_len, z=q_len) + assert exc.value.args[0] == "x, y, and z should have matching physical types" + + with pytest.raises(u.UnitsError) as exc: + s1 = CartesianRepresentation(x=q_len, y=q_nonlen, z=q_len) + assert exc.value.args[0] == "x, y, and z should have matching physical types" + + with pytest.raises(u.UnitsError) as exc: + s1 = CartesianRepresentation(x=q_len, y=q_len, z=q_nonlen) + assert exc.value.args[0] == "x, y, and z should have matching physical types" + + def test_unit_non_length(self): + + s1 = CartesianRepresentation(x=1 * u.kg, y=2 * u.kg, z=3 * u.kg) + + s2 = CartesianRepresentation(x=1 * u.km / u.s, y=2 * u.km / u.s, z=3 * u.km / u.s) + + banana = u.def_unit('banana') + s3 = CartesianRepresentation(x=1 * banana, y=2 * banana, z=3 * banana) + + def test_getitem(self): + + s = CartesianRepresentation(x=np.arange(10) * u.m, + y=-np.arange(10) * u.m, + z=3 * u.km) + + s_slc = s[2:8:2] + + assert_allclose_quantity(s_slc.x, [2, 4, 6] * u.m) + assert_allclose_quantity(s_slc.y, [-2, -4, -6] * u.m) + assert_allclose_quantity(s_slc.z, [3, 3, 3] * 
u.km) + + def test_getitem_scalar(self): + + s = CartesianRepresentation(x=1 * u.m, + y=-2 * u.m, + z=3 * u.km) + + with pytest.raises(TypeError): + s_slc = s[0] + + +class TestCylindricalRepresentation(object): + + def test_empty_init(self): + with pytest.raises(TypeError) as exc: + s = CylindricalRepresentation() + + def test_init_quantity(self): + + s1 = CylindricalRepresentation(rho=1 * u.kpc, phi=2 * u.deg, z=3 * u.kpc) + + assert s1.rho.unit is u.kpc + assert s1.phi.unit is u.deg + assert s1.z.unit is u.kpc + + assert_allclose(s1.rho.value, 1) + assert_allclose(s1.phi.value, 2) + assert_allclose(s1.z.value, 3) + + def test_init_array(self): + + s1 = CylindricalRepresentation(rho=[1, 2, 3] * u.pc, + phi=[2, 3, 4] * u.deg, + z=[3, 4, 5] * u.kpc) + + assert s1.rho.unit is u.pc + assert s1.phi.unit is u.deg + assert s1.z.unit is u.kpc + + assert_allclose(s1.rho.value, [1, 2, 3]) + assert_allclose(s1.phi.value, [2, 3, 4]) + assert_allclose(s1.z.value, [3, 4, 5]) + + def test_init_array_nocopy(self): + + rho = [8, 9, 10] * u.pc + phi = [5, 6, 7] * u.deg + z = [2, 3, 4] * u.kpc + + s1 = CylindricalRepresentation(rho=rho, phi=phi, z=z, copy=False) + + rho[:] = [9, 2, 3] * u.kpc + phi[:] = [1, 2, 3] * u.arcmin + z[:] = [-2, 3, 8] * u.kpc + + assert_allclose_quantity(rho, s1.rho) + assert_allclose_quantity(phi, s1.phi) + assert_allclose_quantity(z, s1.z) + + def test_reprobj(self): + + s1 = CylindricalRepresentation(rho=1 * u.kpc, phi=2 * u.deg, z=3 * u.kpc) + + s2 = CylindricalRepresentation.from_representation(s1) + + assert s2.rho == 1 * u.kpc + assert s2.phi == 2 * u.deg + assert s2.z == 3 * u.kpc + + def test_broadcasting(self): + + s1 = CylindricalRepresentation(rho=[1, 2] * u.kpc, phi=[3, 4] * u.deg, z=5 * u.kpc) + + assert s1.rho.unit == u.kpc + assert s1.phi.unit == u.deg + assert s1.z.unit == u.kpc + + assert_allclose(s1.rho.value, [1, 2]) + assert_allclose(s1.phi.value, [3, 4]) + assert_allclose(s1.z.value, [5, 5]) + + def test_broadcasting_mismatch(self): + 
+ with pytest.raises(ValueError) as exc: + s1 = CylindricalRepresentation(rho=[1, 2] * u.kpc, phi=[3, 4] * u.deg, z=[5, 6, 7] * u.kpc) + assert exc.value.args[0] == "Input parameters rho, phi, and z cannot be broadcast" + + # We deliberately disallow anything that is not directly a Quantity in + # these low-level classes, so we now check that initializing from a + # string or mixed unit lists raises a TypeError. + + def test_mixed_units(self): + + with pytest.raises(TypeError) as exc: + s1 = CylindricalRepresentation(rho=[1 * u.kpc, 2 * u.Mpc], + phi=[3 * u.deg, 4 * u.arcmin], + z=[5. * u.cm, 6 * u.m]) + assert exc.value.args[0] == "phi should be a Quantity or Angle" + + def test_readonly(self): + + s1 = CylindricalRepresentation(rho=1 * u.kpc, + phi=20 * u.deg, + z=3 * u.kpc) + + with pytest.raises(AttributeError): + s1.rho = 1. * u.kpc + + with pytest.raises(AttributeError): + s1.phi = 20 * u.deg + + with pytest.raises(AttributeError): + s1.z = 1. * u.kpc + + def unit_mismatch(self): + + q_len = u.Quantity([1], u.kpc) + q_nonlen = u.Quantity([1], u.kg) + + with pytest.raises(u.UnitsError) as exc: + s1 = CylindricalRepresentation(rho=q_nonlen, phi=10 * u.deg, z=q_len) + assert exc.value.args[0] == "rho and z should have matching physical types" + + with pytest.raises(u.UnitsError) as exc: + s1 = CylindricalRepresentation(rho=q_len, phi=10 * u.deg, z=q_nonlen) + assert exc.value.args[0] == "rho and z should have matching physical types" + + def test_getitem(self): + + s = CylindricalRepresentation(rho=np.arange(10) * u.pc, + phi=-np.arange(10) * u.deg, + z=1 * u.kpc) + + s_slc = s[2:8:2] + + assert_allclose_quantity(s_slc.rho, [2, 4, 6] * u.pc) + assert_allclose_quantity(s_slc.phi, [-2, -4, -6] * u.deg) + assert_allclose_quantity(s_slc.z, [1, 1, 1] * u.kpc) + + def test_getitem_scalar(self): + + s = CylindricalRepresentation(rho=1 * u.pc, + phi=-2 * u.deg, + z=3 * u.kpc) + + with pytest.raises(TypeError): + s_slc = s[0] + + +def 
test_cartesian_spherical_roundtrip(): + + s1 = CartesianRepresentation(x=[1, 2000.] * u.kpc, + y=[3000., 4.] * u.pc, + z=[5., 6000.] * u.pc) + + s2 = SphericalRepresentation.from_representation(s1) + + s3 = CartesianRepresentation.from_representation(s2) + + s4 = SphericalRepresentation.from_representation(s3) + + assert_allclose_quantity(s1.x, s3.x) + assert_allclose_quantity(s1.y, s3.y) + assert_allclose_quantity(s1.z, s3.z) + + assert_allclose_quantity(s2.lon, s4.lon) + assert_allclose_quantity(s2.lat, s4.lat) + assert_allclose_quantity(s2.distance, s4.distance) + + +def test_cartesian_physics_spherical_roundtrip(): + + s1 = CartesianRepresentation(x=[1, 2000.] * u.kpc, + y=[3000., 4.] * u.pc, + z=[5., 6000.] * u.pc) + + s2 = PhysicsSphericalRepresentation.from_representation(s1) + + s3 = CartesianRepresentation.from_representation(s2) + + s4 = PhysicsSphericalRepresentation.from_representation(s3) + + assert_allclose_quantity(s1.x, s3.x) + assert_allclose_quantity(s1.y, s3.y) + assert_allclose_quantity(s1.z, s3.z) + + assert_allclose_quantity(s2.phi, s4.phi) + assert_allclose_quantity(s2.theta, s4.theta) + assert_allclose_quantity(s2.r, s4.r) + + +def test_spherical_physics_spherical_roundtrip(): + + s1 = SphericalRepresentation(lon=3 * u.deg, lat=4 * u.deg, distance=3 * u.kpc) + + s2 = PhysicsSphericalRepresentation.from_representation(s1) + + s3 = SphericalRepresentation.from_representation(s2) + + s4 = PhysicsSphericalRepresentation.from_representation(s3) + + assert_allclose_quantity(s1.lon, s3.lon) + assert_allclose_quantity(s1.lat, s3.lat) + assert_allclose_quantity(s1.distance, s3.distance) + + assert_allclose_quantity(s2.phi, s4.phi) + assert_allclose_quantity(s2.theta, s4.theta) + assert_allclose_quantity(s2.r, s4.r) + + assert_allclose_quantity(s1.lon, s4.phi) + assert_allclose_quantity(s1.lat, 90. 
* u.deg - s4.theta) + assert_allclose_quantity(s1.distance, s4.r) + + +def test_cartesian_cylindrical_roundtrip(): + + s1 = CartesianRepresentation(x=np.array([1., 2000.]) * u.kpc, + y=np.array([3000., 4.]) * u.pc, + z=np.array([5., 600.]) * u.cm) + + s2 = CylindricalRepresentation.from_representation(s1) + + s3 = CartesianRepresentation.from_representation(s2) + + s4 = CylindricalRepresentation.from_representation(s3) + + assert_allclose_quantity(s1.x, s3.x) + assert_allclose_quantity(s1.y, s3.y) + assert_allclose_quantity(s1.z, s3.z) + + assert_allclose_quantity(s2.rho, s4.rho) + assert_allclose_quantity(s2.phi, s4.phi) + assert_allclose_quantity(s2.z, s4.z) + + +def test_unit_spherical_roundtrip(): + + s1 = UnitSphericalRepresentation(lon=[10., 30.] * u.deg, + lat=[5., 6.] * u.arcmin) + + s2 = CartesianRepresentation.from_representation(s1) + + s3 = SphericalRepresentation.from_representation(s2) + + s4 = UnitSphericalRepresentation.from_representation(s3) + + assert_allclose_quantity(s1.lon, s4.lon) + assert_allclose_quantity(s1.lat, s4.lat) + + +def test_representation_repr(): + r1 = SphericalRepresentation(lon=1 * u.deg, lat=2.5 * u.deg, distance=1 * u.kpc) + assert repr(r1) == '' + + r2 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc) + assert repr(r2) == '' + + r3 = CartesianRepresentation(x=[1, 2, 3] * u.kpc, y=4 * u.kpc, z=[9, 10, 11] * u.kpc) + assert repr(r3) == ('') + + +def test_representation_str(): + r1 = SphericalRepresentation(lon=1 * u.deg, lat=2.5 * u.deg, distance=1 * u.kpc) + assert str(r1) == '(1.0 deg, 2.5 deg, 1.0 kpc)' + + r2 = CartesianRepresentation(x=1 * u.kpc, y=2 * u.kpc, z=3 * u.kpc) + assert str(r2) == '(1.0, 2.0, 3.0) kpc' + + r3 = CartesianRepresentation(x=[1, 2, 3] * u.kpc, y=4 * u.kpc, z=[9, 10, 11] * u.kpc) + assert str(r3) == '[(1.0, 4.0, 9.0) (2.0, 4.0, 10.0) (3.0, 4.0, 11.0)] kpc' + + +def test_subclass_representation(): + from ...utils import OrderedDict + from ..builtin_frames import ICRS + + class 
Longitude180(Longitude): + def __new__(cls, angle, unit=None, wrap_angle=180 * u.deg, **kwargs): + self = super(Longitude180, cls).__new__(cls, angle, unit=unit, + wrap_angle=wrap_angle, **kwargs) + return self + + class SphericalWrap180Representation(SphericalRepresentation): + attr_classes = OrderedDict([('lon', Longitude180), + ('lat', Latitude), + ('distance', u.Quantity)]) + recommended_units = {'lon': u.deg, 'lat': u.deg} + + class ICRSWrap180(ICRS): + frame_specific_representation_info = ICRS._frame_specific_representation_info.copy() + frame_specific_representation_info['sphericalwrap180'] = \ + frame_specific_representation_info['spherical'] + default_representation = SphericalWrap180Representation + + c = ICRSWrap180(ra=-1 * u.deg, dec=-2 * u.deg, distance=1 * u.m) + assert c.ra.value == -1 + assert c.ra.unit is u.deg + assert c.dec.value == -2 + assert c.dec.unit is u.deg diff --git a/astropy/coordinates/tests/test_sky_coord.py b/astropy/coordinates/tests/test_sky_coord.py new file mode 100644 index 0000000..aa5a1e8 --- /dev/null +++ b/astropy/coordinates/tests/test_sky_coord.py @@ -0,0 +1,710 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +Tests for the SkyCoord class. Note that there are also SkyCoord tests in +test_api_ape5.py +""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import functools + +import numpy as np +from numpy import testing as npt + +from ... 
import units as u +from ...tests.helper import pytest +from ..representation import REPRESENTATION_CLASSES +from ...coordinates import (ICRS, FK4, FK5, Galactic, SkyCoord, Angle, + SphericalRepresentation, CartesianRepresentation) +from ...coordinates import Latitude, Longitude +from ...time import Time + +RA = 1.0 * u.deg +DEC = 2.0 * u.deg +C_ICRS = ICRS(RA, DEC) +C_FK5 = C_ICRS.transform_to(FK5) +J2001 = Time('J2001', scale='utc') + +allclose = functools.partial(np.allclose, rtol=0.0, atol=1e-8) + + +def test_transform_to(): + for frame in (FK5, FK5(equinox=Time('J1975.0')), + FK4, FK4(equinox=Time('J1975.0')), + SkyCoord(RA, DEC, 'fk4', equinox='J1980')): + c_frame = C_ICRS.transform_to(frame) + s_icrs = SkyCoord(RA, DEC, frame='icrs') + s_frame = s_icrs.transform_to(frame) + assert allclose(c_frame.ra, s_frame.ra) + assert allclose(c_frame.dec, s_frame.dec) + assert allclose(c_frame.distance, s_frame.distance) + + +# set up for parametrized test +rt_sets = [] +rt_frames = [ICRS, FK4, FK5, Galactic] +for rt_frame0 in rt_frames: + for rt_frame1 in rt_frames: + for equinox0 in (None, 'J1975.0'): + for obstime0 in (None, 'J1980.0'): + for equinox1 in (None, 'J1975.0'): + for obstime1 in (None, 'J1980.0'): + rt_sets.append([rt_frame0, rt_frame1, + equinox0, equinox1, + obstime0, obstime1]) +rt_args = 'frame0,frame1,equinox0,equinox1,obstime0,obstime1' + + +@pytest.mark.parametrize(rt_args, rt_sets) +def test_round_tripping(frame0, frame1, equinox0, equinox1, obstime0, obstime1): + """ + Test round tripping out and back using transform_to in every combination. 
+ """ + attrs0 = {'equinox': equinox0, 'obstime': obstime0} + attrs1 = {'equinox': equinox1, 'obstime': obstime1} + + # Remove None values + attrs0 = dict((k, v) for k, v in attrs0.items() if v is not None) + attrs1 = dict((k, v) for k, v in attrs1.items() if v is not None) + + # Go out and back + sc = SkyCoord(frame0, RA, DEC, **attrs0) + + # Keep only frame attributes for frame1 + attrs1 = dict((attr, val) for attr, val in attrs1.items() + if attr in frame1.get_frame_attr_names()) + sc2 = sc.transform_to(frame1(**attrs1)) + + # When coming back only keep frame0 attributes for transform_to + attrs0 = dict((attr, val) for attr, val in attrs0.items() + if attr in frame0.get_frame_attr_names()) + # also, if any are None, fill in with defaults + for attrnm in frame0.get_frame_attr_names(): + if attrs0.get(attrnm, None) is None: + if attrnm == 'obstime' and frame0.get_frame_attr_names()[attrnm] is None: + if 'equinox' in attrs0: + attrs0[attrnm] = attrs0['equinox'] + else: + attrs0[attrnm] = frame0.get_frame_attr_names()[attrnm] + sc_rt = sc2.transform_to(frame0(**attrs0)) + + if frame0 is Galactic: + assert allclose(sc.l, sc_rt.l) + assert allclose(sc.b, sc_rt.b) + else: + assert allclose(sc.ra, sc_rt.ra) + assert allclose(sc.dec, sc_rt.dec) + if equinox0: + assert Time(sc.equinox) == Time(sc_rt.equinox) + if obstime0: + assert Time(sc.obstime) == Time(sc_rt.obstime) + + +def test_coord_init_string(): + """ + Spherical or Cartesian represenation input coordinates. 
+ """ + sc = SkyCoord('1d 2d') + assert allclose(sc.ra, 1 * u.deg) + assert allclose(sc.dec, 2 * u.deg) + + sc = SkyCoord('1d', '2d') + assert allclose(sc.ra, 1 * u.deg) + assert allclose(sc.dec, 2 * u.deg) + + sc = SkyCoord('1°2′3″', '2°3′4″') + assert allclose(sc.ra, Angle('1°2′3″')) + assert allclose(sc.dec, Angle('2°3′4″')) + + sc = SkyCoord('1°2′3″ 2°3′4″') + assert allclose(sc.ra, Angle('1°2′3″')) + assert allclose(sc.dec, Angle('2°3′4″')) + + with pytest.raises(ValueError) as err: + SkyCoord('1d 2d 3d') + assert "Cannot parse longitude and latitude" in str(err) + + sc1 = SkyCoord('8 00 00 +5 00 00.0', unit=(u.hour, u.deg), frame='icrs') + assert isinstance(sc1, SkyCoord) + assert allclose(sc1.ra, Angle(120 * u.deg)) + assert allclose(sc1.dec, Angle(5 * u.deg)) + + with pytest.raises(ValueError) as err: + SkyCoord('8 00 -5 00 00.0', unit=(u.hour, u.deg), frame='icrs') + assert 'coordinates have 5 values but spherical representation only accepts 3' in str(err) + + +def test_coord_init_unit(): + """ + Test variations of the unit keyword. 
+ """ + for unit in ('deg', 'deg,deg', ' deg , deg ', u.deg, (u.deg, u.deg), + np.array(['deg', 'deg'])): + sc = SkyCoord(1, 2, unit=unit) + assert allclose(sc.ra, Angle(1 * u.deg)) + assert allclose(sc.dec, Angle(2 * u.deg)) + + for unit in ('hourangle', 'hourangle,hourangle', ' hourangle , hourangle ', + u.hourangle, [u.hourangle, u.hourangle]): + sc = SkyCoord(1, 2, unit=unit) + assert allclose(sc.ra, Angle(15 * u.deg)) + assert allclose(sc.dec, Angle(30 * u.deg)) + + for unit in ('hourangle,deg', (u.hourangle, u.deg)): + sc = SkyCoord(1, 2, unit=unit) + assert allclose(sc.ra, Angle(15 * u.deg)) + assert allclose(sc.dec, Angle(2 * u.deg)) + + for unit in ('deg,deg,deg,deg', [u.deg, u.deg, u.deg, u.deg], None): + with pytest.raises(ValueError) as err: + SkyCoord(1, 2, unit=unit) + assert 'Unit keyword must have one to three unit values' in str(err) + + for unit in ('m', (u.m, u.deg), ''): + with pytest.raises(u.UnitsError) as err: + SkyCoord(1, 2, unit=unit) + + +def test_coord_init_list(): + """ + Spherical or Cartesian representation input coordinates. 
+ """ + sc = SkyCoord([('1d', '2d'), + (1 * u.deg, 2 * u.deg), + '1d 2d', + ('1°', '2°'), + '1° 2°'], unit='deg') + assert allclose(sc.ra, Angle('1d')) + assert allclose(sc.dec, Angle('2d')) + + with pytest.raises(ValueError) as err: + SkyCoord(['1d 2d 3d']) + assert "Cannot parse longitude and latitude" in str(err) + + with pytest.raises(ValueError) as err: + SkyCoord([('1d', '2d', '3d')]) + assert "Cannot parse longitude and latitude" in str(err) + + sc = SkyCoord([1 * u.deg, 1 * u.deg], [2 * u.deg, 2 * u.deg]) + assert allclose(sc.ra, Angle('1d')) + assert allclose(sc.dec, Angle('2d')) + + with pytest.raises(ValueError) as err: + SkyCoord([1 * u.deg, 2 * u.deg]) # this list is taken as RA w/ missing dec + assert "One or more elements of input sequence does not have a length" in str(err) + + +def test_coord_init_array(): + """ + Input in the form of a list array or numpy array + """ + for a in (['1 2', '3 4'], + [['1', '2'], ['3', '4']], + [[1, 2], [3, 4]]): + sc = SkyCoord(a, unit='deg') + assert allclose(sc.ra - [1, 3] * u.deg, 0) + assert allclose(sc.dec - [2, 4] * u.deg, 0) + + sc = SkyCoord(np.array(a), unit='deg') + assert allclose(sc.ra - [1, 3] * u.deg, 0) + assert allclose(sc.dec - [2, 4] * u.deg, 0) + + +def test_coord_init_representation(): + """ + Spherical or Cartesian represenation input coordinates. 
+ """ + coord = SphericalRepresentation(lon=8 * u.deg, lat=5 * u.deg, distance=1 * u.kpc) + sc = SkyCoord(coord, 'icrs') + assert allclose(sc.ra, coord.lon) + assert allclose(sc.dec, coord.lat) + assert allclose(sc.distance, coord.distance) + + with pytest.raises(ValueError) as err: + SkyCoord(coord, 'icrs', ra='1d') + assert "conflicts with keyword argument 'ra'" in str(err) + + coord = CartesianRepresentation(1 * u.one, 2 * u.one, 3 * u.one) + sc = SkyCoord(coord, 'icrs') + sc_cart = sc.represent_as(CartesianRepresentation) + assert allclose(sc_cart.x, 1.0) + assert allclose(sc_cart.y, 2.0) + assert allclose(sc_cart.z, 3.0) + + +def test_frame_init(): + """ + Different ways of providing the frame. + """ + sc = SkyCoord(RA, DEC, frame='icrs') + assert sc.frame.name == 'icrs' + + sc = SkyCoord(RA, DEC, frame=ICRS) + assert sc.frame.name == 'icrs' + + sc = SkyCoord(RA, DEC, 'icrs') + assert sc.frame.name == 'icrs' + + sc = SkyCoord(RA, DEC, ICRS) + assert sc.frame.name == 'icrs' + + sc = SkyCoord('icrs', RA, DEC) + assert sc.frame.name == 'icrs' + + sc = SkyCoord(ICRS, RA, DEC) + assert sc.frame.name == 'icrs' + + sc = SkyCoord(sc) + assert sc.frame.name == 'icrs' + + sc = SkyCoord(C_ICRS) + assert sc.frame.name == 'icrs' + + SkyCoord(C_ICRS, frame='icrs') + assert sc.frame.name == 'icrs' + + with pytest.raises(ValueError) as err: + SkyCoord(C_ICRS, frame='galactic') + assert 'Cannot override frame=' in str(err) + + +def test_attr_inheritance(): + """ + When initializing from an existing coord the representation attrs like + equinox should be inherited to the SkyCoord. If there is a conflict + then raise an exception. 
+ """ + sc = SkyCoord('icrs', 1, 2, unit='deg', equinox='J1999', obstime='J2001') + sc2 = SkyCoord(sc) + assert sc2.equinox == sc.equinox + assert sc2.obstime == sc.obstime + assert allclose(sc2.ra, sc.ra) + assert allclose(sc2.dec, sc.dec) + assert allclose(sc2.distance, sc.distance) + + sc2 = SkyCoord(sc.frame) # Doesn't have equinox there so we get FK4 defaults + assert sc2.equinox != sc.equinox + assert sc2.obstime != sc.obstime + assert allclose(sc2.ra, sc.ra) + assert allclose(sc2.dec, sc.dec) + assert allclose(sc2.distance, sc.distance) + + sc = SkyCoord('fk4', 1, 2, unit='deg', equinox='J1999', obstime='J2001') + sc2 = SkyCoord(sc) + assert sc2.equinox == sc.equinox + assert sc2.obstime == sc.obstime + assert allclose(sc2.ra, sc.ra) + assert allclose(sc2.dec, sc.dec) + assert allclose(sc2.distance, sc.distance) + + sc2 = SkyCoord(sc.frame) # sc.frame has equinox, obstime + assert sc2.equinox == sc.equinox + assert sc2.obstime == sc.obstime + assert allclose(sc2.ra, sc.ra) + assert allclose(sc2.dec, sc.dec) + assert allclose(sc2.distance, sc.distance) + + +def test_attr_conflicts(): + """ + Check conflicts resolution between coordinate attributes and init kwargs. 
+ """ + sc = SkyCoord('icrs', 1, 2, unit='deg', equinox='J1999', obstime='J2001') + + # OK if attrs both specified but with identical values + SkyCoord(sc, equinox='J1999', obstime='J2001') + + # OK because sc.frame doesn't have obstime + SkyCoord(sc.frame, equinox='J1999', obstime='J2100') + + # Not OK if attrs don't match + with pytest.raises(ValueError) as err: + SkyCoord(sc, equinox='J1999', obstime='J2002') + assert "Coordinate attribute 'obstime'=" in str(err) + + # Same game but with fk4 which has equinox and obstime frame attrs + sc = SkyCoord('fk4', 1, 2, unit='deg', equinox='J1999', obstime='J2001') + + # OK if attrs both specified but with identical values + SkyCoord(sc, equinox='J1999', obstime='J2001') + + # Not OK if SkyCoord attrs don't match + with pytest.raises(ValueError) as err: + SkyCoord(sc, equinox='J1999', obstime='J2002') + assert "Coordinate attribute 'obstime'=" in str(err) + + # Not OK because sc.frame has different attrs + with pytest.raises(ValueError) as err: + SkyCoord(sc.frame, equinox='J1999', obstime='J2002') + assert "Coordinate attribute 'obstime'=" in str(err) + + +def test_frame_attr_getattr(): + """ + When accessing frame attributes like equinox, the value should come + from self.frame when that object has the relevant attribute, otherwise + from self. + """ + sc = SkyCoord('icrs', 1, 2, unit='deg', equinox='J1999', obstime='J2001') + assert sc.equinox == 'J1999' # Just the raw value (not validated) + assert sc.obstime == 'J2001' + + sc = SkyCoord('fk4', 1, 2, unit='deg', equinox='J1999', obstime='J2001') + assert sc.equinox == Time('J1999') # Coming from the self.frame object + assert sc.obstime == Time('J2001') + + sc = SkyCoord('fk4', 1, 2, unit='deg', equinox='J1999') + assert sc.equinox == Time('J1999') + assert sc.obstime == Time('J1999') + + +def test_to_string(): + """ + Basic testing of converting SkyCoord to strings. This just tests + for a single input coordinate and and 1-element list. 
It does not + test the underlying `Angle.to_string` method itself. + """ + coord = '1h2m3s 1d2m3s' + for wrap in (lambda x: x, lambda x: [x]): + sc = SkyCoord(wrap(coord)) + assert sc.to_string() == wrap('15.5125 1.03417') + assert sc.to_string('dms') == wrap('15d30m45s 1d02m03s') + assert sc.to_string('hmsdms') == wrap('01h02m03s +01d02m03s') + with_kwargs = sc.to_string('hmsdms', precision=3, pad=True, alwayssign=True) + assert with_kwargs == wrap('+01h02m03.000s +01d02m03.000s') + + +def test_seps(): + sc1 = SkyCoord('icrs', 0 * u.deg, 1 * u.deg) + sc2 = SkyCoord('icrs', 0 * u.deg, 2 * u.deg) + + sep = sc1.separation(sc2) + + assert (sep - 1 * u.deg)/u.deg < 1e-10 + + with pytest.raises(ValueError): + sc1.separation_3d(sc2) + + sc3 = SkyCoord('icrs', 1 * u.deg, 1 * u.deg, distance=1 * u.kpc) + sc4 = SkyCoord('icrs', 1 * u.deg, 1 * u.deg, distance=2 * u.kpc) + sep3d = sc3.separation_3d(sc4) + + assert sep3d == 1 * u.kpc + + +def test_repr(): + # Repr tests must use exact floating point vals because Python 2.6 + # outputs values like 0.1 as 0.1000000000001. No workaround found. 
+ sc1 = SkyCoord('icrs', 0 * u.deg, 1 * u.deg) + sc2 = SkyCoord('icrs', 1 * u.deg, 1 * u.deg, distance=1 * u.kpc) + + assert repr(sc1) == '' + assert repr(sc2) == '' + + sc3 = SkyCoord('icrs', 0.25 * u.deg, [1, 2.5] * u.deg) + assert repr(sc3) == ('') + + sc_default = SkyCoord(0 * u.deg, 1 * u.deg) + assert repr(sc_default) == '' + + +def test_ops(): + """ + Tests miscellaneous operations like `len` + """ + sc = SkyCoord('icrs', 0 * u.deg, 1 * u.deg) + sc_arr = SkyCoord('icrs', 0 * u.deg, [1, 2] * u.deg) + sc_empty = SkyCoord('icrs', [] * u.deg, [] * u.deg) + + assert sc.isscalar + assert not sc_arr.isscalar + assert not sc_empty.isscalar + + with pytest.raises(TypeError): + len(sc) + assert len(sc_arr) == 2 + assert len(sc_empty) == 0 + + assert bool(sc) + assert bool(sc_arr) + assert not bool(sc_empty) + + assert sc_arr[0].isscalar + assert len(sc_arr[:1]) == 1 + with pytest.raises(TypeError): + assert sc[0:] # scalar, so it shouldn't be indexable + + +def test_none_transform(): + """ + Ensure that transforming from a SkyCoord with no frame provided works like + ICRS + """ + sc = SkyCoord(0 * u.deg, 1 * u.deg) + sc_arr = SkyCoord(0 * u.deg, [1, 2] * u.deg) + + sc2 = sc.transform_to(ICRS) + assert sc.ra == sc2.ra and sc.dec == sc2.dec + + sc5 = sc.transform_to('fk5') + assert sc5.ra == sc2.transform_to('fk5').ra + + sc_arr2 = sc_arr.transform_to(ICRS) + sc_arr5 = sc_arr.transform_to('fk5') + npt.assert_array_equal(sc_arr5.ra, sc_arr2.transform_to('fk5').ra) + + +def test_position_angle(): + c1 = SkyCoord(0*u.deg, 0*u.deg) + + c2 = SkyCoord(1*u.deg, 0*u.deg) + npt.assert_allclose(c1.position_angle(c2) - 90.0 * u.deg, 0) + + c3 = SkyCoord(1*u.deg, 0.1*u.deg) + assert c1.position_angle(c3) < 90*u.deg + + c4 = SkyCoord(0*u.deg, 1*u.deg) + npt.assert_allclose(c1.position_angle(c4), 0) + + carr1 = SkyCoord(0*u.deg, [0, 1, 2]*u.deg) + carr2 = SkyCoord([-1, -2, -3]*u.deg, [0.1, 1.1, 2.1]*u.deg) + + res = carr1.position_angle(carr2) + assert res.shape == (3,) + assert 
np.all(res < 360*u.degree) + assert np.all(res > 270*u.degree) + + cicrs = SkyCoord(0*u.deg, 0*u.deg, frame='icrs') + cfk5 = SkyCoord(1*u.deg, 0*u.deg, frame='fk5') + # because of the frame transform, it's just a *bit* more than 90 degrees + assert cicrs.position_angle(cfk5) > 90.0 * u.deg + assert cicrs.position_angle(cfk5) < 91.0 * u.deg + + +def test_table_to_coord(): + """ + Checks "end-to-end" use of `Table` with `SkyCoord` - the `Quantity` + initializer is the intermediary that translate the table columns into + something coordinates understands. + + (Regression test for #1762 ) + """ + from ...table import Table, Column + + t = Table() + t.add_column(Column(data=[1, 2, 3], name='ra', unit=u.deg)) + t.add_column(Column(data=[4, 5, 6], name='dec', unit=u.deg)) + + c = SkyCoord(t['ra'], t['dec']) + + assert allclose(c.ra.to(u.deg), [1, 2, 3]) + assert allclose(c.dec.to(u.deg), [4, 5, 6]) + + +def assert_quantities_allclose(coord, q1s, attrs): + """ + Compare two tuples of quantities. This assumes that the values in q1 are of + order(1) and uses atol=1e-13, rtol=0. It also asserts that the units of the + two quantities are the *same*, in order to check that the representation + output has the expected units. 
+ """ + q2s = [getattr(coord, attr) for attr in attrs] + assert len(q1s) == len(q2s) + for q1, q2 in zip(q1s, q2s): + assert q1.shape == q2.shape + dq = q1 - q2 + assert np.allclose(dq.value, 0.0, rtol=0, atol=1e-13) + + +# Sets of inputs corresponding to Galactic frame +base_unit_attr_sets = [ + ('spherical', u.karcsec, u.karcsec, u.kpc, Latitude, 'l', 'b', 'distance'), + ('unitspherical', u.karcsec, u.karcsec, None, Latitude, 'l', 'b', None), + ('physicsspherical', u.karcsec, u.karcsec, u.kpc, Angle, 'phi', 'theta', 'r'), + ('cartesian', u.km, u.km, u.km, u.Quantity, 'w', 'u', 'v'), + ('cylindrical', u.km, u.karcsec, u.km, Angle, 'rho', 'phi', 'z') +] + +units_attr_sets = [] +for base_unit_attr_set in base_unit_attr_sets: + repr_name = base_unit_attr_set[0] + for representation in (repr_name, REPRESENTATION_CLASSES[repr_name]): + for c1, c2, c3 in ((1, 2, 3), ([1], [2], [3])): + for arrayify in True, False: + if arrayify: + c1 = np.array(c1) + c2 = np.array(c2) + c3 = np.array(c3) + units_attr_sets.append(base_unit_attr_set + (representation, c1, c2, c3)) +units_attr_args = 'repr_name,unit1,unit2,unit3,cls2,attr1,attr2,attr3,representation,c1,c2,c3' + + +@pytest.mark.parametrize(units_attr_args, + (x for x in units_attr_sets if x[0] != 'unitspherical')) +def test_skycoord_three_components(repr_name, unit1, unit2, unit3, cls2, attr1, attr2, attr3, + representation, c1, c2, c3): + """ + Tests positional inputs using components (COMP1, COMP2, COMP3) + and various representations. Use weird units and Galactic frame. 
+ """ + sc = SkyCoord(Galactic, c1, c2, c3, unit=(unit1, unit2, unit3), + representation=representation) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2, c3*unit3), + (attr1, attr2, attr3)) + + sc = SkyCoord(1000*c1*u.Unit(unit1/1000), cls2(c2, unit=unit2), + 1000*c3*u.Unit(unit3/1000), Galactic, + unit=(unit1, unit2, unit3), representation=representation) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2, c3*unit3), + (attr1, attr2, attr3)) + + kwargs = {attr3: c3} + sc = SkyCoord(Galactic, c1, c2, unit=(unit1, unit2, unit3), + representation=representation, **kwargs) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2, c3*unit3), + (attr1, attr2, attr3)) + + kwargs = {attr1: c1, attr2: c2, attr3: c3} + sc = SkyCoord(Galactic, unit=(unit1, unit2, unit3), + representation=representation, **kwargs) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2, c3*unit3), + (attr1, attr2, attr3)) + + +@pytest.mark.parametrize(units_attr_args, + (x for x in units_attr_sets + if x[0] in ('spherical', 'unitspherical'))) +def test_skycoord_spherical_two_components(repr_name, unit1, unit2, unit3, cls2, + attr1, attr2, attr3, representation, c1, c2, c3): + """ + Tests positional inputs using components (COMP1, COMP2) for spherical + representations. Use weird units and Galactic frame. 
+ """ + sc = SkyCoord(Galactic, c1, c2, unit=(unit1, unit2), + representation=representation) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2), + (attr1, attr2)) + + sc = SkyCoord(1000*c1*u.Unit(unit1/1000), cls2(c2, unit=unit2), + Galactic, + unit=(unit1, unit2, unit3), representation=representation) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2), + (attr1, attr2)) + + kwargs = {attr1: c1, attr2: c2} + sc = SkyCoord(Galactic, unit=(unit1, unit2), + representation=representation, **kwargs) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2), + (attr1, attr2)) + + +@pytest.mark.parametrize(units_attr_args, + (x for x in units_attr_sets if x[0] != 'unitspherical')) +def test_galactic_three_components(repr_name, unit1, unit2, unit3, cls2, attr1, attr2, attr3, + representation, c1, c2, c3): + """ + Tests positional inputs using components (COMP1, COMP2, COMP3) + and various representations. Use weird units and Galactic frame. + """ + sc = Galactic(1000*c1*u.Unit(unit1/1000), cls2(c2, unit=unit2), + 1000*c3*u.Unit(unit3/1000), representation=representation) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2, c3*unit3), + (attr1, attr2, attr3)) + + kwargs = {attr3: c3*unit3} + sc = Galactic(c1*unit1, c2*unit2, + representation=representation, **kwargs) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2, c3*unit3), + (attr1, attr2, attr3)) + + kwargs = {attr1: c1*unit1, attr2: c2*unit2, attr3: c3*unit3} + sc = Galactic(representation=representation, **kwargs) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2, c3*unit3), + (attr1, attr2, attr3)) + + +@pytest.mark.parametrize(units_attr_args, + (x for x in units_attr_sets + if x[0] in ('spherical', 'unitspherical'))) +def test_galactic_spherical_two_components(repr_name, unit1, unit2, unit3, cls2, + attr1, attr2, attr3, representation, c1, c2, c3): + """ + Tests positional inputs using components (COMP1, COMP2) for spherical + representations. Use weird units and Galactic frame. 
+ """ + + sc = Galactic(1000*c1*u.Unit(unit1/1000), cls2(c2, unit=unit2), representation=representation) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2), (attr1, attr2)) + + sc = Galactic(c1*unit1, c2*unit2, representation=representation) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2), (attr1, attr2)) + + kwargs = {attr1: c1*unit1, attr2: c2*unit2} + sc = Galactic(representation=representation, **kwargs) + assert_quantities_allclose(sc, (c1*unit1, c2*unit2), (attr1, attr2)) + + +@pytest.mark.parametrize('repr_name,unit1,unit2,unit3,cls2,attr1,attr2,attr3', + (x for x in base_unit_attr_sets if x[0] != 'unitspherical')) +def test_skycoord_coordinate_input(repr_name, unit1, unit2, unit3, cls2, attr1, attr2, attr3): + c1, c2, c3 = 1, 2, 3 + sc = SkyCoord([(c1, c2, c3)], unit=(unit1, unit2, unit3), representation=repr_name, + frame='galactic') + assert_quantities_allclose(sc, ([c1]*unit1, [c2]*unit2, [c3]*unit3), (attr1, attr2, attr3)) + + c1, c2, c3 = 1*unit1, 2*unit2, 3*unit3 + sc = SkyCoord([(c1, c2, c3)], representation=repr_name, frame='galactic') + assert_quantities_allclose(sc, ([1]*unit1, [2]*unit2, [3]*unit3), (attr1, attr2, attr3)) + + +def test_skycoord_string_coordinate_input(): + sc = SkyCoord('01 02 03 +02 03 04', unit='deg', representation='unitspherical') + assert_quantities_allclose(sc, (Angle('01:02:03', unit='deg'), + Angle('02:03:04', unit='deg')), + ('ra', 'dec')) + sc = SkyCoord(['01 02 03 +02 03 04'], unit='deg', representation='unitspherical') + assert_quantities_allclose(sc, (Angle(['01:02:03'], unit='deg'), + Angle(['02:03:04'], unit='deg')), + ('ra', 'dec')) + + +def test_units(): + sc = SkyCoord(1, 2, 3, unit='m', representation='cartesian') # All get meters + assert sc.x.unit is u.m + assert sc.y.unit is u.m + assert sc.z.unit is u.m + + sc = SkyCoord(1, 2*u.km, 3, unit='m', representation='cartesian') # All get u.m + assert sc.x.unit is u.m + assert sc.y.unit is u.m + assert sc.z.unit is u.m + + sc = SkyCoord(1, 2, 3, unit=u.m, 
representation='cartesian') # All get u.m + assert sc.x.unit is u.m + assert sc.y.unit is u.m + assert sc.z.unit is u.m + + sc = SkyCoord(1, 2, 3, unit='m, km, pc', representation='cartesian') + assert_quantities_allclose(sc, (1*u.m, 2*u.km, 3*u.pc), ('x', 'y', 'z')) + + with pytest.raises(u.UnitsError) as err: + SkyCoord(1, 2, 3, unit=(u.m, u.m), representation='cartesian') + assert 'should have matching physical types' in str(err) + + SkyCoord(1, 2, 3, unit=(u.m, u.km, u.pc), representation='cartesian') + assert_quantities_allclose(sc, (1*u.m, 2*u.km, 3*u.pc), ('x', 'y', 'z')) + + +@pytest.mark.xfail +def test_units_known_fail(): + # should fail but doesn't => corner case oddity + with pytest.raises(u.UnitsError): + SkyCoord(1, 2, 3, unit=u.deg, representation='spherical') + +def test_nodata_failure(): + with pytest.raises(ValueError): + SkyCoord() diff --git a/astropy/coordinates/tests/test_transformations.py b/astropy/coordinates/tests/test_transformations.py new file mode 100644 index 0000000..d25eee1 --- /dev/null +++ b/astropy/coordinates/tests/test_transformations.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from math import fabs + +import numpy as np +from numpy import testing as npt + +from ... import units as u +from ..distances import Distance +from .. import transformations as t +from ..builtin_frames import ICRS, FK5, FK4, FK4NoETerms, Galactic +from .. import representation as r +from ..baseframe import frame_transform_graph +from ...tests.helper import pytest + + + +#Coordinates just for these tests. 
+class TestCoo1(ICRS): + pass + + +class TestCoo2(ICRS): + pass + + +def test_transform_classes(): + """ + Tests the class-based/OO syntax for creating transforms + """ + + tfun = lambda c, f: f.__class__(ra=c.ra, dec=c.dec) + trans1 = t.FunctionTransform(tfun, TestCoo1, TestCoo2, + register_graph=frame_transform_graph) + + c1 = TestCoo1(ra=1*u.radian, dec=0.5*u.radian) + c2 = c1.transform_to(TestCoo2) + npt.assert_allclose(c2.ra.radian, 1) + npt.assert_allclose(c2.dec.radian, 0.5) + + + def matfunc(coo, fr): + return [[1, 0, 0], + [0, coo.ra.degree, 0], + [0, 0, 1]] + trans2 = t.DynamicMatrixTransform(matfunc, TestCoo1, TestCoo2) + trans2.register(frame_transform_graph) + + c3 = TestCoo1(ra=1*u.deg, dec=2*u.deg) + c4 = c3.transform_to(TestCoo2) + + npt.assert_allclose(c4.ra.degree, 1) + npt.assert_allclose(c4.ra.degree, 1) + + # be sure to unregister the second one - no need for trans1 because it + # already got unregistered when trans2 was created. + trans2.unregister(frame_transform_graph) + + +def test_transform_decos(): + """ + Tests the decorator syntax for creating transforms + """ + c1 = TestCoo1(ra=1*u.deg, dec=2*u.deg) + + @frame_transform_graph.transform(t.FunctionTransform, TestCoo1, TestCoo2) + def trans(coo1, f): + return TestCoo2(ra=coo1.ra, dec=coo1.dec * 2) + + c2 = c1.transform_to(TestCoo2) + npt.assert_allclose(c2.ra.degree, 1) + npt.assert_allclose(c2.dec.degree, 4) + + c3 = TestCoo1(r.CartesianRepresentation(x=1*u.pc, y=1*u.pc, z=2*u.pc)) + + @frame_transform_graph.transform(t.StaticMatrixTransform, TestCoo1, TestCoo2) + def matrix(): + return [[2, 0, 0], + [0, 1, 0], + [0, 0, 1]] + + c4 = c3.transform_to(TestCoo2) + + npt.assert_allclose(c4.cartesian.x.value, 2) + npt.assert_allclose(c4.cartesian.y.value, 1) + npt.assert_allclose(c4.cartesian.z.value, 2) + + +def test_shortest_path(): + class FakeTransform(object): + def __init__(self, pri): + self.priority = pri + + g = t.TransformGraph() + + #cheating by adding graph elements directly that 
are not classes - the + #graphing algorithm still works fine with integers - it just isn't a valid + #TransformGraph + + #the graph looks is a down-going diamond graph with the lower-right slightly + #heavier and a cycle from the bottom to the top + #also, a pair of nodes isolated from 1 + + g._graph[1][2] = FakeTransform(1) + g._graph[1][3] = FakeTransform(1) + g._graph[2][4] = FakeTransform(1) + g._graph[3][4] = FakeTransform(2) + g._graph[4][1] = FakeTransform(5) + + g._graph[5][6] = FakeTransform(1) + + path, d = g.find_shortest_path(1, 2) + assert path == [1, 2] + assert d == 1 + path, d = g.find_shortest_path(1, 3) + assert path == [1, 3] + assert d == 1 + path, d = g.find_shortest_path(1, 4) + print('Cached paths:', g._shortestpaths) + assert path == [1, 2, 4] + assert d == 2 + + #unreachable + path, d = g.find_shortest_path(1, 5) + assert path is None + assert d == float('inf') + + path, d = g.find_shortest_path(5, 6) + assert path == [5, 6] + assert d == 1 + + +def test_sphere_cart(): + """ + Tests the spherical <-> cartesian transform functions + """ + from numpy.testing.utils import assert_allclose + from ...utils import NumpyRNGContext + from ..distances import spherical_to_cartesian, cartesian_to_spherical + + + x, y, z = spherical_to_cartesian(1, 0, 0) + npt.assert_allclose(x, 1) + npt.assert_allclose(y, 0) + npt.assert_allclose(z, 0) + + x, y, z = spherical_to_cartesian(0, 1, 1) + npt.assert_allclose(x, 0) + npt.assert_allclose(y, 0) + npt.assert_allclose(z, 0) + + x, y, z = spherical_to_cartesian(5, 0, np.arcsin(4. 
/ 5.)) + npt.assert_allclose(x, 3) + npt.assert_allclose(y, 4) + npt.assert_allclose(z, 0) + + r, lat, lon = cartesian_to_spherical(0, 1, 0) + npt.assert_allclose(r, 1) + npt.assert_allclose(lat, 0) + npt.assert_allclose(lon, np.pi / 2) + + #test round-tripping + with NumpyRNGContext(13579): + x, y, z = np.random.randn(3, 5) + + r, lat, lon = cartesian_to_spherical(x, y, z) + x2, y2, z2 = spherical_to_cartesian(r, lat, lon) + + assert_allclose(x, x2) + assert_allclose(y, y2) + assert_allclose(z, z2) + + +m31_sys = [ICRS, FK5, FK4, Galactic] +m31_coo = [(10.6847929, 41.2690650), (10.6847929, 41.2690650), (10.0004738, 40.9952444), (121.1744050, -21.5729360)] +m31_dist = Distance(770, u.kpc) +convert_precision = 1 * u.arcsec +roundtrip_precision = 1e-4 * u.degree +dist_precision = 1e-9 * u.kpc + +m31_params =[] +for i in range(len(m31_sys)): + for j in range(len(m31_sys)): + if i < j: + m31_params.append((m31_sys[i], m31_sys[j], m31_coo[i], m31_coo[j])) + +@pytest.mark.parametrize(('fromsys', 'tosys', 'fromcoo', 'tocoo'), m31_params) +def test_m31_coord_transforms(fromsys, tosys, fromcoo, tocoo): + """ + This tests a variety of coordinate conversions for the Chandra point-source + catalog location of M31 from NED. 
+ """ + + from ...time import Time + + coo1 = fromsys(ra=fromcoo[0]*u.deg, dec=fromcoo[1]*u.deg, distance=m31_dist) + coo2 = coo1.transform_to(tosys) + if tosys is FK4: + coo2_prec = coo2.transform_to(FK4(equinox=Time('B1950', scale='utc'))) + assert (coo2_prec.spherical.lon - tocoo[0]*u.deg) < convert_precision # <1 arcsec + assert (coo2_prec.spherical.lat - tocoo[1]*u.deg) < convert_precision + else: + assert (coo2.spherical.lon - tocoo[0]*u.deg) < convert_precision # <1 arcsec + assert (coo2.spherical.lat - tocoo[1]*u.deg) < convert_precision + assert coo1.distance.unit == u.kpc + assert coo2.distance.unit == u.kpc + assert m31_dist.unit == u.kpc + assert (coo2.distance - m31_dist) < dist_precision + + #check round-tripping + coo1_2 = coo2.transform_to(fromsys) + assert (coo1_2.spherical.lon - fromcoo[0]*u.deg) < roundtrip_precision + assert (coo1_2.spherical.lat - fromcoo[1]*u.deg) < roundtrip_precision + assert (coo1_2.distance - m31_dist) < dist_precision + + +def test_precession(): + """ + Ensures that FK4 and FK5 coordinates precess their equinoxes + """ + from ...time import Time + + j2000 = Time('J2000', scale='utc') + b1950 = Time('B1950', scale='utc') + j1975 = Time('J1975', scale='utc') + b1975 = Time('B1975', scale='utc') + + fk4 = FK4(ra=1*u.radian, dec=0.5*u.radian) + assert fk4.equinox.byear == b1950.byear + fk4_2 = fk4.transform_to(FK4(equinox=b1975)) + assert fk4_2.equinox.byear == b1975.byear + + fk5 = FK5(ra=1*u.radian, dec=0.5*u.radian) + assert fk5.equinox.jyear == j2000.jyear + fk5_2 = fk5.transform_to(FK4(equinox=j1975)) + assert fk5_2.equinox.jyear == j1975.jyear + + +def test_transform_path_pri(): + """ + This checks that the transformation path prioritization works by + making sure the ICRS -> Gal transformation always goes through FK5 + and not FK4. 
+ """ + frame_transform_graph.invalidate_cache() + tpath, td = frame_transform_graph.find_shortest_path(ICRS, Galactic) + assert tpath == [ICRS, FK5, Galactic] + assert td == 2 + + #but direct from FK4 to Galactic should still be possible + tpath, td = frame_transform_graph.find_shortest_path(FK4, Galactic) + assert tpath == [FK4, FK4NoETerms, Galactic] + assert td == 2 + + +def test_obstime(): + """ + Checks to make sure observation time is + accounted for at least in FK4 <-> ICRS transformations + """ + from ...time import Time + + b1950 = Time('B1950', scale='utc') + j1975 = Time('J1975', scale='utc') + + fk4_50 = FK4(ra=1*u.deg, dec=2*u.deg, obstime=b1950) + fk4_75 = FK4(ra=1*u.deg, dec=2*u.deg, obstime=j1975) + + icrs_50 = fk4_50.transform_to(ICRS) + icrs_75 = fk4_75.transform_to(ICRS) + + # now check that the resulting coordinates are *different* - they should be, + # because the obstime is different + assert icrs_50.ra.degree != icrs_75.ra.degree + assert icrs_50.dec.degree != icrs_75.dec.degree diff --git a/astropy/coordinates/transformations.py b/astropy/coordinates/transformations.py new file mode 100644 index 0000000..9c8eca6 --- /dev/null +++ b/astropy/coordinates/transformations.py @@ -0,0 +1,924 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +This module contains a general framework for defining graphs of transformations +between coordinates, suitable for either spatial coordinates or more generalized +coordinate systems. + +The fundamental idea is that each class is a node in the transformation graph, +and transitions from one node to another are defined as functions (or methods) +wrapped in transformation objects. + +This module also includes more specific transformation classes for +celestial/spatial coordinate frames, generally focused around matrix-style +transformations that are typically how the algorithms are defined. 
+""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import heapq +import inspect +import subprocess + +from abc import ABCMeta, abstractmethod +from collections import defaultdict + +import numpy as np + +from ..utils.compat import ignored +from ..extern import six + + +__all__ = ['TransformGraph', 'CoordinateTransform', 'FunctionTransform', + 'StaticMatrixTransform', 'DynamicMatrixTransform', 'CompositeTransform'] + + +class TransformGraph(object): + """ + A graph representing the paths between coordinate frames. + """ + + def __init__(self): + self._graph = defaultdict(dict) + self.invalidate_cache() # generates cache entries + + @property + def _cached_names(self): + if self._cached_names_dct is None: + self._cached_names_dct = dct = {} + for c in self.frame_set: + nm = getattr(c, 'name', None) + if nm is not None: + dct[nm] = c + + return self._cached_names_dct + + @property + def frame_set(self): + """ + A `set` of all the frame classes present in this `TransformGraph`. + """ + if self._cached_frame_set is None: + self._cached_frame_set = frm_set = set() + for a in self._graph: + frm_set.add(a) + for b in self._graph[a]: + frm_set.add(b) + + return self._cached_frame_set.copy() + + def invalidate_cache(self): + """ + Invalidates the cache that stores optimizations for traversing the + transform graph. This is called automatically when transforms + are added or removed, but will need to be called manually if + weights on transforms are modified inplace. + """ + self._cached_names_dct = None + self._cached_frame_set = None + self._shortestpaths = {} + self._composite_cache = {} + + def add_transform(self, fromsys, tosys, transform): + """ + Add a new coordinate transformation to the graph. + + Parameters + ---------- + fromsys : class + The coordinate frame class to start from. + tosys : class + The coordinate frame class to transform into. 
+ transform : CoordinateTransform or similar callable + The transformation object. Typically a `CoordinateTransform` object, + although it may be some other callable that is called with the same + signature. + + Raises + ------ + TypeError + If ``fromsys`` or ``tosys`` are not classes or ``transform`` is + not callable. + """ + + if not inspect.isclass(fromsys): + raise TypeError('fromsys must be a class') + if not inspect.isclass(tosys): + raise TypeError('tosys must be a class') + if not six.callable(transform): + raise TypeError('transform must be callable') + + self._graph[fromsys][tosys] = transform + self.invalidate_cache() + + def remove_transform(self, fromsys, tosys, transform): + """ + Removes a coordinate transform from the graph. + + Parameters + ---------- + fromsys : class or `None` + The coordinate frame *class* to start from. If `None`, + ``transform`` will be searched for and removed (``tosys`` must + also be `None`). + tosys : class or `None` + The coordinate frame *class* to transform into. If `None`, + ``transform`` will be searched for and removed (``fromsys`` must + also be `None`). + transform : callable or `None` + The transformation object to be removed or `None`. If `None` + and ``tosys`` and ``fromsys`` are supplied, there will be no + check to ensure the correct object is removed. 
+ """ + if fromsys is None or tosys is None: + if not (tosys is None and fromsys is None): + raise ValueError('fromsys and tosys must both be None if either are') + if transform is None: + raise ValueError('cannot give all Nones to remove_transform') + + # search for the requested transform by brute force and remove it + for a in self._graph: + agraph = self._graph[a] + for b in agraph: + if b is transform: + del agraph[b] + break + else: + raise ValueError('Could not find transform {0} in the ' + 'graph'.format(transform)) + + else: + if transform is None: + self._graph[fromsys].pop(tosys, None) + else: + curr = self._graph[fromsys].get(tosys, None) + if curr is transform: + self._graph[fromsys].pop(tosys) + else: + raise ValueError('Current transform from {0} to {1} is not ' + '{2}'.format(fromsys, tosys, transform)) + self.invalidate_cache() + + def find_shortest_path(self, fromsys, tosys): + """ + Computes the shortest distance along the transform graph from + one system to another. + + Parameters + ---------- + fromsys : class + The coordinate frame class to start from. + tosys : class + The coordinate frame class to transform into. + + Returns + ------- + path : list of classes or `None` + The path from ``fromsys`` to ``tosys`` as an in-order sequence + of classes. This list includes *both* ``fromsys`` and + ``tosys``. Is `None` if there is no possible path. + distance : number + The total distance/priority from ``fromsys`` to ``tosys``. If + priorities are not set this is the number of transforms + needed. Is ``inf`` if there is no possible path. + """ + + inf = float('inf') + + # special-case the 0 or 1-path + if tosys is fromsys: + if tosys not in self._graph[fromsys]: + # Means there's no transform necessary to go from it to itself. + return [tosys], 0 + if tosys in self._graph[fromsys]: + # this will also catch the case where tosys is fromsys, but has + # a defined transform. 
+ t = self._graph[fromsys][tosys] + return [fromsys, tosys], float(t.priority if hasattr(t, 'priority') else 1) + + #otherwise, need to construct the path: + + if fromsys in self._shortestpaths: + # already have a cached result + fpaths = self._shortestpaths[fromsys] + if tosys in fpaths: + return fpaths[tosys] + else: + return None, inf + + # use Dijkstra's algorithm to find shortest path in all other cases + + nodes = [] + # first make the list of nodes + for a in self._graph: + if a not in nodes: + nodes.append(a) + for b in self._graph[a]: + if b not in nodes: + nodes.append(b) + + if fromsys not in nodes or tosys not in nodes: + # fromsys or tosys are isolated or not registered, so there's + # certainly no way to get from one to the other + return None, inf + + edgeweights = {} + # construct another graph that is a dict of dicts of priorities + # (used as edge weights in Dijkstra's algorithm) + for a in self._graph: + edgeweights[a] = aew = {} + agraph = self._graph[a] + for b in agraph: + aew[b] = float(agraph[b].priority if hasattr(agraph[b], 'priority') else 1) + + # entries in q are [distance, count, nodeobj, pathlist] + # count is needed because in py 3.x, tie-breaking fails on the nodes. 
+ # this way, insertion order is preserved if the weights are the same + q = [[inf, i, n, []] for i, n in enumerate(nodes) if n is not fromsys] + q.insert(0, [0, -1, fromsys, []]) + + # this dict will store the distance to node from ``fromsys`` and the path + result = {} + + # definitely starts as a valid heap because of the insert line; from the + # node to itself is always the shortest distance + while len(q) > 0: + d, orderi, n, path = heapq.heappop(q) + + if d == inf: + # everything left is unreachable from fromsys, just copy them to + # the results and jump out of the loop + result[n] = (None, d) + for d, orderi, n, path in q: + result[n] = (None, d) + break + else: + result[n] = (path, d) + path.append(n) + if n not in edgeweights: + # this is a system that can be transformed to, but not from. + continue + for n2 in edgeweights[n]: + if n2 not in result: # already visited + # find where n2 is in the heap + for i in range(len(q)): + if q[i][2] == n2: + break + else: + raise ValueError('n2 not in heap - this should be impossible!') + + newd = d + edgeweights[n][n2] + if newd < q[i][0]: + q[i][0] = newd + q[i][3] = list(path) + heapq.heapify(q) + + # cache for later use + self._shortestpaths[fromsys] = result + return result[tosys] + + def get_transform(self, fromsys, tosys): + """ + Generates and returns the `CompositeTransform` for a transformation + between two coordinate systems. + + Parameters + ---------- + fromsys : class + The coordinate frame class to start from. + tosys : class + The coordinate frame class to transform into. + + Returns + ------- + trans : `CompositeTransform` or `None` + If there is a path from ``fromsys`` to ``tosys``, this is a + transform object for that path. If no path could be found, this is + `None`. + + Notes + ----- + This function always returns a `CompositeTransform`, because + `CompositeTransform` is slightly more adaptable in the way it can be + called than other transform classes. 
Specifically, it takes care of + inetermediate steps of transformations in a way that is consistent with + 1-hop transformations. + + """ + if not inspect.isclass(fromsys): + raise TypeError('fromsys is not a class') + if not inspect.isclass(fromsys): + raise TypeError('tosys is not a class') + + path, distance = self.find_shortest_path(fromsys, tosys) + + if path is None: + return None + + transforms = [] + currsys = fromsys + for p in path[1:]: # first element is fromsys so we skip it + transforms.append(self._graph[currsys][p]) + currsys = p + + fttuple = (fromsys, tosys) + if fttuple not in self._composite_cache: + comptrans = CompositeTransform(transforms, fromsys, tosys, + register_graph=False) + self._composite_cache[fttuple] = comptrans + return self._composite_cache[fttuple] + + def lookup_name(self, name): + """ + Tries to locate the coordinate class with the provided alias. + + Parameters + ---------- + name : str + The alias to look up. + + Returns + ------- + coordcls + The coordinate class corresponding to the ``name`` or `None` if + no such class exists. + """ + + return self._cached_names.get(name, None) + + def get_names(self): + """ + Returns all available transform names. They will all be + valid arguments to `lookup_name`. + + Returns + ------- + nms : list + The aliases for coordinate systems. + """ + return list(six.iterkeys(self._cached_names)) + + def to_dot_graph(self, priorities=True, addnodes=[], savefn=None, + savelayout='plain', saveformat=None): + """ + Converts this transform graph to the graphviz_ DOT format. + + Optionally saves it (requires `graphviz`_ be installed and on your path). + + .. _graphviz: http://www.graphviz.org/ + + Parameters + ---------- + priorities : bool + If `True`, show the priority values for each transform. Otherwise, + the will not be included in the graph. 
def to_dot_graph(self, priorities=True, addnodes=(), savefn=None,
                 savelayout='plain', saveformat=None):
    """
    Converts this transform graph to the graphviz_ DOT format.

    Optionally saves it (requires `graphviz`_ be installed and on your path).

    .. _graphviz: http://www.graphviz.org/

    Parameters
    ----------
    priorities : bool
        If `True`, show the priority values for each transform.  Otherwise,
        they will not be included in the graph.
    addnodes : sequence of str
        Additional coordinate systems to add (this can include systems
        already in the transform graph, but they will only appear once).
        The default is an empty tuple rather than the previous mutable
        ``[]`` so the default cannot be shared between calls.
    savefn : `None` or str
        The file name to save this graph to or `None` to not save
        to a file.
    savelayout : str
        The graphviz program to use to layout the graph (see graphviz_
        for details) or 'plain' to just save the DOT graph content.
        Ignored if ``savefn`` is `None`.
    saveformat : str
        The graphviz output format. (e.g. the ``-Txxx`` option for
        the command line program - see graphviz docs for details).
        Ignored if ``savefn`` is `None`.

    Returns
    -------
    dotgraph : str
        A string with the DOT format graph.
    """
    nodes = []
    # find the node names
    for a in self._graph:
        if a not in nodes:
            nodes.append(a)
        for b in self._graph[a]:
            if b not in nodes:
                nodes.append(b)
    for node in addnodes:
        if node not in nodes:
            nodes.append(node)

    nodenames = []
    # dict iteration via .items() is py2/py3 identical; no six needed here
    invclsaliases = dict([(v, k) for k, v in self._cached_names.items()])
    for n in nodes:
        if n in invclsaliases:
            nodenames.append('{0} [shape=oval label="{0}\\n`{1}`"]'.format(n.__name__, invclsaliases[n]))
        else:
            nodenames.append(n.__name__ + '[ shape=oval ]')

    edgenames = []
    # Now the edges
    for a in self._graph:
        agraph = self._graph[a]
        for b in agraph:
            pri = agraph[b].priority if hasattr(agraph[b], 'priority') else 1
            edgenames.append((a.__name__, b.__name__, pri))

    # generate simple dot format graph
    lines = ['digraph AstropyCoordinateTransformGraph {']
    lines.append('; '.join(nodenames) + ';')
    for enm1, enm2, weights in edgenames:
        labelstr = '[ label = "{0}" ]'.format(weights) if priorities else ''
        lines.append('{0} -> {1}{2};'.format(enm1, enm2, labelstr))
    lines.append('')
    lines.append('overlap=false')
    lines.append('}')
    dotgraph = '\n'.join(lines)

    if savefn is not None:
        if savelayout == 'plain':
            with open(savefn, 'w') as f:
                f.write(dotgraph)
        else:
            args = [savelayout]
            if saveformat is not None:
                args.append('-T' + saveformat)
            proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            # BUGFIX: Popen pipes carry *bytes* on Python 3; the original
            # passed the unicode DOT source straight to communicate() and
            # concatenated the bytes stderr into the error message, both of
            # which raise TypeError on py3.  Encode going in, decode the
            # diagnostics, and write graphviz's binary output untouched.
            stdout, stderr = proc.communicate(dotgraph.encode('utf-8'))
            if proc.returncode != 0:
                raise IOError('problem running graphviz: \n' +
                              stderr.decode('utf-8', 'replace'))

            with open(savefn, 'wb') as f:
                f.write(stdout)

    return dotgraph


def to_networkx_graph(self):
    """
    Converts this transform graph into a networkx graph.

    .. note::
        You must have the `networkx <http://networkx.lanl.gov/>`_
        package installed for this to work.

    Returns
    -------
    nxgraph : `networkx.Graph`
        This `TransformGraph` as a `networkx.Graph`.
    """
    import networkx as nx

    nxgraph = nx.Graph()

    # first make the nodes
    for a in self._graph:
        if a not in nxgraph:
            nxgraph.add_node(a)
        for b in self._graph[a]:
            if b not in nxgraph:
                nxgraph.add_node(b)

    # Now the edges
    for a in self._graph:
        agraph = self._graph[a]
        for b in agraph:
            pri = agraph[b].priority if hasattr(agraph[b], 'priority') else 1
            nxgraph.add_edge(a, b, weight=pri)

    return nxgraph
def transform(self, transcls, fromsys, tosys, priority=1):
    """
    A function decorator for defining transformations.

    .. note::
        If decorating a static method of a class, ``@staticmethod``
        should be added *above* this decorator.

    Parameters
    ----------
    transcls : class
        The class of the transformation object to create.
    fromsys : class
        The coordinate frame class to start from.
    tosys : class
        The coordinate frame class to transform into.
    priority : number
        The priority of this transform when finding the shortest
        coordinate transform path - large numbers are lower priorities.

    Returns
    -------
    deco : function
        A function that can be called on another function as a decorator
        (see example).

    Notes
    -----
    This decorator assumes the first argument of the ``transcls``
    initializer accepts a callable, and that the second and third
    are ``fromsys`` and ``tosys``. If this is not true, you should just
    initialize the class manually and use `add_transform` instead of
    using this decorator.

    Examples
    --------
    ::

        graph = TransformGraph()

        class Frame1(BaseCoordinateFrame):
            ...

        class Frame2(BaseCoordinateFrame):
            ...

        @graph.transform(FunctionTransform, Frame1, Frame2)
        def f1_to_f2(f1_obj):
            ... do something with f1_obj ...
            return f2_obj
    """
    def deco(func):
        # the constructed transform instance is deliberately discarded:
        # passing ``register_graph=self`` makes the transform's
        # constructor record it in this graph automatically
        transcls(func, fromsys, tosys, priority=priority,
                 register_graph=self)
        return func
    return deco
+ """ + + def __init__(self, fromsys, tosys, priority=1, register_graph=None): + if not inspect.isclass(fromsys): + raise TypeError('fromsys must be a class') + if not inspect.isclass(tosys): + raise TypeError('tosys must be a class') + + self.fromsys = fromsys + self.tosys = tosys + self.priority = float(priority) + + if register_graph: + # this will do the type-checking when it adds to the graph + self.register(register_graph) + else: + if not inspect.isclass(fromsys) or not inspect.isclass(tosys): + raise TypeError('fromsys and tosys must be classes') + + self.overlapping_frame_attr_names = overlap = [] + if (hasattr(fromsys, 'get_frame_attr_names') and + hasattr(tosys, 'get_frame_attr_names')): + #the if statement is there so that non-frame things might be usable + #if it makes sense + for from_nm in fromsys.get_frame_attr_names(): + if from_nm in tosys.get_frame_attr_names(): + overlap.append(from_nm) + + def register(self, graph): + """ + Add this transformation to the requested Transformation graph, + replacing anything already connecting these two coordinates. + + Parameters + ---------- + graph : a TransformGraph object + The graph to register this transformation with. + """ + graph.add_transform(self.fromsys, self.tosys, self) + + def unregister(self, graph): + """ + Remove this transformation from the requested transformation + graph. + + Parameters + ---------- + graph : a TransformGraph object + The graph to unregister this transformation from. + + Raises + ------ + ValueError + If this is not currently in the transform graph. + """ + graph.remove_transform(self.fromsys, self.tosys, self) + + @abstractmethod + def __call__(self, fromcoord, toframe): + """ + Does the actual coordinate transformation from the ``fromsys`` class to + the ``tosys`` class. + + Parameters + ---------- + fromcoord : fromsys object + An object of class matching ``fromsys`` that is to be transformed. 
class FunctionTransform(CoordinateTransform):
    """
    A coordinate transformation defined by a function that accepts a
    coordinate object and returns the transformed coordinate object.

    Parameters
    ----------
    func : callable
        The transformation function. Should have a call signature
        ``func(formcoord, toframe)``. Note that, unlike
        `CoordinateTransform.__call__`, ``toframe`` is assumed to be of type
        ``tosys`` for this function.
    fromsys : class
        The coordinate frame class to start from.
    tosys : class
        The coordinate frame class to transform into.
    priority : number
        The priority of this transform when finding the shortest
        coordinate transform path - large numbers are lower priorities.
    register_graph : `TransformGraph` or `None`
        A graph to register this transformation with on creation, or
        `None` to leave it unregistered.

    Raises
    ------
    TypeError
        If ``func`` is not callable.
    ValueError
        If ``func`` cannot accept two arguments.
    """
    def __init__(self, func, fromsys, tosys, priority=1, register_graph=None):
        from inspect import getargspec

        if not six.callable(func):
            raise TypeError('func must be callable')

        with ignored(TypeError):
            # TypeError raised for things getargspec can't process. We'll
            # trust the transform designer knows what they're doing, though,
            # because sometimes this is fine.
            argspec = getargspec(func)
            # BUGFIX: ``argspec[3]`` (the defaults) is None - not an empty
            # sequence - when the function has no default values, so
            # ``len(argspec[3])`` raised a TypeError that the surrounding
            # ``ignored(TypeError)`` silently swallowed.  As a result the
            # documented two-argument check never ran for ordinary
            # functions without defaults.
            # NOTE(review): for bound methods getargspec may include
            # ``self`` in the argument list, which would now be rejected -
            # confirm no callers register bound methods directly.
            n_defaults = 0 if argspec[3] is None else len(argspec[3])
            if (len(argspec[0]) - n_defaults != 2) and not argspec[1]:
                raise ValueError('provided function does not accept two arguments')

        self.func = func

        super(FunctionTransform, self).__init__(fromsys, tosys,
            priority=priority, register_graph=register_graph)

    def __call__(self, fromcoord, toframe):
        res = self.func(fromcoord, toframe)
        if not isinstance(res, self.tosys):
            raise TypeError('the transformation function yielded {0} but '
                            'should have been of type {1}'.format(res, self.tosys))
        return res
class StaticMatrixTransform(CoordinateTransform):
    """
    A coordinate transformation defined as a 3 x 3 cartesian
    transformation matrix.

    This is distinct from DynamicMatrixTransform in that this kind of matrix
    is independent of frame attributes.  That is, it depends *only* on the
    class of the frame.

    Parameters
    ----------
    matrix : array-like or callable
        A 3 x 3 matrix for transforming 3-vectors. In most cases will
        be unitary (although this is not strictly required). If a callable,
        will be called *with no arguments* to get the matrix.
    fromsys : class
        The coordinate frame class to start from.
    tosys : class
        The coordinate frame class to transform into.
    priority : number
        The priority of this transform when finding the shortest
        coordinate transform path - large numbers are lower priorities.
    register_graph : `TransformGraph` or `None`
        A graph to register this transformation with on creation, or
        `None` to leave it unregistered.

    Raises
    ------
    ValueError
        If the matrix is not 3 x 3
    """
    def __init__(self, matrix, fromsys, tosys, priority=1, register_graph=None):
        if six.callable(matrix):
            matrix = matrix()
        self.matrix = np.array(matrix)

        if self.matrix.shape != (3, 3):
            raise ValueError('Provided matrix is not 3 x 3')

        super(StaticMatrixTransform, self).__init__(
            fromsys, tosys, priority=priority, register_graph=register_graph)

    def __call__(self, fromcoord, toframe):
        from .representation import (CartesianRepresentation,
                                     UnitSphericalRepresentation)

        xyz = fromcoord.represent_as(CartesianRepresentation).xyz
        flattened = xyz.reshape((3, xyz.size // 3))
        rotated = np.dot(np.asarray(self.matrix), flattened)
        subshape = xyz.shape[1:]
        x, y, z = (rotated[i].reshape(subshape) for i in range(3))

        newrep = CartesianRepresentation(x, y, z)
        if fromcoord.data.__class__ == UnitSphericalRepresentation:
            # special-case: without this the new frame would believe it
            # carries a meaningful distance
            newrep = newrep.represent_as(UnitSphericalRepresentation)

        frameattrs = dict([(attrnm, getattr(fromcoord, attrnm))
                           for attrnm in self.overlapping_frame_attr_names])
        return toframe.realize_frame(newrep, **frameattrs)
class DynamicMatrixTransform(CoordinateTransform):
    """
    A coordinate transformation specified as a function that yields a
    3 x 3 cartesian transformation matrix.

    This is similar to, but distinct from StaticMatrixTransform, in that the
    matrix for this class might depend on frame attributes.

    Parameters
    ----------
    matrix_func : callable
        A callable that has the signature ``matrix_func(fromcoord, toframe)``
        and returns a 3 x 3 matrix that converts ``fromcoord`` in a cartesian
        representation to the new coordinate system.
    fromsys : class
        The coordinate frame class to start from.
    tosys : class
        The coordinate frame class to transform into.
    priority : number
        The priority of this transform when finding the shortest
        coordinate transform path - large numbers are lower priorities.
    register_graph : `TransformGraph` or `None`
        A graph to register this transformation with on creation, or
        `None` to leave it unregistered.

    Raises
    ------
    TypeError
        If ``matrix_func`` is not callable
    """
    def __init__(self, matrix_func, fromsys, tosys, priority=1,
                 register_graph=None):
        if not six.callable(matrix_func):
            raise TypeError('matrix_func is not callable')
        self.matrix_func = matrix_func

        super(DynamicMatrixTransform, self).__init__(
            fromsys, tosys, priority=priority, register_graph=register_graph)

    def __call__(self, fromcoord, toframe):
        from .representation import (CartesianRepresentation,
                                     UnitSphericalRepresentation)

        xyz = fromcoord.represent_as(CartesianRepresentation).xyz
        flattened = xyz.reshape((3, xyz.size // 3))
        rotated = np.dot(np.asarray(self.matrix_func(fromcoord, toframe)),
                         flattened)
        subshape = xyz.shape[1:]
        x, y, z = (rotated[i].reshape(subshape) for i in range(3))

        newrep = CartesianRepresentation(x, y, z)
        if fromcoord.data.__class__ == UnitSphericalRepresentation:
            # special-case: without this the new frame would believe it
            # carries a meaningful distance
            newrep = newrep.represent_as(UnitSphericalRepresentation)

        return toframe.realize_frame(newrep)
class CompositeTransform(CoordinateTransform):
    """
    A transformation constructed by combining together a series of single-step
    transformations.

    Note that the intermediate frame objects are constructed using any frame
    attributes in ``toframe`` or ``fromframe`` that overlap with the
    intermediate frame (``toframe`` favored over ``fromframe`` if there's a
    conflict). Any frame attributes that are not present use the defaults.

    Parameters
    ----------
    transforms : sequence of `CoordinateTransform` objects
        The sequence of transformations to apply.
    fromsys : class
        The coordinate frame class to start from.
    tosys : class
        The coordinate frame class to transform into.
    priority : number
        The priority of this transform when finding the shortest
        coordinate transform path - large numbers are lower priorities.
    register_graph : `TransformGraph` or `None`
        A graph to register this transformation with on creation, or
        `None` to leave it unregistered.
    collapse_static_mats : bool
        If `True`, consecutive `StaticMatrixTransform` will be collapsed into
        a single transformation to speed up the calculation.
    """
    def __init__(self, transforms, fromsys, tosys, priority=1,
                 register_graph=None, collapse_static_mats=True):
        super(CompositeTransform, self).__init__(fromsys, tosys,
                                                 priority=priority,
                                                 register_graph=register_graph)

        if collapse_static_mats:
            transforms = self._combine_statics(transforms)

        self.transforms = tuple(transforms)

    def _combine_statics(self, transforms):
        """
        Combines together sequences of `StaticMatrixTransform`s into a single
        transform and returns it.
        """
        newtrans = []
        for currtrans in transforms:
            lasttrans = newtrans[-1] if len(newtrans) > 0 else None

            if (isinstance(lasttrans, StaticMatrixTransform) and
                    isinstance(currtrans, StaticMatrixTransform)):
                # BUGFIX: ``__call__`` applies a matrix as ``v' = M . v``, so
                # applying ``lasttrans`` and then ``currtrans`` yields
                # ``v' = M_curr . (M_last . v) = (M_curr . M_last) . v``.
                # The original composed ``dot(lasttrans.matrix,
                # currtrans.matrix)``, which is the reverse order and wrong
                # for non-commuting (e.g. rotation) matrices.
                combinedmat = np.dot(currtrans.matrix, lasttrans.matrix)
                newtrans[-1] = StaticMatrixTransform(combinedmat,
                                                     lasttrans.fromsys,
                                                     currtrans.tosys)
            else:
                newtrans.append(currtrans)
        return newtrans

    def __call__(self, fromcoord, toframe):
        curr_coord = fromcoord
        for t in self.transforms:
            # build an intermediate frame with attributes taken from either
            # `toframe`, or if not there, `fromcoord`, or if not there, use
            # the defaults
            # TODO: caching this information when creating the transform may
            # speed things up a lot
            frattrs = {}
            for inter_frame_attr_nm in t.tosys.get_frame_attr_names():
                if hasattr(toframe, inter_frame_attr_nm):
                    attr = getattr(toframe, inter_frame_attr_nm)
                    frattrs[inter_frame_attr_nm] = attr
                elif hasattr(fromcoord, inter_frame_attr_nm):
                    attr = getattr(fromcoord, inter_frame_attr_nm)
                    frattrs[inter_frame_attr_nm] = attr

            curr_toframe = t.tosys(**frattrs)
            curr_coord = t(curr_coord, curr_toframe)

        # this is safe even in the case where self.transforms is empty,
        # because coordinate objects are immutable, so copying is not needed
        return curr_coord
+""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from .core import * +from .funcs import * diff --git a/astropy/cosmology/core.py b/astropy/cosmology/core.py new file mode 100644 index 0000000..c2334cb --- /dev/null +++ b/astropy/cosmology/core.py @@ -0,0 +1,2462 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from ..extern import six + +import sys +from math import sqrt, pi, exp, log, floor +from abc import ABCMeta, abstractmethod + +import numpy as np + +from .. import constants as const +from ..utils.misc import isiterable, deprecated +from .. import units as u +from ..utils.state import ScienceState, ScienceStateAlias + +from . import parameters + +# Originally authored by Andrew Becker (becker@astro.washington.edu), +# and modified by Neil Crighton (neilcrighton@gmail.com) and Roban +# Kramer (robanhk@gmail.com). + +# Many of these adapted from Hogg 1999, astro-ph/9905116 +# and Linder 2003, PRL 90, 91301 + +__all__ = ["FLRW", "LambdaCDM", "FlatLambdaCDM", "wCDM", "FlatwCDM", + "Flatw0waCDM", "w0waCDM", "wpwaCDM", "w0wzCDM", "get_current", + "set_current", "WMAP5", "WMAP7", "WMAP9", "Planck13", + "default_cosmology"] + +__doctest_requires__ = {'*': ['scipy.integrate']} + +# Some conversion constants -- useful to compute them once here +# and reuse in the initialization rather than have every object do them +# Note that the call to cgs is actually extremely expensive, +# so we actually skip using the units package directly, and +# hardwire the conversion from mks to cgs. This assumes that constants +# will always return mks by default -- if this is made faster for simple +# cases like this, it should be changed back. 
+# Note that the unit tests should catch it if this happens +H0units_to_invs = (u.km / (u.s * u.Mpc)).to(1.0 / u.s) +sec_to_Gyr = u.s.to(u.Gyr) +# const in critical density in cgs units (g cm^-3) +critdens_const = 3. / (8. * pi * const.G.value * 1000) +arcsec_in_radians = pi / (3600. * 180) +arcmin_in_radians = pi / (60. * 180) +# Radiation parameter over c^2 in cgs (g cm^-3 K^-4) +a_B_c2 = 4e-3 * const.sigma_sb.value / const.c.value ** 3 +# Boltzmann constant in eV / K +kB_evK = const.k_B.to(u.eV / u.K) + + +class CosmologyError(Exception): + pass + + +class Cosmology(object): + """ Placeholder for when a more general Cosmology class is + implemented. """ + + +@six.add_metaclass(ABCMeta) +class FLRW(Cosmology): + """ A class describing an isotropic and homogeneous + (Friedmann-Lemaitre-Robertson-Walker) cosmology. + + This is an abstract base class -- you can't instantiate + examples of this class, but must work with one of its + subclasses such as `LambdaCDM` or `wCDM`. + + Parameters + ---------- + + H0 : float or scalar `~astropy.units.Quantity` + Hubble constant at z = 0. If a float, must be in [km/sec/Mpc] + + Om0 : float + Omega matter: density of non-relativistic matter in units of the + critical density at z=0. + + Ode0 : float + Omega dark energy: density of dark energy in units of the critical + density at z=0. + + Tcmb0 : float or scalar `~astropy.units.Quantity` + Temperature of the CMB z=0. If a float, must be in [K]. Default: 2.725. + Setting this to zero will turn off both photons and neutrinos (even + massive ones) + + Neff : float + Effective number of Neutrino species. Default 3.04. + + m_nu : `~astropy.units.Quantity` + Mass of each neutrino species. If this is a scalar Quantity, then all + neutrino species are assumed to have that mass. Otherwise, the mass of + each species. The actual number of neutrino species (and hence the + number of elements of m_nu if it is not scalar) must be the floor of + Neff. 
Usually this means you must provide three neutrino masses unless + you are considering something like a sterile neutrino. + + name : str + Optional name for this cosmological object. + + Notes + ----- + Class instances are static -- you can't change the values + of the parameters. That is, all of the attributes above are + read only. + """ + def __init__(self, H0, Om0, Ode0, Tcmb0=2.725, Neff=3.04, + m_nu=u.Quantity(0.0, u.eV), name=None): + + # all densities are in units of the critical density + self._Om0 = float(Om0) + if self._Om0 < 0.0: + raise ValueError("Matter density can not be negative") + self._Ode0 = float(Ode0) + self._Neff = float(Neff) + if self._Neff < 0.0: + raise ValueError("Effective number of neutrinos can " + "not be negative") + self.name = name + + # Tcmb may have units + self._Tcmb0 = u.Quantity(Tcmb0, unit=u.K, dtype=np.float) + if not self._Tcmb0.isscalar: + raise ValueError("Tcmb0 is a non-scalar quantity") + + # Hubble parameter at z=0, km/s/Mpc + self._H0 = u.Quantity(H0, unit=u.km / u.s / u.Mpc, dtype=np.float) + if not self._H0.isscalar: + raise ValueError("H0 is a non-scalar quantity") + + # 100 km/s/Mpc * h = H0 (so h is dimensionless) + self._h = self._H0.value / 100. + # Hubble distance + self._hubble_distance = (const.c / self._H0).to(u.Mpc) + # H0 in s^-1; don't use units for speed + H0_s = self._H0.value * H0units_to_invs + # Hubble time; again, avoiding units package for speed + self._hubble_time = u.Quantity(sec_to_Gyr / H0_s, u.Gyr) + + # critical density at z=0 (grams per cubic cm) + cd0value = critdens_const * H0_s ** 2 + self._critical_density0 = u.Quantity(cd0value, u.g / u.cm ** 3) + + # Load up neutrino masses. Note: in Py2.x, floor is floating + self._nneutrinos = int(floor(self._Neff)) + + # We are going to share Neff between the neutrinos equally. 
+        # In detail this is not correct, but it is a standard assumption
+        # because properly calculating it is a) complicated b) depends
+        # on the details of the massive neutrinos (e.g., their weak
+        # interactions, which could be unusual if one is considering sterile
+        # neutrinos)
+        self._massivenu = False
+        if self._nneutrinos > 0 and self._Tcmb0.value > 0:
+            self._neff_per_nu = self._Neff / self._nneutrinos
+
+            # We can't use the u.Quantity constructor as we do above
+            # because it doesn't understand equivalencies
+            if not isinstance(m_nu, u.Quantity):
+                raise ValueError("m_nu must be a Quantity")
+
+            m_nu = m_nu.to(u.eV, equivalencies=u.mass_energy())
+
+            # Now, figure out if we have massive neutrinos to deal with,
+            # and, if so, get the right number of masses
+            # It is worth the effort to keep track of massless ones separately
+            # (since they are quite easy to deal with, and a common use case
+            # is to set only one neutrino to have mass)
+            if m_nu.isscalar:
+                # Assume all neutrinos have the same mass
+                if m_nu.value == 0:
+                    self._nmasslessnu = self._nneutrinos
+                    self._nmassivenu = 0
+                else:
+                    self._massivenu = True
+                    self._nmasslessnu = 0
+                    self._nmassivenu = self._nneutrinos
+                    self._massivenu_mass = (m_nu.value *
+                                            np.ones(self._nneutrinos))
+            else:
+                # Make sure we have the right number of masses
+                # -unless- they are massless, in which case we cheat a little
+                if m_nu.value.min() < 0:
+                    raise ValueError("Invalid (negative) neutrino mass"
+                                     " encountered")
+                if m_nu.value.max() == 0:
+                    self._nmasslessnu = self._nneutrinos
+                    self._nmassivenu = 0
+                else:
+                    self._massivenu = True
+                    if len(m_nu) != self._nneutrinos:
+                        raise ValueError("Unexpected number of neutrino masses")
+                    # Segregate out the massless ones
+                    try:
+                        # Numpy < 1.6 doesn't have count_nonzero
+                        self._nmasslessnu = np.count_nonzero(m_nu.value == 0)
+                    except AttributeError:
+                        self._nmasslessnu = len(np.nonzero(m_nu.value == 0)[0])
+                    self._nmassivenu = self._nneutrinos - self._nmasslessnu
+                    w = 
np.nonzero(m_nu.value > 0)[0] + self._massivenu_mass = m_nu[w] + + # Compute photon density, Tcmb, neutrino parameters + # Tcmb0=0 removes both photons and neutrinos, is handled + # as a special case for efficiency + if self._Tcmb0.value > 0: + # Compute photon density from Tcmb + self._Ogamma0 = a_B_c2 * self._Tcmb0.value ** 4 /\ + self._critical_density0.value + + # Compute Neutrino temperature + # The constant in front is (4/11)^1/3 -- see any + # cosmology book for an explanation -- for example, + # Weinberg 'Cosmology' p 154 eq (3.1.21) + self._Tnu0 = 0.7137658555036082 * self._Tcmb0 + + # Compute Neutrino Omega and total relativistic component + # for massive neutrinos + if self._massivenu: + nu_y = self._massivenu_mass / (kB_evK * self._Tnu0) + self._nu_y = nu_y.value + self._Onu0 = self._Ogamma0 * self.nu_relative_density(0) + else: + # This case is particularly simple, so do it directly + # The 0.2271... is 7/8 (4/11)^(4/3) -- the temperature + # bit ^4 (blackbody energy density) times 7/8 for + # FD vs. BE statistics. + self._Onu0 = 0.22710731766 * self._Neff * self._Ogamma0 + + else: + self._Ogamma0 = 0.0 + self._Tnu0 = u.Quantity(0.0, u.K) + self._Onu0 = 0.0 + + # Compute curvature density + self._Ok0 = 1.0 - self._Om0 - self._Ode0 - self._Ogamma0 - self._Onu0 + + def _namelead(self): + """ Helper function for constructing __repr__""" + if self.name is None: + return "{0}(".format(self.__class__.__name__) + else: + return "{0}(name=\"{1}\", ".format(self.__class__.__name__, + self.name) + + def __repr__(self): + retstr = "{0}H0={1:.3g}, Om0={2:.3g}, Ode0={3:.3g}, "\ + "Tcmb0={4:.4g}, Neff={5:.3g}, m_nu={6})" + return retstr.format(self._namelead(), self._H0, self._Om0, self._Ode0, + self._Tcmb0, self._Neff, self.m_nu) + + # Set up a set of properties for H0, Om0, Ode0, Ok0, etc. for user access. 
+ # Note that we don't let these be set (so, obj.Om0 = value fails) + + @property + def H0(self): + """ Return the Hubble constant as an `~astropy.units.Quantity` at z=0""" + return self._H0 + + @property + def Om0(self): + """ Omega matter; matter density/critical density at z=0""" + return self._Om0 + + @property + def Ode0(self): + """ Omega dark energy; dark energy density/critical density at z=0""" + return self._Ode0 + + @property + def Ok0(self): + """ Omega curvature; the effective curvature density/critical density + at z=0""" + return self._Ok0 + + @property + def Tcmb0(self): + """ Temperature of the CMB as `~astropy.units.Quantity` at z=0""" + return self._Tcmb0 + + @property + def Tnu0(self): + """ Temperature of the neutrino background as `~astropy.units.Quantity` at z=0""" + return self._Tnu0 + + @property + def Neff(self): + """ Number of effective neutrino species""" + return self._Neff + + @property + def has_massive_nu(self): + """ Does this cosmology have at least one massive neutrino species?""" + if self._Tnu0.value == 0: + return False + return self._massivenu + + @property + def m_nu(self): + """ Mass of neutrino species""" + if self._Tnu0.value == 0: + return None + if not self._massivenu: + # Only massless + return u.Quantity(np.zeros(self._nmasslessnu), u.eV, + dtype=np.float) + if self._nmasslessnu == 0: + # Only massive + return u.Quantity(self._massivenu_mass, u.eV, + dtype=np.float) + # A mix -- the most complicated case + numass = np.append(np.zeros(self._nmasslessnu), + self._massivenu_mass.value) + return u.Quantity(numass, u.eV, dtype=np.float) + + @property + def h(self): + """ Dimensionless Hubble constant: h = H_0 / 100 [km/sec/Mpc]""" + return self._h + + @property + def hubble_time(self): + """ Hubble time as `~astropy.units.Quantity`""" + return self._hubble_time + + @property + def hubble_distance(self): + """ Hubble distance as `~astropy.units.Quantity`""" + return self._hubble_distance + + @property + def 
critical_density0(self): + """ Critical density as `~astropy.units.Quantity` at z=0""" + return self._critical_density0 + + @property + def Ogamma0(self): + """ Omega gamma; the density/critical density of photons at z=0""" + return self._Ogamma0 + + @property + def Onu0(self): + """ Omega nu; the density/critical density of neutrinos at z=0""" + return self._Onu0 + + @abstractmethod + def w(self, z): + """ The dark energy equation of state. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + w : ndarray, or float if input scalar + The dark energy equation of state + + Notes + ------ + The dark energy equation of state is defined as + :math:`w(z) = P(z)/\\rho(z)`, where :math:`P(z)` is the + pressure at redshift z and :math:`\\rho(z)` is the density + at redshift z, both in units where c=1. + + This must be overridden by subclasses. + """ + raise NotImplementedError("w(z) is not implemented") + + def Om(self, z): + """ Return the density parameter for non-relativistic matter + at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + Om : ndarray, or float if input scalar + The density of non-relativistic matter relative to the critical + density at each redshift. + """ + + if isiterable(z): + z = np.asarray(z) + return self._Om0 * (1. + z) ** 3 * self.inv_efunc(z) ** 2 + + def Ok(self, z): + """ Return the equivalent density parameter for curvature + at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + Ok : ndarray, or float if input scalar + The equivalent density parameter for curvature at each redshift. + """ + + if isiterable(z): + z = np.asarray(z) + # Common enough case to be worth checking explicitly + if self._Ok0 == 0: + return np.zeros(np.asanyarray(z).shape, dtype=np.float) + else: + if self._Ok0 == 0: + return 0.0 + + return self._Ok0 * (1. 
+ z) ** 2 * self.inv_efunc(z) ** 2
+
+    def Ode(self, z):
+        """ Return the density parameter for dark energy at redshift ``z``.
+
+        Parameters
+        ----------
+        z : array_like
+          Input redshifts.
+
+        Returns
+        -------
+        Ode : ndarray, or float if input scalar
+          The density of dark energy relative to the critical
+          density at each redshift.
+        """
+
+        if isiterable(z):
+            z = np.asarray(z)
+            # Common case worth checking
+            if self._Ode0 == 0:
+                return np.zeros(np.asanyarray(z).shape, dtype=np.float)
+        else:
+            if self._Ode0 == 0:
+                return 0.0
+
+        return self._Ode0 * self.de_density_scale(z) * self.inv_efunc(z) ** 2
+
+    def Ogamma(self, z):
+        """ Return the density parameter for photons at redshift ``z``.
+
+        Parameters
+        ----------
+        z : array_like
+          Input redshifts.
+
+        Returns
+        -------
+        Ogamma : ndarray, or float if input scalar
+          The energy density of photons relative to the critical
+          density at each redshift.
+        """
+
+        if isiterable(z):
+            z = np.asarray(z)
+        return self._Ogamma0 * (1. + z) ** 4 * self.inv_efunc(z) ** 2
+
+    def Onu(self, z):
+        """ Return the density parameter for neutrinos at redshift ``z``.
+
+        Parameters
+        ----------
+        z : array_like
+          Input redshifts.
+
+        Returns
+        -------
+        Onu : ndarray, or float if input scalar
+          The energy density of neutrinos relative to the critical
+          density at each redshift. Note that this includes their
+          kinetic energy (if they have mass), so it is not equal to
+          the commonly used :math:`\\sum \\frac{m_{\\nu}}{94 eV}`,
+          which does not include kinetic energy.
+        """
+
+        if isiterable(z):
+            z = np.asarray(z)
+            if self._Onu0 == 0:
+                return np.zeros(np.asanyarray(z).shape, dtype=np.float)
+        else:
+            if self._Onu0 == 0:
+                return 0.0
+
+        return self.Ogamma(z) * self.nu_relative_density(z)
+
+    def Tcmb(self, z):
+        """ Return the CMB temperature at redshift ``z``.
+
+        Parameters
+        ----------
+        z : array_like
+          Input redshifts. 
+ + Returns + ------- + Tcmb : `~astropy.units.Quantity` + The temperature of the CMB in K. + """ + + if isiterable(z): + z = np.asarray(z) + return self._Tcmb0 * (1. + z) + + def Tnu(self, z): + """ Return the neutrino temperature at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + Tnu : `~astropy.units.Quantity` + The temperature of the cosmic neutrino background in K. + """ + + if isiterable(z): + z = np.asarray(z) + return self._Tnu0 * (1. + z) + + def nu_relative_density(self, z): + """ Neutrino density function relative to the energy density in + photons. + + Parameters + ---------- + z : array like + Redshift + + Returns + ------- + f : ndarray, or float if z is scalar + The neutrino density scaling factor relative to the density + in photons at each redshift + + Notes + ----- + The density in neutrinos is given by + + .. math:: + + \\rho_{\\nu} \\left(a\\right) = 0.2271 \\, N_{eff} \\, + f\\left(m_{\\nu} a / T_{\\nu 0} \\right) \\, + \\rho_{\\gamma} \\left( a \\right) + + where + + .. math:: + + f \\left(y\\right) = \\frac{120}{7 \\pi^4} + \\int_0^{\\infty} \\, dx \\frac{x^2 \\sqrt{x^2 + y^2}} + {e^x + 1} + + assuming that all neutrino species have the same mass. + If they have different masses, a similar term is calculated + for each one. Note that f has the asymptotic behavior :math:`f(0) = 1`. + This method returns :math:`0.2271 f` using an + analytical fitting formula given in Komatsu et al. 2011, ApJS 192, 18. + """ + + # See Komatsu et al. 
2011, eq 26 and the surrounding discussion + # However, this is modified to handle multiple neutrino masses + # by computing the above for each mass, then summing + prefac = 0.22710731766 # 7/8 (4/11)^4/3 -- see any cosmo book + + # The massive and massless contribution must be handled seperately + # But check for common cases first + if not self._massivenu: + if np.isscalar(z): + return prefac * self._Neff + else: + return prefac * self._Neff *\ + np.ones(np.asanyarray(z).shape, dtype=np.float) + + p = 1.83 + invp = 1.0 / p + if np.isscalar(z): + curr_nu_y = self._nu_y / (1.0 + z) # only includes massive ones + rel_mass_per = (1.0 + (0.3173 * curr_nu_y) ** p) ** invp + rel_mass = rel_mass_per.sum() + self._nmasslessnu + else: + z = np.asarray(z) + retarr = np.empty_like(z) + curr_nu_y = self._nu_y / (1. + np.expand_dims(z, axis=-1)) + rel_mass_per = (1. + (0.3173 * curr_nu_y) ** p) ** invp + rel_mass = rel_mass_per.sum(-1) + self._nmasslessnu + + return prefac * self._neff_per_nu * rel_mass + + def _w_integrand(self, ln1pz): + """ Internal convenience function for w(z) integral.""" + + # See Linder 2003, PRL 90, 91301 eq (5) + # Assumes scalar input, since this should only be called + # inside an integral + + z = exp(ln1pz) - 1.0 + return 1.0 + self.w(z) + + def de_density_scale(self, z): + """ Evaluates the redshift dependence of the dark energy density. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + I : ndarray, or float if input scalar + The scaling of the energy density of dark energy with redshift. + + Notes + ----- + The scaling factor, I, is defined by :math:`\\rho(z) = \\rho_0 I`, + and is given by + + .. 
math::
+
+          I = \\exp \\left( 3 \\int_{a}^1 \\frac{ da^{\\prime} }{ a^{\\prime} }
+          \\left[ 1 + w\\left( a^{\\prime} \\right) \\right] \\right)
+
+        It will generally be helpful for subclasses to overload this method if
+        the integral can be done analytically for the particular dark
+        energy equation of state that they implement.
+        """
+
+        # This allows for an arbitrary w(z) following eq (5) of
+        # Linder 2003, PRL 90, 91301. The code here evaluates
+        # the integral numerically. However, most popular
+        # forms of w(z) are designed to make this integral analytic,
+        # so it is probably a good idea for subclasses to overload this
+        # method if an analytic form is available.
+        #
+        # The integral we actually use (the one given in Linder)
+        # is rewritten in terms of z, so looks slightly different than the
+        # one in the documentation string, but it's the same thing.
+
+        from scipy.integrate import quad
+
+        if isiterable(z):
+            z = np.asarray(z)
+            ival = np.array([quad(self._w_integrand, 0, log(1 + redshift))[0]
+                             for redshift in z])
+            return np.exp(3 * ival)
+        else:
+            ival = quad(self._w_integrand, 0, log(1 + z))[0]
+            return exp(3 * ival)
+
+    def efunc(self, z):
+        """ Function used to calculate H(z), the Hubble parameter.
+
+        Parameters
+        ----------
+        z : array_like
+          Input redshifts.
+
+        Returns
+        -------
+        E : ndarray, or float if input scalar
+          The redshift scaling of the Hubble constant.
+
+        Notes
+        -----
+        The return value, E, is defined such that :math:`H(z) = H_0 E`.
+
+        It is not necessary to override this method, but if de_density_scale
+        takes a particularly simple form, it may be advantageous to. 
+ """ + + if isiterable(z): + z = np.asarray(z) + + Om0, Ode0, Ok0 = self._Om0, self._Ode0, self._Ok0 + if self._massivenu: + Or = self._Ogamma0 * (1 + self.nu_relative_density(z)) + else: + Or = self._Ogamma0 + self._Onu0 + zp1 = 1.0 + z + + return np.sqrt(zp1 ** 2 * ((Or * zp1 + Om0) * zp1 + Ok0) + + Ode0 * self.de_density_scale(z)) + + def inv_efunc(self, z): + """Inverse of efunc. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + E : ndarray, or float if input scalar + The redshift scaling of the inverse Hubble constant. + """ + + # Avoid the function overhead by repeating code + if isiterable(z): + z = np.asarray(z) + Om0, Ode0, Ok0 = self._Om0, self._Ode0, self._Ok0 + if self._massivenu: + Or = self._Ogamma0 * (1 + self.nu_relative_density(z)) + else: + Or = self._Ogamma0 + self._Onu0 + zp1 = 1.0 + z + + return 1.0 / np.sqrt(zp1 ** 2 * ((Or * zp1 + Om0) * zp1 + Ok0) + + Ode0 * self.de_density_scale(z)) + + def _tfunc(self, z): + """ Integrand of the lookback time. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + I : ndarray, or float if input scalar + The integrand for the lookback time + + References + ---------- + Eqn 30 from Hogg 1999. + """ + + if isiterable(z): + zp1 = 1.0 + np.asarray(z) + else: + zp1 = 1. + z + + return 1.0 / (zp1 * self.efunc(z)) + + def _xfunc(self, z): + """ Integrand of the absorption distance. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + X : ndarray, or float if input scalar + The integrand for the absorption distance + + References + ---------- + See Hogg 1999 section 11. + """ + + if isiterable(z): + zp1 = 1.0 + np.asarray(z) + else: + zp1 = 1. + z + return zp1 ** 2 / self.efunc(z) + + def H(self, z): + """ Hubble parameter (km/s/Mpc) at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + H : `~astropy.units.Quantity` + Hubble parameter at each input redshift. 
+ """ + + return self._H0 * self.efunc(z) + + def scale_factor(self, z): + """ Scale factor at redshift ``z``. + + The scale factor is defined as :math:`a = 1 / (1 + z)`. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + a : ndarray, or float if input scalar + Scale factor at each input redshift. + """ + + if isiterable(z): + z = np.asarray(z) + + return 1. / (1. + z) + + def lookback_time(self, z): + """ Lookback time in Gyr to redshift ``z``. + + The lookback time is the difference between the age of the + Universe now and the age at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar + + Returns + ------- + t : `~astropy.units.Quantity` + Lookback time in Gyr to each input redshift. + + See Also + -------- + z_at_value : Find the redshift corresponding to a lookback time. + """ + + from scipy.integrate import quad + if not isiterable(z): + return self._hubble_time * quad(self._tfunc, 0, z)[0] + + out = np.array([quad(self._tfunc, 0, redshift)[0] for redshift in z]) + return self._hubble_time * np.array(out) + + def age(self, z): + """ Age of the universe in Gyr at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. + + Returns + ------- + t : `~astropy.units.Quantity` + The age of the universe in Gyr at each input redshift. + + See Also + -------- + z_at_value : Find the redshift corresponding to an age. + """ + + from scipy.integrate import quad + if not isiterable(z): + return self._hubble_time * quad(self._tfunc, z, np.inf)[0] + + out = [quad(self._tfunc, redshift, np.inf)[0] for redshift in z] + return self._hubble_time * np.array(out) + + def critical_density(self, z): + """ Critical density in grams per cubic cm at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + rho : `~astropy.units.Quantity` + Critical density in g/cm^3 at each input redshift. 
+ """ + + return self._critical_density0 * (self.efunc(z)) ** 2 + + def comoving_distance(self, z): + """ Comoving line-of-sight distance in Mpc at a given + redshift. + + The comoving distance along the line-of-sight between two + objects remains constant with time for objects in the Hubble + flow. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. + + Returns + ------- + d : ndarray, or float if input scalar + Comoving distance in Mpc to each input redshift. + """ + + from scipy.integrate import quad + if not isiterable(z): + return self._hubble_distance * quad(self.inv_efunc, 0, z)[0] + + out = [quad(self.inv_efunc, 0, redshift)[0] for redshift in z] + return self._hubble_distance * np.array(out) + + def comoving_transverse_distance(self, z): + """ Comoving transverse distance in Mpc at a given redshift. + + This value is the transverse comoving distance at redshift ``z`` + corresponding to an angular separation of 1 radian. This is + the same as the comoving distance if omega_k is zero (as in + the current concordance lambda CDM model). + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. + + Returns + ------- + d : `~astropy.units.Quantity` + Comoving transverse distance in Mpc at each input redshift. + + Notes + ----- + This quantity also called the 'proper motion distance' in some + texts. + """ + + Ok0 = self._Ok0 + dc = self.comoving_distance(z) + if Ok0 == 0: + return dc + sqrtOk0 = sqrt(abs(Ok0)) + dh = self._hubble_distance + if Ok0 > 0: + return dh / sqrtOk0 * np.sinh(sqrtOk0 * dc.value / dh.value) + else: + return dh / sqrtOk0 * np.sin(sqrtOk0 * dc.value / dh.value) + + def angular_diameter_distance(self, z): + """ Angular diameter distance in Mpc at a given redshift. + + This gives the proper (sometimes called 'physical') transverse + distance corresponding to an angle of 1 radian for an object + at redshift ``z``. 
+ + Weinberg, 1972, pp 421-424; Weedman, 1986, pp 65-67; Peebles, + 1993, pp 325-327. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. + + Returns + ------- + d : `~astropy.units.Quantity` + Angular diameter distance in Mpc at each input redshift. + """ + + if isiterable(z): + z = np.asarray(z) + + return self.comoving_transverse_distance(z) / (1. + z) + + def luminosity_distance(self, z): + """ Luminosity distance in Mpc at redshift ``z``. + + This is the distance to use when converting between the + bolometric flux from an object at redshift ``z`` and its + bolometric luminosity. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. + + Returns + ------- + d : `~astropy.units.Quantity` + Luminosity distance in Mpc at each input redshift. + + See Also + -------- + z_at_value : Find the redshift corresponding to a luminosity distance. + + References + ---------- + Weinberg, 1972, pp 420-424; Weedman, 1986, pp 60-62. + """ + + if isiterable(z): + z = np.asarray(z) + + return (1. + z) * self.comoving_transverse_distance(z) + + def angular_diameter_distance_z1z2(self, z1, z2): + """ Angular diameter distance between objects at 2 redshifts. + Useful for gravitational lensing. + + Parameters + ---------- + z1, z2 : array_like, shape (N,) + Input redshifts. z2 must be large than z1. + + Returns + ------- + d : `~astropy.units.Quantity`, shape (N,) or single if input scalar + The angular diameter distance between each input redshift + pair. + + Raises + ------ + CosmologyError + If omega_k is < 0. + + Notes + ----- + This method only works for flat or open curvature + (omega_k >= 0). 
+ """ + + # does not work for negative curvature + Ok0 = self._Ok0 + if Ok0 < 0: + raise CosmologyError('Ok0 must be >= 0 to use this method.') + + outscalar = False + if not isiterable(z1) and not isiterable(z2): + outscalar = True + + z1 = np.atleast_1d(z1) + z2 = np.atleast_1d(z2) + + if z1.size != z2.size: + raise ValueError('z1 and z2 must be the same size.') + + if (z1 > z2).any(): + raise ValueError('z2 must greater than z1') + + # z1 < z2 + if (z2 < z1).any(): + z1, z2 = z2, z1 + + dm1 = self.comoving_transverse_distance(z1).value + dm2 = self.comoving_transverse_distance(z2).value + dh_2 = self._hubble_distance.value ** 2 + + if Ok0 == 0: + # Common case worth checking + out = (dm2 - dm1) / (1. + z2) + else: + out = ((dm2 * np.sqrt(1. + Ok0 * dm1 ** 2 / dh_2) - + dm1 * np.sqrt(1. + Ok0 * dm2 ** 2 / dh_2)) / + (1. + z2)) + + if outscalar: + return u.Quantity(out[0], u.Mpc) + + return u.Quantity(out, u.Mpc) + + def absorption_distance(self, z): + """ Absorption distance at redshift ``z``. + + This is used to calculate the number of objects with some + cross section of absorption and number density intersecting a + sightline per unit redshift path. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. + + Returns + ------- + d : float or ndarray + Absorption distance (dimensionless) at each input redshift. + + References + ---------- + Hogg 1999 Section 11. (astro-ph/9905116) + Bahcall, John N. and Peebles, P.J.E. 1969, ApJ, 156L, 7B + """ + + from scipy.integrate import quad + if not isiterable(z): + return quad(self._xfunc, 0, z)[0] + + out = np.array([quad(self._xfunc, 0, redshift)[0] for redshift in z]) + return out + + def distmod(self, z): + """ Distance modulus at redshift ``z``. + + The distance modulus is defined as the (apparent magnitude - + absolute magnitude) for an object at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. 
+ + Returns + ------- + distmod : `~astropy.units.Quantity` + Distance modulus at each input redshift, in magnitudes + + See Also + -------- + z_at_value : Find the redshift corresponding to a distance modulus. + """ + + # Remember that the luminosity distance is in Mpc + # Abs is necessary because in certain obscure closed cosmologies + # the distance modulus can be negative -- which is okay because + # it enters as the square. + val = 5. * np.log10(abs(self.luminosity_distance(z).value)) + 25.0 + return u.Quantity(val, u.mag) + + def comoving_volume(self, z): + """ Comoving volume in cubic Mpc at redshift ``z``. + + This is the volume of the universe encompassed by redshifts less + than ``z``. For the case of omega_k = 0 it is a sphere of radius + `comoving_distance` but it is less intuitive + if omega_k is not 0. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. + + Returns + ------- + V : `~astropy.units.Quantity` + Comoving volume in :math:`Mpc^3` at each input redshift. + """ + + Ok0 = self._Ok0 + if Ok0 == 0: + return 4. / 3. * pi * self.comoving_distance(z) ** 3 + + dh = self._hubble_distance.value # .value for speed + dm = self.comoving_transverse_distance(z).value + term1 = 4. * pi * dh ** 3 / (2. * Ok0) * u.Mpc ** 3 + term2 = dm / dh * np.sqrt(1 + Ok0 * (dm / dh) ** 2) + term3 = sqrt(abs(Ok0)) * dm / dh + + if Ok0 > 0: + return term1 * (term2 - 1. / sqrt(abs(Ok0)) * np.arcsinh(term3)) + else: + return term1 * (term2 - 1. / sqrt(abs(Ok0)) * np.arcsin(term3)) + + def differential_comoving_volume(self, z): + """Differential comoving volume at redshift z. + + Useful for calculating the effective comoving volume. + For example, allows for integration over a comoving volume + that has a sensitivity function that changes with redshift. + The total comoving volume is given by integrating + differential_comoving_volume to redshift z + and multiplying by a solid angle. 
+ + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + dV : `~astropy.units.Quantity` + Differential comoving volume per redshift per steradian at + each input redshift.""" + dh = self._hubble_distance + da = self.angular_diameter_distance(z) + zp1 = 1.0 + z + return dh * (zp1 ** 2.0 * da ** 2.0) / u.Quantity(self.efunc(z), + u.steradian) + + def kpc_comoving_per_arcmin(self, z): + """ Separation in transverse comoving kpc corresponding to an + arcminute at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. + + Returns + ------- + d : `~astropy.units.Quantity` + The distance in comoving kpc corresponding to an arcmin at each + input redshift. + """ + return (self.comoving_transverse_distance(z).to(u.kpc) * + arcmin_in_radians / u.arcmin) + + def kpc_proper_per_arcmin(self, z): + """ Separation in transverse proper kpc corresponding to an + arcminute at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. + + Returns + ------- + d : `~astropy.units.Quantity` + The distance in proper kpc corresponding to an arcmin at each + input redshift. + """ + return (self.angular_diameter_distance(z).to(u.kpc) * + arcmin_in_radians / u.arcmin) + + def arcsec_per_kpc_comoving(self, z): + """ Angular separation in arcsec corresponding to a comoving kpc + at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. + + Returns + ------- + theta : `~astropy.units.Quantity` + The angular separation in arcsec corresponding to a comoving kpc + at each input redshift. + """ + return u.arcsec / (self.comoving_transverse_distance(z).to(u.kpc) * + arcsec_in_radians) + + def arcsec_per_kpc_proper(self, z): + """ Angular separation in arcsec corresponding to a proper kpc at + redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. Must be 1D or scalar. 
+ + Returns + ------- + theta : `~astropy.units.Quantity` + The angular separation in arcsec corresponding to a proper kpc + at each input redshift. + """ + return u.arcsec / (self.angular_diameter_distance(z).to(u.kpc) * + arcsec_in_radians) + + +class LambdaCDM(FLRW): + """FLRW cosmology with a cosmological constant and curvature. + + This has no additional attributes beyond those of FLRW. + + Parameters + ---------- + + H0 : float or `~astropy.units.Quantity` + Hubble constant at z = 0. If a float, must be in [km/sec/Mpc] + + Om0 : float + Omega matter: density of non-relativistic matter in units of the + critical density at z=0. + + Ode0 : float + Omega dark energy: density of the cosmological constant in units of the + critical density at z=0. + + Tcmb0 : float or `~astropy.units.Quantity` + Temperature of the CMB z=0. If a float, must be in [K]. Default: 2.725. + + Neff : float + Effective number of Neutrino species. Default 3.04. + + m_nu : `~astropy.units.Quantity` + Mass of each neutrino species. If this is a scalar Quantity, then all + neutrino species are assumed to have that mass. Otherwise, the mass of + each species. The actual number of neutrino species (and hence the + number of elements of m_nu if it is not scalar) must be the floor of + Neff. Usually this means you must provide three neutrino masses unless + you are considering something like a sterile neutrino. + + name : str + Optional name for this cosmological object. + + Examples + -------- + >>> from astropy.cosmology import LambdaCDM + >>> cosmo = LambdaCDM(H0=70, Om0=0.3, Ode0=0.7) + + The comoving distance in Mpc at redshift z: + + >>> z = 0.5 + >>> dc = cosmo.comoving_distance(z) + """ + + def __init__(self, H0, Om0, Ode0, Tcmb0=2.725, Neff=3.04, + m_nu=u.Quantity(0.0, u.eV), name=None): + + FLRW.__init__(self, H0, Om0, Ode0, Tcmb0, Neff, m_nu, name=name) + + def w(self, z): + """Returns dark energy equation of state at redshift ``z``. 
+ + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + w : ndarray, or float if input scalar + The dark energy equation of state + + Notes + ------ + The dark energy equation of state is defined as + :math:`w(z) = P(z)/\\rho(z)`, where :math:`P(z)` is the + pressure at redshift z and :math:`\\rho(z)` is the density + at redshift z, both in units where c=1. Here this is + :math:`w(z) = -1`. + """ + + if np.isscalar(z): + return -1.0 + else: + return -1.0 * np.ones(np.asanyarray(z).shape, dtype=np.float) + + def de_density_scale(self, z): + """ Evaluates the redshift dependence of the dark energy density. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + I : ndarray, or float if input scalar + The scaling of the energy density of dark energy with redshift. + + Notes + ----- + The scaling factor, I, is defined by :math:`\\rho(z) = \\rho_0 I`, + and in this case is given by :math:`I = 1`. + """ + + if np.isscalar(z): + return 1. + else: + return np.ones(np.asanyarray(z).shape, dtype=np.float) + + def efunc(self, z): + """ Function used to calculate H(z), the Hubble parameter. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + E : ndarray, or float if input scalar + The redshift scaling of the Hubble consant. + + Notes + ----- + The return value, E, is defined such that :math:`H(z) = H_0 E`. + """ + + if isiterable(z): + z = np.asarray(z) + + # We override this because it takes a particularly simple + # form for a cosmological constant + Om0, Ode0, Ok0 = self._Om0, self._Ode0, self._Ok0 + if self._massivenu: + Or = self._Ogamma0 * (1. + self.nu_relative_density(z)) + else: + Or = self._Ogamma0 + self._Onu0 + zp1 = 1.0 + z + + return np.sqrt(zp1 ** 2 * ((Or * zp1 + Om0) * zp1 + Ok0) + Ode0) + + def inv_efunc(self, z): + r""" Function used to calculate :math:`\frac{1}{H_z}`. + + Parameters + ---------- + z : array_like + Input redshifts. 
+ + Returns + ------- + E : ndarray, or float if input scalar + The inverse redshift scaling of the Hubble constant. + + Notes + ----- + The return value, E, is defined such that :math:`H_z = H_0 / + E`. + """ + + if isiterable(z): + z = np.asarray(z) + Om0, Ode0, Ok0 = self._Om0, self._Ode0, self._Ok0 + if self._massivenu: + Or = self._Ogamma0 * (1 + self.nu_relative_density(z)) + else: + Or = self._Ogamma0 + self._Onu0 + zp1 = 1.0 + z + + return 1.0 / np.sqrt(zp1 ** 2 * ((Or * zp1 + Om0) * zp1 + Ok0) + Ode0) + + +class FlatLambdaCDM(LambdaCDM): + """FLRW cosmology with a cosmological constant and no curvature. + + This has no additional attributes beyond those of FLRW. + + Parameters + ---------- + H0 : float or `~astropy.units.Quantity` + Hubble constant at z = 0. If a float, must be in [km/sec/Mpc] + + Om0 : float + Omega matter: density of non-relativistic matter in units of the + critical density at z=0. + + Tcmb0 : float or `~astropy.units.Quantity` + Temperature of the CMB z=0. If a float, must be in [K]. Default: 2.725. + + Neff : float + Effective number of Neutrino species. Default 3.04. + + m_nu : `~astropy.units.Quantity` + Mass of each neutrino species. If this is a scalar Quantity, then all + neutrino species are assumed to have that mass. Otherwise, the mass of + each species. The actual number of neutrino species (and hence the + number of elements of m_nu if it is not scalar) must be the floor of + Neff. Usually this means you must provide three neutrino masses unless + you are considering something like a sterile neutrino. + + name : str + Optional name for this cosmological object. 
class FlatLambdaCDM(LambdaCDM):
    """FLRW cosmology with a cosmological constant and no curvature.

    This has no additional attributes beyond those of FLRW.

    Parameters
    ----------
    H0 : float or `~astropy.units.Quantity`
        Hubble constant at z = 0. If a float, must be in [km/sec/Mpc]

    Om0 : float
        Omega matter: density of non-relativistic matter in units of the
        critical density at z=0.

    Tcmb0 : float or `~astropy.units.Quantity`
        Temperature of the CMB z=0. If a float, must be in [K].
        Default: 2.725.

    Neff : float
        Effective number of Neutrino species. Default 3.04.

    m_nu : `~astropy.units.Quantity`
        Mass of each neutrino species. A scalar Quantity applies to all
        species; otherwise give one mass per species.  The number of
        species (and hence the number of elements of m_nu if it is not
        scalar) must be the floor of Neff.

    name : str
        Optional name for this cosmological object.

    Examples
    --------
    >>> from astropy.cosmology import FlatLambdaCDM
    >>> cosmo = FlatLambdaCDM(H0=70, Om0=0.3)

    The comoving distance in Mpc at redshift z:

    >>> z = 0.5
    >>> dc = cosmo.comoving_distance(z)
    """

    def __init__(self, H0, Om0, Tcmb0=2.725, Neff=3.04,
                 m_nu=u.Quantity(0.0, u.eV), name=None):

        FLRW.__init__(self, H0, Om0, 0.0, Tcmb0, Neff, m_nu, name=name)
        # Derive Ode0 after the fact so the universe is exactly flat:
        # Om0 + Ode0 + Ogamma0 + Onu0 = 1 and Ok0 = 0.
        self._Ode0 = 1.0 - self._Om0 - self._Ogamma0 - self._Onu0
        self._Ok0 = 0.0

    def efunc(self, z):
        """ Function used to calculate H(z), the Hubble parameter.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        E : ndarray, or float if input scalar
            The redshift scaling of the Hubble constant, defined such
            that :math:`H(z) = H_0 E`.
        """

        if isiterable(z):
            z = np.asarray(z)

        # Overridden because E(z) takes a particularly simple closed form
        # for a flat cosmological-constant cosmology (no curvature term).
        Om0, Ode0 = self._Om0, self._Ode0
        if self._massivenu:
            Or = self._Ogamma0 * (1. + self.nu_relative_density(z))
        else:
            Or = self._Ogamma0 + self._Onu0
        zp1 = 1.0 + z

        return np.sqrt(zp1 ** 3 * (Or * zp1 + Om0) + Ode0)

    def inv_efunc(self, z):
        r"""Function used to calculate :math:`\frac{1}{H_z}`.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        E : ndarray, or float if input scalar
            The inverse redshift scaling of the Hubble constant, defined
            such that :math:`H_z = H_0 / E`.
        """

        if isiterable(z):
            z = np.asarray(z)
        Om0, Ode0 = self._Om0, self._Ode0
        if self._massivenu:
            Or = self._Ogamma0 * (1. + self.nu_relative_density(z))
        else:
            Or = self._Ogamma0 + self._Onu0
        zp1 = 1.0 + z

        return 1.0 / np.sqrt(zp1 ** 3 * (Or * zp1 + Om0) + Ode0)

    def __repr__(self):
        retstr = "{0}H0={1:.3g}, Om0={2:.3g}, Tcmb0={3:.4g}, "\
                 "Neff={4:.3g}, m_nu={5})"
        return retstr.format(self._namelead(), self._H0, self._Om0,
                             self._Tcmb0, self._Neff, self.m_nu)
class wCDM(FLRW):
    """FLRW cosmology with a constant dark energy equation of state
    and curvature.

    This has one additional attribute beyond those of FLRW.

    Parameters
    ----------
    H0 : float or `~astropy.units.Quantity`
        Hubble constant at z = 0. If a float, must be in [km/sec/Mpc]

    Om0 : float
        Omega matter: density of non-relativistic matter in units of the
        critical density at z=0.

    Ode0 : float
        Omega dark energy: density of dark energy in units of the critical
        density at z=0.

    w0 : float
        Dark energy equation of state at all redshifts. This is
        pressure/density for dark energy in units where c=1. A cosmological
        constant has w0=-1.0.

    Tcmb0 : float or `~astropy.units.Quantity`
        Temperature of the CMB z=0. If a float, must be in [K].
        Default: 2.725.

    Neff : float
        Effective number of Neutrino species. Default 3.04.

    m_nu : `~astropy.units.Quantity`
        Mass of each neutrino species. A scalar Quantity applies to all
        species; otherwise give one mass per species.  The number of
        species (and hence the number of elements of m_nu if it is not
        scalar) must be the floor of Neff.

    name : str
        Optional name for this cosmological object.

    Examples
    --------
    >>> from astropy.cosmology import wCDM
    >>> cosmo = wCDM(H0=70, Om0=0.3, Ode0=0.7, w0=-0.9)

    The comoving distance in Mpc at redshift z:

    >>> z = 0.5
    >>> dc = cosmo.comoving_distance(z)
    """

    def __init__(self, H0, Om0, Ode0, w0=-1., Tcmb0=2.725,
                 Neff=3.04, m_nu=u.Quantity(0.0, u.eV), name=None):

        FLRW.__init__(self, H0, Om0, Ode0, Tcmb0, Neff, m_nu, name=name)
        self._w0 = float(w0)

    @property
    def w0(self):
        """ Dark energy equation of state"""
        return self._w0

    def w(self, z):
        """Returns dark energy equation of state at redshift ``z``.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        w : ndarray, or float if input scalar
            The dark energy equation of state

        Notes
        -----
        The dark energy equation of state is defined as
        :math:`w(z) = P(z)/\\rho(z)`, where :math:`P(z)` is the
        pressure at redshift z and :math:`\\rho(z)` is the density
        at redshift z, both in units where c=1. Here this is
        :math:`w(z) = w_0`.
        """

        if np.isscalar(z):
            return self._w0
        # ``np.float`` was only an alias of the builtin ``float`` and was
        # removed in NumPy 1.24; use the builtin directly.
        return self._w0 * np.ones(np.asanyarray(z).shape, dtype=float)

    def de_density_scale(self, z):
        """ Evaluates the redshift dependence of the dark energy density.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        I : ndarray, or float if input scalar
            The scaling of the energy density of dark energy with redshift.

        Notes
        -----
        The scaling factor, I, is defined by :math:`\\rho(z) = \\rho_0 I`,
        and in this case is given by
        :math:`I = \\left(1 + z\\right)^{3\\left(1 + w_0\\right)}`
        """

        if isiterable(z):
            z = np.asarray(z)
        return (1. + z) ** (3. * (1. + self._w0))

    def efunc(self, z):
        """ Function used to calculate H(z), the Hubble parameter.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        E : ndarray, or float if input scalar
            The redshift scaling of the Hubble constant, defined such
            that :math:`H(z) = H_0 E`.
        """

        if isiterable(z):
            z = np.asarray(z)
        Om0, Ode0, Ok0, w0 = self._Om0, self._Ode0, self._Ok0, self._w0
        if self._massivenu:
            Or = self._Ogamma0 * (1. + self.nu_relative_density(z))
        else:
            Or = self._Ogamma0 + self._Onu0
        zp1 = 1.0 + z

        return np.sqrt(zp1 ** 2 * ((Or * zp1 + Om0) * zp1 + Ok0) +
                       Ode0 * zp1 ** (3. * (1. + w0)))

    def inv_efunc(self, z):
        r""" Function used to calculate :math:`\frac{1}{H_z}`.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        E : ndarray, or float if input scalar
            The inverse redshift scaling of the Hubble constant, defined
            such that :math:`H_z = H_0 / E`.
        """

        if isiterable(z):
            z = np.asarray(z)
        Om0, Ode0, Ok0, w0 = self._Om0, self._Ode0, self._Ok0, self._w0
        if self._massivenu:
            Or = self._Ogamma0 * (1. + self.nu_relative_density(z))
        else:
            Or = self._Ogamma0 + self._Onu0
        zp1 = 1.0 + z

        return 1.0 / np.sqrt(zp1 ** 2 * ((Or * zp1 + Om0) * zp1 + Ok0) +
                             Ode0 * zp1 ** (3. * (1. + w0)))

    def __repr__(self):
        retstr = "{0}H0={1:.3g}, Om0={2:.3g}, Ode0={3:.3g}, w0={4:.3g}, "\
                 "Tcmb0={5:.4g}, Neff={6:.3g}, m_nu={7})"
        return retstr.format(self._namelead(), self._H0, self._Om0,
                             self._Ode0, self._w0, self._Tcmb0, self._Neff,
                             self.m_nu)
class FlatwCDM(wCDM):
    """FLRW cosmology with a constant dark energy equation of state
    and no spatial curvature.

    This has one additional attribute beyond those of FLRW.

    Parameters
    ----------
    H0 : float or `~astropy.units.Quantity`
        Hubble constant at z = 0. If a float, must be in [km/sec/Mpc]

    Om0 : float
        Omega matter: density of non-relativistic matter in units of the
        critical density at z=0.

    w0 : float
        Dark energy equation of state at all redshifts. This is
        pressure/density for dark energy in units where c=1. A cosmological
        constant has w0=-1.0.

    Tcmb0 : float or `~astropy.units.Quantity`
        Temperature of the CMB z=0. If a float, must be in [K].
        Default: 2.725.

    Neff : float
        Effective number of Neutrino species. Default 3.04.

    m_nu : `~astropy.units.Quantity`
        Mass of each neutrino species. A scalar Quantity applies to all
        species; otherwise give one mass per species.  The number of
        species (and hence the number of elements of m_nu if it is not
        scalar) must be the floor of Neff.

    name : str
        Optional name for this cosmological object.

    Examples
    --------
    >>> from astropy.cosmology import FlatwCDM
    >>> cosmo = FlatwCDM(H0=70, Om0=0.3, w0=-0.9)

    The comoving distance in Mpc at redshift z:

    >>> z = 0.5
    >>> dc = cosmo.comoving_distance(z)
    """

    def __init__(self, H0, Om0, w0=-1., Tcmb0=2.725,
                 Neff=3.04, m_nu=u.Quantity(0.0, u.eV), name=None):

        FLRW.__init__(self, H0, Om0, 0.0, Tcmb0, Neff, m_nu, name=name)
        self._w0 = float(w0)
        # Derive Ode0 after the fact so the universe is exactly flat.
        self._Ode0 = 1.0 - self._Om0 - self._Ogamma0 - self._Onu0
        self._Ok0 = 0.0

    def efunc(self, z):
        """ Function used to calculate H(z), the Hubble parameter.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        E : ndarray, or float if input scalar
            The redshift scaling of the Hubble constant, defined such
            that :math:`H(z) = H_0 E`.
        """

        if isiterable(z):
            z = np.asarray(z)
        Om0, Ode0, w0 = self._Om0, self._Ode0, self._w0
        if self._massivenu:
            Or = self._Ogamma0 * (1. + self.nu_relative_density(z))
        else:
            Or = self._Ogamma0 + self._Onu0
        zp1 = 1. + z

        return np.sqrt(zp1 ** 3 * (Or * zp1 + Om0) +
                       Ode0 * zp1 ** (3. * (1. + w0)))

    def inv_efunc(self, z):
        r""" Function used to calculate :math:`\frac{1}{H_z}`.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        E : ndarray, or float if input scalar
            The inverse redshift scaling of the Hubble constant, defined
            such that :math:`H_z = H_0 / E`.
        """

        if isiterable(z):
            z = np.asarray(z)
        # Ok0 is identically zero for this class, so it is not unpacked
        # (the previous version bound it to an unused local).
        Om0, Ode0, w0 = self._Om0, self._Ode0, self._w0
        if self._massivenu:
            Or = self._Ogamma0 * (1. + self.nu_relative_density(z))
        else:
            Or = self._Ogamma0 + self._Onu0
        zp1 = 1. + z

        return 1. / np.sqrt(zp1 ** 3 * (Or * zp1 + Om0) +
                            Ode0 * zp1 ** (3. * (1. + w0)))

    def __repr__(self):
        retstr = "{0}H0={1:.3g}, Om0={2:.3g}, w0={3:.3g}, Tcmb0={4:.4g}, "\
                 "Neff={5:.3g}, m_nu={6})"
        return retstr.format(self._namelead(), self._H0, self._Om0, self._w0,
                             self._Tcmb0, self._Neff, self.m_nu)
class w0waCDM(FLRW):
    """FLRW cosmology with a CPL dark energy equation of state and curvature.

    The equation for the dark energy equation of state uses the
    CPL form as described in Chevallier & Polarski Int. J. Mod. Phys.
    D10, 213 (2001) and Linder PRL 90, 91301 (2003):
    :math:`w(z) = w_0 + w_a (1-a) = w_0 + w_a z / (1+z)`.

    Parameters
    ----------
    H0 : float or `~astropy.units.Quantity`
        Hubble constant at z = 0. If a float, must be in [km/sec/Mpc]

    Om0 : float
        Omega matter: density of non-relativistic matter in units of the
        critical density at z=0.

    Ode0 : float
        Omega dark energy: density of dark energy in units of the critical
        density at z=0.

    w0 : float
        Dark energy equation of state at z=0 (a=1). This is pressure/density
        for dark energy in units where c=1.

    wa : float
        Negative derivative of the dark energy equation of state with respect
        to the scale factor. A cosmological constant has w0=-1.0 and wa=0.0.

    Tcmb0 : float or `~astropy.units.Quantity`
        Temperature of the CMB z=0. If a float, must be in [K].
        Default: 2.725.

    Neff : float
        Effective number of Neutrino species. Default 3.04.

    m_nu : `~astropy.units.Quantity`
        Mass of each neutrino species. A scalar Quantity applies to all
        species; otherwise give one mass per species.  The number of
        species (and hence the number of elements of m_nu if it is not
        scalar) must be the floor of Neff.

    name : str
        Optional name for this cosmological object.

    Examples
    --------
    >>> from astropy.cosmology import w0waCDM
    >>> cosmo = w0waCDM(H0=70, Om0=0.3, Ode0=0.7, w0=-0.9, wa=0.2)

    The comoving distance in Mpc at redshift z:

    >>> z = 0.5
    >>> dc = cosmo.comoving_distance(z)
    """

    def __init__(self, H0, Om0, Ode0, w0=-1., wa=0., Tcmb0=2.725,
                 Neff=3.04, m_nu=u.Quantity(0.0, u.eV), name=None):

        FLRW.__init__(self, H0, Om0, Ode0, Tcmb0, Neff, m_nu, name=name)
        self._w0 = float(w0)
        self._wa = float(wa)

    @property
    def w0(self):
        """ Dark energy equation of state at z=0"""
        return self._w0

    @property
    def wa(self):
        """ Negative derivative of dark energy equation of state w.r.t. a"""
        return self._wa

    def w(self, z):
        """Returns dark energy equation of state at redshift ``z``.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        w : ndarray, or float if input scalar
            The dark energy equation of state

        Notes
        -----
        The dark energy equation of state is defined as
        :math:`w(z) = P(z)/\\rho(z)`, where :math:`P(z)` is the
        pressure at redshift z and :math:`\\rho(z)` is the density
        at redshift z, both in units where c=1. Here this is
        :math:`w(z) = w_0 + w_a (1 - a) = w_0 + w_a \\frac{z}{1+z}`.
        """

        if isiterable(z):
            z = np.asarray(z)

        return self._w0 + self._wa * z / (1.0 + z)

    def de_density_scale(self, z):
        """ Evaluates the redshift dependence of the dark energy density.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        I : ndarray, or float if input scalar
            The scaling of the energy density of dark energy with redshift.

        Notes
        -----
        The scaling factor, I, is defined by :math:`\\rho(z) = \\rho_0 I`,
        and in this case is given by

        .. math::

            I = \\left(1 + z\\right)^{3 \\left(1 + w_0 + w_a\\right)}
            \\exp \\left(-3 w_a \\frac{z}{1+z}\\right)

        """
        if isiterable(z):
            z = np.asarray(z)
        zp1 = 1.0 + z
        return zp1 ** (3 * (1 + self._w0 + self._wa)) * \
            np.exp(-3 * self._wa * z / zp1)

    def __repr__(self):
        retstr = "{0}H0={1:.3g}, Om0={2:.3g}, "\
                 "Ode0={3:.3g}, w0={4:.3g}, wa={5:.3g}, Tcmb0={6:.4g}, "\
                 "Neff={7:.3g}, m_nu={8})"
        return retstr.format(self._namelead(), self._H0, self._Om0,
                             self._Ode0, self._w0, self._wa,
                             self._Tcmb0, self._Neff, self.m_nu)
class Flatw0waCDM(w0waCDM):
    """FLRW cosmology with a CPL dark energy equation of state and no
    curvature.

    The equation for the dark energy equation of state uses the
    CPL form as described in Chevallier & Polarski Int. J. Mod. Phys.
    D10, 213 (2001) and Linder PRL 90, 91301 (2003):
    :math:`w(z) = w_0 + w_a (1-a) = w_0 + w_a z / (1+z)`.

    Parameters
    ----------
    H0 : float or `~astropy.units.Quantity`
        Hubble constant at z = 0. If a float, must be in [km/sec/Mpc]

    Om0 : float
        Omega matter: density of non-relativistic matter in units of the
        critical density at z=0.

    w0 : float
        Dark energy equation of state at z=0 (a=1). This is pressure/density
        for dark energy in units where c=1.

    wa : float
        Negative derivative of the dark energy equation of state with respect
        to the scale factor. A cosmological constant has w0=-1.0 and wa=0.0.

    Tcmb0 : float or `~astropy.units.Quantity`
        Temperature of the CMB z=0. If a float, must be in [K].
        Default: 2.725.

    Neff : float
        Effective number of Neutrino species. Default 3.04.

    m_nu : `~astropy.units.Quantity`
        Mass of each neutrino species. A scalar Quantity applies to all
        species; otherwise give one mass per species.  The number of
        species (and hence the number of elements of m_nu if it is not
        scalar) must be the floor of Neff.

    name : str
        Optional name for this cosmological object.

    Examples
    --------
    >>> from astropy.cosmology import Flatw0waCDM
    >>> cosmo = Flatw0waCDM(H0=70, Om0=0.3, w0=-0.9, wa=0.2)

    The comoving distance in Mpc at redshift z:

    >>> z = 0.5
    >>> dc = cosmo.comoving_distance(z)
    """

    def __init__(self, H0, Om0, w0=-1., wa=0., Tcmb0=2.725,
                 Neff=3.04, m_nu=u.Quantity(0.0, u.eV), name=None):

        FLRW.__init__(self, H0, Om0, 0.0, Tcmb0, Neff, m_nu, name=name)
        # Derive Ode0 after the fact so the universe is exactly flat.
        self._Ode0 = 1.0 - self._Om0 - self._Ogamma0 - self._Onu0
        self._Ok0 = 0.0
        self._w0 = float(w0)
        self._wa = float(wa)

    def __repr__(self):
        # Include wa: the previous version omitted it, so two cosmologies
        # differing only in wa had identical reprs.
        retstr = "{0}H0={1:.3g}, Om0={2:.3g}, "\
                 "w0={3:.3g}, wa={4:.3g}, Tcmb0={5:.4g}, Neff={6:.3g}, "\
                 "m_nu={7})"
        return retstr.format(self._namelead(), self._H0, self._Om0, self._w0,
                             self._wa, self._Tcmb0, self._Neff, self.m_nu)
class wpwaCDM(FLRW):
    """FLRW cosmology with a CPL dark energy equation of state, a pivot
    redshift, and curvature.

    The equation for the dark energy equation of state uses the
    CPL form as described in Chevallier & Polarski Int. J. Mod. Phys.
    D10, 213 (2001) and Linder PRL 90, 91301 (2003), but modified
    to have a pivot redshift as in the findings of the Dark Energy
    Task Force (Albrecht et al. arXiv:0901.0721 (2009)):
    :math:`w(a) = w_p + w_a (a_p - a) = w_p + w_a( 1/(1+zp) - 1/(1+z) )`.

    Parameters
    ----------
    H0 : float or `~astropy.units.Quantity`
        Hubble constant at z = 0. If a float, must be in [km/sec/Mpc]

    Om0 : float
        Omega matter: density of non-relativistic matter in units of the
        critical density at z=0.

    Ode0 : float
        Omega dark energy: density of dark energy in units of the critical
        density at z=0.

    wp : float
        Dark energy equation of state at the pivot redshift zp. This is
        pressure/density for dark energy in units where c=1.

    wa : float
        Negative derivative of the dark energy equation of state with respect
        to the scale factor. A cosmological constant has w0=-1.0 and wa=0.0.

    zp : float
        Pivot redshift -- the redshift where w(z) = wp

    Tcmb0 : float or `~astropy.units.Quantity`
        Temperature of the CMB z=0. If a float, must be in [K].
        Default: 2.725.

    Neff : float
        Effective number of Neutrino species. Default 3.04.

    m_nu : `~astropy.units.Quantity`
        Mass of each neutrino species. A scalar Quantity applies to all
        species; otherwise give one mass per species.  The number of
        species (and hence the number of elements of m_nu if it is not
        scalar) must be the floor of Neff.

    name : str
        Optional name for this cosmological object.

    Examples
    --------
    >>> from astropy.cosmology import wpwaCDM
    >>> cosmo = wpwaCDM(H0=70, Om0=0.3, Ode0=0.7, wp=-0.9, wa=0.2, zp=0.4)

    The comoving distance in Mpc at redshift z:

    >>> z = 0.5
    >>> dc = cosmo.comoving_distance(z)
    """

    def __init__(self, H0, Om0, Ode0, wp=-1., wa=0., zp=0,
                 Tcmb0=2.725, Neff=3.04, m_nu=u.Quantity(0.0, u.eV),
                 name=None):

        FLRW.__init__(self, H0, Om0, Ode0, Tcmb0, Neff, m_nu, name=name)
        self._wp = float(wp)
        self._wa = float(wa)
        self._zp = float(zp)

    @property
    def wp(self):
        """ Dark energy equation of state at the pivot redshift zp"""
        return self._wp

    @property
    def wa(self):
        """ Negative derivative of dark energy equation of state w.r.t. a"""
        return self._wa

    @property
    def zp(self):
        """ The pivot redshift, where w(z) = wp"""
        return self._zp

    def w(self, z):
        """Returns dark energy equation of state at redshift ``z``.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        w : ndarray, or float if input scalar
            The dark energy equation of state

        Notes
        -----
        The dark energy equation of state is defined as
        :math:`w(z) = P(z)/\\rho(z)`, where :math:`P(z)` is the
        pressure at redshift z and :math:`\\rho(z)` is the density
        at redshift z, both in units where c=1. Here this is
        :math:`w(z) = w_p + w_a (a_p - a)` where :math:`a = 1/(1+z)`
        and :math:`a_p = 1 / (1 + z_p)`.
        """

        if isiterable(z):
            z = np.asarray(z)

        apiv = 1.0 / (1.0 + self._zp)
        return self._wp + self._wa * (apiv - 1.0 / (1. + z))

    def de_density_scale(self, z):
        """ Evaluates the redshift dependence of the dark energy density.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        I : ndarray, or float if input scalar
            The scaling of the energy density of dark energy with redshift.

        Notes
        -----
        The scaling factor, I, is defined by :math:`\\rho(z) = \\rho_0 I`,
        and in this case is given by

        .. math::

            a_p = \\frac{1}{1 + z_p}

            I = \\left(1 + z\\right)^{3 \\left(1 + w_p + a_p w_a\\right)}
            \\exp \\left(-3 w_a \\frac{z}{1+z}\\right)
        """

        if isiterable(z):
            z = np.asarray(z)
        zp1 = 1. + z
        apiv = 1. / (1. + self._zp)
        return zp1 ** (3. * (1. + self._wp + apiv * self._wa)) * \
            np.exp(-3. * self._wa * z / zp1)

    def __repr__(self):
        retstr = "{0}H0={1:.3g}, Om0={2:.3g}, Ode0={3:.3g}, wp={4:.3g}, "\
                 "wa={5:.3g}, zp={6:.3g}, Tcmb0={7:.4g}, Neff={8:.3g}, "\
                 "m_nu={9})"
        return retstr.format(self._namelead(), self._H0, self._Om0,
                             self._Ode0, self._wp, self._wa, self._zp,
                             self._Tcmb0, self._Neff, self.m_nu)
class w0wzCDM(FLRW):
    """FLRW cosmology with a variable dark energy equation of state
    and curvature.

    The equation for the dark energy equation of state uses the
    simple form: :math:`w(z) = w_0 + w_z z`.

    This form is not recommended for z > 1.

    Parameters
    ----------
    H0 : float or `~astropy.units.Quantity`
        Hubble constant at z = 0. If a float, must be in [km/sec/Mpc]

    Om0 : float
        Omega matter: density of non-relativistic matter in units of the
        critical density at z=0.

    Ode0 : float
        Omega dark energy: density of dark energy in units of the critical
        density at z=0.

    w0 : float
        Dark energy equation of state at z=0. This is pressure/density for
        dark energy in units where c=1. A cosmological constant has w0=-1.0.

    wz : float
        Derivative of the dark energy equation of state with respect to z.

    Tcmb0 : float or `~astropy.units.Quantity`
        Temperature of the CMB z=0. If a float, must be in [K].
        Default: 2.725.

    Neff : float
        Effective number of Neutrino species. Default 3.04.

    m_nu : float or ndarray or `~astropy.units.Quantity`
        Mass of each neutrino species, in eV. A float or scalar Quantity
        applies to all species; otherwise give one mass per species.  The
        number of species (and hence the number of elements of m_nu if it
        is not scalar) must be the floor of Neff.

    name : str
        Optional name for this cosmological object.

    Examples
    --------
    >>> from astropy.cosmology import w0wzCDM
    >>> cosmo = w0wzCDM(H0=70, Om0=0.3, Ode0=0.7, w0=-0.9, wz=0.2)

    The comoving distance in Mpc at redshift z:

    >>> z = 0.5
    >>> dc = cosmo.comoving_distance(z)
    """

    def __init__(self, H0, Om0, Ode0, w0=-1., wz=0., Tcmb0=2.725,
                 Neff=3.04, m_nu=u.Quantity(0.0, u.eV), name=None):

        FLRW.__init__(self, H0, Om0, Ode0, Tcmb0, Neff, m_nu, name=name)
        self._w0 = float(w0)
        self._wz = float(wz)

    @property
    def w0(self):
        """ Dark energy equation of state at z=0"""
        return self._w0

    @property
    def wz(self):
        """ Derivative of the dark energy equation of state w.r.t. z"""
        return self._wz

    def w(self, z):
        """Returns dark energy equation of state at redshift ``z``.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        w : ndarray, or float if input scalar
            The dark energy equation of state

        Notes
        -----
        The dark energy equation of state is defined as
        :math:`w(z) = P(z)/\\rho(z)`, where :math:`P(z)` is the
        pressure at redshift z and :math:`\\rho(z)` is the density
        at redshift z, both in units where c=1. Here this is given by
        :math:`w(z) = w_0 + w_z z`.
        """

        if isiterable(z):
            z = np.asarray(z)

        return self._w0 + self._wz * z

    def de_density_scale(self, z):
        """ Evaluates the redshift dependence of the dark energy density.

        Parameters
        ----------
        z : array_like
            Input redshifts.

        Returns
        -------
        I : ndarray, or float if input scalar
            The scaling of the energy density of dark energy with redshift.

        Notes
        -----
        The scaling factor, I, is defined by :math:`\\rho(z) = \\rho_0 I`,
        and in this case is given by

        .. math::

            I = \\left(1 + z\\right)^{3 \\left(1 + w_0 - w_z\\right)}
            \\exp \\left(3 w_z z\\right)
        """

        if isiterable(z):
            z = np.asarray(z)
        zp1 = 1. + z
        # I(z) = exp(3 int_0^z [1 + w(z')]/(1 + z') dz') with
        # w = w0 + wz*z' gives (1+z)^(3(1+w0-wz)) * exp(+3*wz*z).
        # The previous version used exp(-3*wz*z), which has the wrong sign.
        return zp1 ** (3. * (1. + self._w0 - self._wz)) * \
            np.exp(3. * self._wz * z)

    def __repr__(self):
        # A comma was previously missing between the wz and Tcmb0 fields.
        retstr = "{0}H0={1:.3g}, Om0={2:.3g}, "\
                 "Ode0={3:.3g}, w0={4:.3g}, wz={5:.3g}, Tcmb0={6:.4g}, "\
                 "Neff={7:.3g}, m_nu={8})"
        return retstr.format(self._namelead(), self._H0, self._Om0,
                             self._Ode0, self._w0, self._wz, self._Tcmb0,
                             self._Neff, self.m_nu)

# Pre-defined cosmologies. This loops over the parameter sets in the
# parameters module and creates a LambdaCDM or FlatLambdaCDM instance
# with the same name as the parameter set in the current module's namespace.
# Note this assumes all the cosmologies in parameters are LambdaCDM,
# which is true at least as of this writing.

for key in parameters.available:
    par = getattr(parameters, key)
    if par['flat']:
        cosmo = FlatLambdaCDM(par['H0'], par['Om0'], Tcmb0=par['Tcmb0'],
                              Neff=par['Neff'],
                              m_nu=u.Quantity(par['m_nu'], u.eV),
                              name=key)
        docstr = "%s instance of FlatLambdaCDM cosmology\n\n(from %s)"
        cosmo.__doc__ = docstr % (key, par['reference'])
    else:
        cosmo = LambdaCDM(par['H0'], par['Om0'], par['Ode0'],
                          Tcmb0=par['Tcmb0'], Neff=par['Neff'],
                          m_nu=u.Quantity(par['m_nu'], u.eV), name=key)
        docstr = "%s instance of LambdaCDM cosmology\n\n(from %s)"
        cosmo.__doc__ = docstr % (key, par['reference'])
    setattr(sys.modules[__name__], key, cosmo)

# don't leave these variables floating around in the namespace
del key, par, cosmo
class default_cosmology(ScienceState):
    """
    The default cosmology to use. To change it::

        >>> from astropy.cosmology import default_cosmology, WMAP7
        >>> with default_cosmology.set(WMAP7):
        ...     # WMAP7 cosmology in effect

    Or, you may use a string::

        >>> with default_cosmology.set('WMAP7'):
        ...     # WMAP7 cosmology in effect
    """
    _value = 'WMAP9'

    @staticmethod
    def get_cosmology_from_string(arg):
        """ Return a cosmology instance from a string.
        """
        # 'no_default' is a sentinel meaning "no cosmology selected".
        if arg == 'no_default':
            return None
        try:
            return getattr(sys.modules[__name__], arg)
        except AttributeError:
            s = "Unknown cosmology '%s'. Valid cosmologies:\n%s" % (
                arg, parameters.available)
            raise ValueError(s)

    @classmethod
    def validate(cls, value):
        # None means "use the package default".
        if value is None:
            value = 'WMAP9'
        if isinstance(value, Cosmology):
            return value
        if isinstance(value, six.string_types):
            return cls.get_cosmology_from_string(value)
        raise TypeError("default_cosmology must be a string or Cosmology instance.")


@deprecated('0.4', alternative='astropy.cosmology.default_cosmology.get_cosmology_from_string')
def get_cosmology_from_string(arg):
    """ Return a cosmology instance from a string.
    """
    return default_cosmology.get_cosmology_from_string(arg)


@deprecated('0.4', alternative='astropy.cosmology.default_cosmology.get')
def get_current():
    """ Get the current cosmology.

    If no current has been set, the WMAP9 comology is returned and a
    warning is given.

    Returns
    -------
    cosmo : ``Cosmology`` instance
    """
    return default_cosmology.get()


@deprecated('0.4', alternative='astropy.cosmology.default_cosmology.set')
def set_current(cosmo):
    """ Set the current cosmology.

    Call this with an empty string ('') to get a list of the strings
    that map to available pre-defined cosmologies.

    Parameters
    ----------
    cosmo : str or ``Cosmology`` instance
        The cosmology to use.
    """
    return default_cosmology.set(cosmo)


DEFAULT_COSMOLOGY = ScienceStateAlias(
    '0.4', 'DEFAULT_COSMOLOGY', 'default_cosmology', default_cosmology)
def z_at_value(func, fval, zmin=0, zmax=1000, ztol=1e-5, maxfun=500):
    """ Find the redshift ``z`` at which ``func(z) = fval``.

    This finds the redshift at which one of the cosmology functions or
    methods (for example Planck13.distmod) is equal to a known value.

    .. warning::
      Make sure you understand the behaviour of the function that you
      are trying to invert! Depending on the cosmology, there may not
      be a unique solution. For example, in the standard Lambda CDM
      cosmology, there are two redshifts which give an angular
      diameter distance of 1500 Mpc, z ~ 0.7 and z ~ 3.8. To force
      ``z_at_value`` to find the solution you are interested in, use the
      ``zmin`` and ``zmax`` keywords to limit the search range (see the
      example below).

    Parameters
    ----------
    func : function or method
        A function that takes a redshift as input.
    fval : astropy.Quantity instance
        The value of ``func(z)``.
    zmin : float, optional
        The lower search limit for ``z`` (default 0).
    zmax : float, optional
        The upper search limit for ``z`` (default 1000).
    ztol : float, optional
        The relative error in ``z`` acceptable for convergence.
    maxfun : int, optional
        The maximum number of function evaluations allowed in the
        optimization routine (default 500).

    Returns
    -------
    z : float
        The redshift ``z`` satisfying ``zmin < z < zmax`` and ``func(z) =
        fval`` within ``ztol``.

    Notes
    -----
    This works for any arbitrary input cosmology, but is inefficient
    if you want to invert a large number of values for the same
    cosmology. In this case, it is faster to instead generate an array
    of values at many closely-spaced redshifts that cover the relevant
    redshift range, and then use interpolation to find the redshift at
    each value you're interested in. For example, to efficiently find
    the redshifts corresponding to 10^6 values of the distance modulus
    in a Planck13 cosmology, you could do the following:

    >>> import astropy.units as u
    >>> from astropy.cosmology import Planck13, z_at_value

    Generate 10^6 distance moduli between 23 and 43 for which we
    want to find the corresponding redshifts:

    >>> Dvals = (23 + np.random.rand(int(1e6)) * 20) * u.mag

    Make a grid of distance moduli covering the redshift range we
    need using 50 equally log-spaced values between zmin and
    zmax. We use log spacing to adequately sample the steep part of
    the curve at low distance moduli:

    >>> zmin = z_at_value(Planck13.distmod, Dvals.min())
    >>> zmax = z_at_value(Planck13.distmod, Dvals.max())
    >>> zgrid = np.logspace(np.log10(zmin), np.log10(zmax), 50)
    >>> Dgrid = Planck13.distmod(zgrid)

    Finally interpolate to find the redshift at each distance modulus
    (note the grid of known distance moduli is the x-coordinate and the
    redshift grid is the y-coordinate of the interpolation):

    >>> zvals = np.interp(Dvals.value, Dgrid.value, zgrid)

    Examples
    --------
    >>> import astropy.units as u
    >>> from astropy.cosmology import Planck13, z_at_value

    The age and lookback time are monotonic with redshift, and so a
    unique solution can be found:

    >>> z_at_value(Planck13.age, 2 * u.Gyr)
    3.1981191749374629

    The angular diameter is not monotonic however, and there are two
    redshifts that give a value of 1500 Mpc. Use the zmin and zmax keywords
    to find the one you're interested in:

    >>> z_at_value(Planck13.angular_diameter_distance, 1500 * u.Mpc, zmax=1.5)
    0.68127769625288614
    >>> z_at_value(Planck13.angular_diameter_distance, 1500 * u.Mpc, zmin=2.5)
    3.7914918534022011

    Also note that the luminosity distance and distance modulus (two
    other commonly inverted quantities) are monotonic in flat and open
    universes, but not in closed universes.
    """
    from scipy.optimize import fminbound

    fval_zmin = func(zmin)
    fval_zmax = func(zmax)
    if np.sign(fval - fval_zmin) != np.sign(fval_zmax - fval):
        warnings.warn("""\
fval is not bracketed by func(zmin) and func(zmax). This means either
there is no solution, or that there is more than one solution between
zmin and zmax satisfying fval = func(z).""")

    # Minimize |func(z) - fval|; strip units up front so the objective
    # works with a plain float optimizer.
    if isinstance(fval_zmin, Quantity):
        unit = fval_zmin.unit
        val = fval.to(unit).value

        def f(z):
            return abs(func(z).value - val)
    else:
        def f(z):
            return abs(func(z) - fval)

    # Pass ztol through as the optimizer's convergence tolerance; the
    # previous version accepted ztol but never forwarded it, so the
    # documented tolerance had no effect.
    zbest, resval, ierr, ncall = fminbound(f, zmin, zmax, maxfun=maxfun,
                                           full_output=1, xtol=ztol)

    if ierr != 0:
        warnings.warn('Maximum number of function calls ({}) reached'.format(
            ncall))

    if np.allclose(zbest, zmax):
        raise CosmologyError("Best guess z is very close the upper z limit.\n"
                             "Try re-running with a different zmax.")
    elif np.allclose(zbest, zmin):
        raise CosmologyError("Best guess z is very close the lower z limit.\n"
                             "Try re-running with a different zmin.")

    return zbest


@deprecated(since='0.4', alternative='.kpc_comoving_per_arcmin')
def kpc_comoving_per_arcmin(z, cosmo=None):
    """ Separation in transverse comoving kpc corresponding to an
    arcminute at redshift ``z``.

    Parameters
    ----------
    z : array_like
        Input redshifts.
    cosmo : ``Cosmology`` instance, optional
        The cosmology to use; defaults to the current default cosmology.

    Returns
    -------
    d : `~astropy.units.Quantity`
        The distance in comoving kpc corresponding to an arcmin at each
        input redshift.
    """
    if cosmo is None:
        cosmo = _default_cosmology.get()
    return cosmo.kpc_comoving_per_arcmin(z)


@deprecated(since='0.4', alternative='.kpc_proper_per_arcmin')
def kpc_proper_per_arcmin(z, cosmo=None):
    """ Separation in transverse proper kpc corresponding to an
    arcminute at redshift ``z``.

    Parameters
    ----------
    z : array_like
        Input redshifts.
    cosmo : ``Cosmology`` instance, optional
        The cosmology to use; defaults to the current default cosmology.

    Returns
    -------
    d : `~astropy.units.Quantity`
        The distance in proper kpc corresponding to an arcmin at each
        input redshift.
    """
    if cosmo is None:
        cosmo = _default_cosmology.get()
    return cosmo.kpc_proper_per_arcmin(z)
+ """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.kpc_proper_per_arcmin(z) + + +@deprecated(since='0.4', alternative='.arcsec_per_kpc_comoving') +def arcsec_per_kpc_comoving(z, cosmo=None): + """ Angular separation in arcsec corresponding to a comoving kpc + at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + theta : `~astropy.units.Quantity` + The angular separation in arcsec corresponding to a comoving kpc + at each input redshift. + """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.arcsec_per_kpc_comoving(z) + + +@deprecated(since='0.4', alternative='.arcsec_per_kpc_proper') +def arcsec_per_kpc_proper(z, cosmo=None): + """ Angular separation in arcsec corresponding to a proper kpc at + redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + theta : `~astropy.units.Quantity` + The angular separation in arcsec corresponding to a proper kpc + at each input redshift. + """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.arcsec_per_kpc_proper(z) + + +@deprecated(since='0.4', alternative='.distmod') +def distmod(z, cosmo=None): + """ Distance modulus at redshift ``z``. + + The distance modulus is defined as the (apparent magnitude - + absolute magnitude) for an object at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + distmod : `~astropy.units.Quantity` + Distance modulus at each input redshift. + + See Also + -------- + z_at_value : Find the redshift corresponding to a distance modulus. + """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.distmod(z) + + +@deprecated(since='0.4', alternative='.H') +def H(z, cosmo=None): + """ Hubble parameter (km/s/Mpc) at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. 
+ + Returns + ------- + H : `~astropy.units.Quantity` + Hubble parameter at each input redshift. + """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.H(z) + + +@deprecated(since='0.4', alternative='.scale_factor') +def scale_factor(z, cosmo=None): + """ Scale factor at redshift ``z``. + + The scale factor is defined as ``a = 1 / (1 + z)``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + scalefac : ndarray, or float if input scalar + Scale factor at each input redshift. + """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.scale_factor(z) + + +@deprecated(since='0.4', alternative='.critical_density') +def critical_density(z, cosmo=None): + """ Critical density in grams per cubic cm at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + critdens : `~astropy.units.Quantity` + Critical density at each input redshift. + """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.critical_density(z) + + +@deprecated(since='0.4', alternative='.lookback_time') +def lookback_time(z, cosmo=None): + """ Lookback time in Gyr to redshift ``z``. + + The lookback time is the difference between the age of the + Universe now and the age at redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + t : `~astropy.units.Quantity` + Lookback time at each input redshift. + + See Also + -------- + z_at_value : Find the redshift corresponding to a lookback time. + """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.lookback_time(z) + + +@deprecated(since='0.4', alternative='.comoving_distance') +def comoving_distance(z, cosmo=None): + """ Comoving distance in Mpc at redshift ``z``. + + The comoving distance along the line-of-sight between two objects + remains constant with time for objects in the Hubble flow. + + Parameters + ---------- + z : array_like + Input redshifts. 
+ + Returns + ------- + codist : `~astropy.units.Quantity` + Comoving distance at each input redshift. + """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.comoving_distance(z) + + +@deprecated(since='0.4', alternative='.angular_diameter_distance') +def angular_diameter_distance(z, cosmo=None): + """ Angular diameter distance in Mpc at a given redshift. + + This gives the proper (sometimes called 'physical') transverse + distance corresponding to an angle of 1 radian for an object at + redshift ``z``. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + angdist : `~astropy.units.Quantity` + Angular diameter distance at each input redshift. + """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.angular_diameter_distance(z) + + +@deprecated(since='0.4', alternative='.luminosity_distance') +def luminosity_distance(z, cosmo=None): + """ Luminosity distance in Mpc at redshift ``z``. + + This is the distance to use when converting between the bolometric + flux from an object at redshift ``z`` and its bolometric luminosity. + + Parameters + ---------- + z : array_like + Input redshifts. + + Returns + ------- + lumdist : `~astropy.units.Quantity` + Luminosity distance at each input redshift. + + See Also + -------- + z_at_value : Find the redshift corresponding to a luminosity distance. + """ + if cosmo is None: + cosmo = _default_cosmology.get() + return cosmo.luminosity_distance(z) diff --git a/astropy/cosmology/parameters.py b/astropy/cosmology/parameters.py new file mode 100644 index 0000000..9d8948e --- /dev/null +++ b/astropy/cosmology/parameters.py @@ -0,0 +1,130 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" This module contains dictionaries with sets of parameters for a +given cosmology. 
+
+Each cosmology has the following parameters defined:
+
+    ==========  =====================================
+    Oc0         Omega cold dark matter at z=0
+    Ob0         Omega baryon at z=0
+    Om0         Omega matter at z=0
+    flat        Is this assumed flat?  If not, Ode0 must be specified
+    Ode0        Omega dark energy at z=0 if flat is False
+    H0          Hubble parameter at z=0 in km/s/Mpc
+    n           Density perturbation spectral index
+    Tcmb0       Current temperature of the CMB
+    Neff        Effective number of neutrino species
+    sigma8      Density perturbation amplitude
+    tau         Ionisation optical depth
+    z_reion     Redshift of hydrogen reionisation
+    t0          Age of the universe in Gyr
+    reference   Reference for the parameters
+    ==========  =====================================
+
+The list of cosmologies available is given by the tuple
+`available`. Current cosmologies available:
+
+Planck13 parameters from Planck Collaboration 2013, arXiv:1303.5076
+ (Paper XVI), Table 5 (Planck + WP + highL + BAO)
+
+WMAP 9 year parameters from Hinshaw et al. 2013, ApJS, 208, 19,
+doi: 10.1088/0067-0049/208/2/19. Table 4 (WMAP9 + eCMB + BAO + H0)
+
+WMAP 7 year parameters from Komatsu et al. 2011, ApJS, 192, 18,
+doi: 10.1088/0067-0049/192/2/18. Table 1 (WMAP + BAO + H0 ML).
+
+WMAP 5 year parameters from Komatsu et al. 2009, ApJS, 180, 330,
+doi: 10.1088/0067-0049/180/2/330. Table 1 (WMAP + BAO + SN ML).
+
+"""
+from __future__ import (absolute_import, division, print_function,
+                        unicode_literals)
+# delete these things from the namespace so we can automatically find
+# all of the parameter dictionaries below.
+del absolute_import
+del division
+del print_function
+del unicode_literals
+
+# Note: if you add a new cosmology, please also update the table
+# in the 'Built-in Cosmologies' section of astropy/docs/cosmology/index.rst
+# in addition to the list above.
+
+# Ade et al.
Planck 2013 paper XVI Table 5 penultimate column (best fit) +Planck13 = dict( + Oc0=0.25886, + Ob0=0.048252, + Om0=0.30712, + H0=67.77, + n=0.9611, + sigma8=0.8288, + tau=0.0952, + z_reion=11.52, + t0=13.7965, + Tcmb0=2.7255, + Neff=3.046, + flat=True, + m_nu=[0., 0., 0.06], + reference=("Planck Collaboration 2013, Paper XVI, arXiv:1303.5076" + " Table 5 (Planck + WP + highL + BAO)") +) + + +WMAP9 = dict( + Oc0=0.2402, + Ob0=0.04628, + Om0=0.2865, + H0=69.32, + n=0.9608, + sigma8=0.820, + tau=0.081, + z_reion=10.1, + t0=13.772, + Tcmb0=2.725, + Neff=3.04, + m_nu=0.0, + flat=True, + reference=("Hinshaw et al. 2013, ApJS, 208, 19, " + "doi: 10.1088/0067-0049/208/2/19. " + "Table 4 (WMAP9 + eCMB + BAO + H0, last column)") +) + +WMAP7 = dict( + Oc0=0.226, + Ob0=0.0455, + Om0=0.272, + H0=70.4, + n=0.967, + sigma8=0.810, + tau=0.085, + z_reion=10.3, + t0=13.76, + Tcmb0=2.725, + Neff=3.04, + m_nu=0.0, + flat=True, + reference=("Komatsu et al. 2011, ApJS, 192, 18, " + "doi: 10.1088/0067-0049/192/2/18. " + "Table 1 (WMAP + BAO + H0 ML).") +) + +WMAP5 = dict( + Oc0=0.231, + Ob0=0.0459, + Om0=0.277, + H0=70.2, + n=0.962, + sigma8=0.817, + tau=0.088, + z_reion=11.3, + t0=13.72, + Tcmb0=2.725, + Neff=3.04, + m_nu=0.0, + flat=True, + reference=("Komatsu et al. 2009, ApJS, 180, 330, " + "doi: 10.1088/0067-0049/180/2/330. 
" + "Table 1 (WMAP + BAO + SN ML).") +) + +available = tuple(k for k in locals() if not k.startswith('_')) diff --git a/astropy/cosmology/setup_package.py b/astropy/cosmology/setup_package.py new file mode 100644 index 0000000..3cd9f7c --- /dev/null +++ b/astropy/cosmology/setup_package.py @@ -0,0 +1,5 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + + +def requires_2to3(): + return False diff --git a/astropy/cosmology/tests/__init__.py b/astropy/cosmology/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/astropy/cosmology/tests/test_cosmology.py b/astropy/cosmology/tests/test_cosmology.py new file mode 100644 index 0000000..c976010 --- /dev/null +++ b/astropy/cosmology/tests/test_cosmology.py @@ -0,0 +1,1086 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +from io import StringIO + +import numpy as np + +from .. import core, funcs +from ...tests.helper import pytest +from ... 
import units as u + +try: + import scipy # pylint: disable=W0611 +except ImportError: + HAS_SCIPY = False +else: + HAS_SCIPY = True + + +def setup_function(function): + # Make sure that tests don't affect default cosmology + core.set_current('no_default') + + +def teardown_function(function): + # Make sure that tests don't affect default cosmology + core.set_current('no_default') + + +def test_init(): + """ Tests to make sure the code refuses inputs it is supposed to""" + with pytest.raises(ValueError): + cosmo = core.FlatLambdaCDM(H0=70, Om0=-0.27) + with pytest.raises(ValueError): + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.27, Neff=-1) + with pytest.raises(ValueError): + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.27, + Tcmb0=u.Quantity([0.0, 2], u.K)) + with pytest.raises(ValueError): + h0bad = u.Quantity([70, 100], u.km / u.s / u.Mpc) + cosmo = core.FlatLambdaCDM(H0=h0bad, Om0=0.27) + with pytest.raises(ValueError): + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.2, m_nu=0.5) + with pytest.raises(ValueError): + bad_mnu = u.Quantity([-0.3, 0.2, 0.1], u.eV) + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.2, m_nu=bad_mnu) + with pytest.raises(ValueError): + bad_mnu = u.Quantity([-0.3, 0.2], u.eV) # 2, expecting 3 + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.2, m_nu=bad_mnu) + + +def test_basic(): + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.27, Tcmb0=2.0, Neff=3.04) + assert np.allclose(cosmo.Om0, 0.27) + assert np.allclose(cosmo.Ode0, 0.729975, rtol=1e-4) + # This next test will fail if astropy.const starts returning non-mks + # units by default; see the comment at the top of core.py + assert np.allclose(cosmo.Ogamma0, 1.463285e-5, rtol=1e-4) + assert np.allclose(cosmo.Onu0, 1.01026e-5, rtol=1e-4) + assert np.allclose(cosmo.Ok0, 0.0) + assert np.allclose(cosmo.Om0 + cosmo.Ode0 + cosmo.Ogamma0 + cosmo.Onu0, + 1.0, rtol=1e-6) + assert np.allclose(cosmo.Om(1) + cosmo.Ode(1) + cosmo.Ogamma(1) + + cosmo.Onu(1), 1.0, rtol=1e-6) + assert np.allclose(cosmo.Tcmb0.value, 2.0) + assert 
np.allclose(cosmo.Tnu0.value, 1.4275317, rtol=1e-5) + assert np.allclose(cosmo.Neff, 3.04) + assert np.allclose(cosmo.h, 0.7) + assert np.allclose(cosmo.H0.value, 70.0) + + # Make sure setting them as quantities gives the same results + H0 = u.Quantity(70, u.km / (u.s * u.Mpc)) + T = u.Quantity(2.0, u.K) + cosmo = core.FlatLambdaCDM(H0=H0, Om0=0.27, Tcmb0=T, Neff=3.04) + assert np.allclose(cosmo.Om0, 0.27) + assert np.allclose(cosmo.Ode0, 0.729975, rtol=1e-4) + assert np.allclose(cosmo.Ogamma0, 1.463285e-5, rtol=1e-4) + assert np.allclose(cosmo.Onu0, 1.01026e-5, rtol=1e-4) + assert np.allclose(cosmo.Ok0, 0.0) + assert np.allclose(cosmo.Om0 + cosmo.Ode0 + cosmo.Ogamma0 + cosmo.Onu0, + 1.0, rtol=1e-6) + assert np.allclose(cosmo.Om(1) + cosmo.Ode(1) + cosmo.Ogamma(1) + + cosmo.Onu(1), 1.0, rtol=1e-6) + assert np.allclose(cosmo.Tcmb0.value, 2.0) + assert np.allclose(cosmo.Tnu0.value, 1.4275317, rtol=1e-5) + assert np.allclose(cosmo.Neff, 3.04) + assert np.allclose(cosmo.h, 0.7) + assert np.allclose(cosmo.H0.value, 70.0) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_units(): + """ Test if the right units are being returned""" + + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.27, Tcmb0=2.0) + assert cosmo.comoving_distance(1.0).unit == u.Mpc + assert cosmo.angular_diameter_distance(1.0).unit == u.Mpc + assert cosmo.angular_diameter_distance_z1z2(1.0, 2.0).unit == u.Mpc + assert cosmo.comoving_distance(1.0).unit == u.Mpc + assert cosmo.luminosity_distance(1.0).unit == u.Mpc + assert cosmo.lookback_time(1.0).unit == u.Gyr + assert cosmo.H0.unit == u.km / u.Mpc / u.s + assert cosmo.H(1.0).unit == u.km / u.Mpc / u.s + assert cosmo.Tcmb0.unit == u.K + assert cosmo.Tcmb(1.0).unit == u.K + assert cosmo.Tcmb([0.0, 1.0]).unit == u.K + assert cosmo.Tnu0.unit == u.K + assert cosmo.Tnu(1.0).unit == u.K + assert cosmo.Tnu([0.0, 1.0]).unit == u.K + assert cosmo.arcsec_per_kpc_comoving(1.0).unit == u.arcsec / u.kpc + assert cosmo.arcsec_per_kpc_proper(1.0).unit == u.arcsec / u.kpc + 
assert cosmo.kpc_comoving_per_arcmin(1.0).unit == u.kpc / u.arcmin + assert cosmo.kpc_proper_per_arcmin(1.0).unit == u.kpc / u.arcmin + assert cosmo.critical_density(1.0).unit == u.g / u.cm ** 3 + assert cosmo.comoving_volume(1.0).unit == u.Mpc ** 3 + assert cosmo.age(1.0).unit == u.Gyr + assert cosmo.distmod(1.0).unit == u.mag + + +def test_repr(): + """ Test string representation of built in classes""" + cosmo = core.LambdaCDM(70, 0.3, 0.5) + expected = 'LambdaCDM(H0=70 km / (Mpc s), Om0=0.3, '\ + 'Ode0=0.5, Tcmb0=2.725 K, Neff=3.04, m_nu=[ 0. 0. 0.] eV)' + assert str(cosmo) == expected + + cosmo = core.LambdaCDM(70, 0.3, 0.5, m_nu=u.Quantity(0.01, u.eV)) + expected = 'LambdaCDM(H0=70 km / (Mpc s), Om0=0.3, Ode0=0.5, '\ + 'Tcmb0=2.725 K, Neff=3.04, m_nu=[ 0.01 0.01 0.01] eV)' + assert str(cosmo) == expected + + cosmo = core.FlatLambdaCDM(50.0, 0.27) + expected = 'FlatLambdaCDM(H0=50 km / (Mpc s), Om0=0.27, '\ + 'Tcmb0=2.725 K, Neff=3.04, m_nu=[ 0. 0. 0.] eV)' + assert str(cosmo) == expected + + cosmo = core.wCDM(60.0, 0.27, 0.6, w0=-0.8, name='test1') + expected = 'wCDM(name="test1", H0=60 km / (Mpc s), Om0=0.27, '\ + 'Ode0=0.6, w0=-0.8, Tcmb0=2.725 K, Neff=3.04, '\ + 'm_nu=[ 0. 0. 0.] eV)' + assert str(cosmo) == expected + + cosmo = core.FlatwCDM(65.0, 0.27, w0=-0.6, name='test2') + expected = 'FlatwCDM(name="test2", H0=65 km / (Mpc s), Om0=0.27, '\ + 'w0=-0.6, Tcmb0=2.725 K, Neff=3.04, m_nu=[ 0. 0. 0.] eV)' + assert str(cosmo) == expected + + cosmo = core.w0waCDM(60.0, 0.25, 0.4, w0=-0.6, wa=0.1, name='test3') + expected = 'w0waCDM(name="test3", H0=60 km / (Mpc s), Om0=0.25, '\ + 'Ode0=0.4, w0=-0.6, wa=0.1, Tcmb0=2.725 K, Neff=3.04, '\ + 'm_nu=[ 0. 0. 0.] eV)' + assert str(cosmo) == expected + + cosmo = core.Flatw0waCDM(55.0, 0.35, w0=-0.9, wa=-0.2, name='test4') + expected = 'Flatw0waCDM(name="test4", H0=55 km / (Mpc s), Om0=0.35, '\ + 'w0=-0.9, Tcmb0=2.725 K, Neff=3.04, m_nu=[ 0. 0. 0.] 
eV)' + assert str(cosmo) == expected + + cosmo = core.wpwaCDM(50.0, 0.3, 0.3, wp=-0.9, wa=-0.2, + zp=0.3, name='test5') + expected = 'wpwaCDM(name="test5", H0=50 km / (Mpc s), Om0=0.3, '\ + 'Ode0=0.3, wp=-0.9, wa=-0.2, zp=0.3, Tcmb0=2.725 K, '\ + 'Neff=3.04, m_nu=[ 0. 0. 0.] eV)' + assert str(cosmo) == expected + + cosmo = core.w0wzCDM(55.0, 0.4, 0.8, w0=-1.05, wz=-0.2, + m_nu=u.Quantity([0.001, 0.01, 0.015], u.eV)) + expected = 'w0wzCDM(H0=55 km / (Mpc s), Om0=0.4, Ode0=0.8, w0=-1.05, '\ + 'wz=-0.2 Tcmb0=2.725 K, Neff=3.04, '\ + 'm_nu=[ 0.001 0.01 0.015] eV)' + assert str(cosmo) == expected + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_flat_z1(): + """ Test a flat cosmology at z=1 against several other on-line + calculators. + """ + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.27, Tcmb0=0.0) + z = 1 + + # Test values were taken from the following web cosmology + # calculators on 27th Feb 2012: + + # Wright: http://www.astro.ucla.edu/~wright/CosmoCalc.html + # (http://adsabs.harvard.edu/abs/2006PASP..118.1711W) + # Kempner: http://www.kempner.net/cosmic.php + # iCosmos: http://www.icosmos.co.uk/index.html + + # The order of values below is Wright, Kempner, iCosmos' + assert np.allclose(cosmo.comoving_distance(z).value, + [3364.5, 3364.8, 3364.7988], rtol=1e-4) + assert np.allclose(cosmo.angular_diameter_distance(z).value, + [1682.3, 1682.4, 1682.3994], rtol=1e-4) + assert np.allclose(cosmo.luminosity_distance(z).value, + [6729.2, 6729.6, 6729.5976], rtol=1e-4) + assert np.allclose(cosmo.lookback_time(z).value, + [7.841, 7.84178, 7.843], rtol=1e-3) + + +def test_zeroing(): + """ Tests if setting params to 0s always respects that""" + # Make sure Ode = 0 behaves that way + cosmo = core.LambdaCDM(H0=70, Om0=0.27, Ode0=0.0) + assert np.allclose(cosmo.Ode([0, 1, 2, 3]), [0, 0, 0, 0]) + # Ogamma0 + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.27, Tcmb0=0.0) + assert np.allclose(cosmo.Ogamma([0, 1, 2, 3]), [0, 0, 0, 0]) + + +# This class is to test whether the routines work 
correctly +# if one only overloads w(z) +class test_cos_sub(core.FLRW): + + def __init__(self): + core.FLRW.__init__(self, 70.0, 0.27, 0.73, Tcmb0=0.0, name="test_cos") + self._w0 = -0.9 + + def w(self, z): + return self._w0 * np.ones_like(z) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_de_subclass(): + # This is the comparison object + z = [0.2, 0.4, 0.6, 0.9] + cosmo = core.wCDM(H0=70, Om0=0.27, Ode0=0.73, w0=-0.9, Tcmb0=0.0) + # Values taken from Ned Wrights advanced cosmo calcluator, Aug 17 2012 + assert np.allclose(cosmo.luminosity_distance(z).value, + [975.5, 2158.2, 3507.3, 5773.1], rtol=1e-3) + # Now try the subclass that only gives w(z) + cosmo = test_cos_sub() + assert np.allclose(cosmo.luminosity_distance(z).value, + [975.5, 2158.2, 3507.3, 5773.1], rtol=1e-3) + # Test efunc + assert np.allclose(cosmo.efunc(1.0), 1.7489240754, rtol=1e-5) + assert np.allclose(cosmo.efunc([0.5, 1.0]), + [1.31744953, 1.7489240754], rtol=1e-5) + assert np.allclose(cosmo.inv_efunc([0.5, 1.0]), + [0.75904236, 0.57178011], rtol=1e-5) + # Test de_density_scale + assert np.allclose(cosmo.de_density_scale(1.0), 1.23114444, rtol=1e-4) + assert np.allclose(cosmo.de_density_scale([0.5, 1.0]), + [1.12934694, 1.23114444], rtol=1e-4) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_varyde_lumdist_mathematica(): + """Tests a few varying dark energy EOS models against a mathematica + computation""" + + # w0wa models + z = np.array([0.2, 0.4, 0.9, 1.2]) + cosmo = core.w0waCDM(H0=70, Om0=0.2, Ode0=0.8, w0=-1.1, wa=0.2, Tcmb0=0.0) + assert np.allclose(cosmo.w0, -1.1) + assert np.allclose(cosmo.wa, 0.2) + + assert np.allclose(cosmo.luminosity_distance(z).value, + [1004.0, 2268.62, 6265.76, 9061.84], rtol=1e-4) + assert np.allclose(cosmo.de_density_scale(0.0), 1.0, rtol=1e-5) + assert np.allclose(cosmo.de_density_scale([0.0, 0.5, 1.5]), + [1.0, 0.9246310669529021, 0.9184087000251957]) + + cosmo = core.w0waCDM(H0=70, Om0=0.3, Ode0=0.7, w0=-0.9, wa=0.0, Tcmb0=0.0) + assert 
np.allclose(cosmo.luminosity_distance(z).value, + [971.667, 2141.67, 5685.96, 8107.41], rtol=1e-4) + cosmo = core.w0waCDM(H0=70, Om0=0.3, Ode0=0.7, w0=-0.9, wa=-0.5, Tcmb0=0.0) + assert np.allclose(cosmo.luminosity_distance(z).value, + [974.087, 2157.08, 5783.92, 8274.08], rtol=1e-4) + + # wpwa models + cosmo = core.wpwaCDM(H0=70, Om0=0.2, Ode0=0.8, wp=-1.1, wa=0.2, zp=0.5, + Tcmb0=0.0) + assert np.allclose(cosmo.wp, -1.1) + assert np.allclose(cosmo.wa, 0.2) + assert np.allclose(cosmo.zp, 0.5) + assert np.allclose(cosmo.luminosity_distance(z).value, + [1010.81, 2294.45, 6369.45, 9218.95], rtol=1e-4) + + cosmo = core.wpwaCDM(H0=70, Om0=0.2, Ode0=0.8, wp=-1.1, wa=0.2, zp=0.9, + Tcmb0=0.0) + assert np.allclose(cosmo.wp, -1.1) + assert np.allclose(cosmo.wa, 0.2) + assert np.allclose(cosmo.zp, 0.9) + assert np.allclose(cosmo.luminosity_distance(z).value, + [1013.68, 2305.3, 6412.37, 9283.33], rtol=1e-4) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_omatter(): + # Test Om evolution + tcos = core.FlatLambdaCDM(70.0, 0.3) + assert np.allclose(tcos.Om0, 0.3) + assert np.allclose(tcos.H0.value, 70.0) + assert np.allclose(tcos.Om(0), 0.3) + z = np.array([0.0, 0.5, 1.0, 2.0]) + assert np.allclose(tcos.Om(z), [0.3, 0.59112134, 0.77387435, 0.91974179], + rtol=1e-4) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_ocurv(): + # Test Ok evolution + # Flat, boring case + tcos = core.FlatLambdaCDM(70.0, 0.3) + assert np.allclose(tcos.Ok0, 0.0) + assert np.allclose(tcos.Ok(0), 0.0) + z = np.array([0.0, 0.5, 1.0, 2.0]) + assert np.allclose(tcos.Ok(z), [0.0, 0.0, 0.0, 0.0], + rtol=1e-6) + + # Not flat + tcos = core.LambdaCDM(70.0, 0.3, 0.5, Tcmb0=u.Quantity(0.0, u.K)) + assert np.allclose(tcos.Ok0, 0.2) + assert np.allclose(tcos.Ok(0), 0.2) + assert np.allclose(tcos.Ok(z), [0.2, 0.22929936, 0.21621622, 0.17307692], + rtol=1e-4) + + # Test the sum; note that Ogamma/Onu are 0 + assert np.allclose(tcos.Ok(z) + tcos.Om(z) + tcos.Ode(z), + [1.0, 1.0, 1.0, 1.0], rtol=1e-5) + + 
+@pytest.mark.skipif('not HAS_SCIPY') +def test_ode(): + # Test Ode evolution, turn off neutrinos, cmb + tcos = core.FlatLambdaCDM(70.0, 0.3, Tcmb0=0) + assert np.allclose(tcos.Ode0, 0.7) + assert np.allclose(tcos.Ode(0), 0.7) + z = np.array([0.0, 0.5, 1.0, 2.0]) + assert np.allclose(tcos.Ode(z), [0.7, 0.408759, 0.2258065, 0.07954545], + rtol=1e-5) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_ogamma(): + """Tests the effects of changing the temperature of the CMB""" + + # Tested against Ned Wright's advanced cosmology calculator, + # Sep 7 2012. The accuracy of our comparision is limited by + # how many digits it outputs, which limits our test to about + # 0.2% accuracy. The NWACC does not allow one + # to change the number of nuetrino species, fixing that at 3. + # Also, inspection of the NWACC code shows it uses inaccurate + # constants at the 0.2% level (specifically, a_B), + # so we shouldn't expect to match it that well. The integral is + # also done rather crudely. Therefore, we should not expect + # the NWACC to be accurate to better than about 0.5%, which is + # unfortunate, but reflects a problem with it rather than this code. + # More accurate tests below using Mathematica + z = np.array([1.0, 10.0, 500.0, 1000.0]) + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.3, Tcmb0=0, Neff=3) + assert np.allclose(cosmo.angular_diameter_distance(z).value, + [1651.9, 858.2, 26.855, 13.642], rtol=5e-4) + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.3, Tcmb0=2.725, Neff=3) + assert np.allclose(cosmo.angular_diameter_distance(z).value, + [1651.8, 857.9, 26.767, 13.582], rtol=5e-4) + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.3, Tcmb0=4.0, Neff=3) + assert np.allclose(cosmo.angular_diameter_distance(z).value, + [1651.4, 856.6, 26.489, 13.405], rtol=5e-4) + + # Next compare with doing the integral numerically in Mathematica, + # which allows more precision in the test. 
It is at least as + # good as 0.01%, possibly better + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.3, Tcmb0=0, Neff=3.04) + assert np.allclose(cosmo.angular_diameter_distance(z).value, + [1651.91, 858.205, 26.8586, 13.6469], rtol=1e-5) + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.3, Tcmb0=2.725, Neff=3.04) + assert np.allclose(cosmo.angular_diameter_distance(z).value, + [1651.76, 857.817, 26.7688, 13.5841], rtol=1e-5) + cosmo = core.FlatLambdaCDM(H0=70, Om0=0.3, Tcmb0=4.0, Neff=3.04) + assert np.allclose(cosmo.angular_diameter_distance(z).value, + [1651.21, 856.411, 26.4845, 13.4028], rtol=1e-5) + + # Just to be really sure, we also do a version where the integral + # is analytic, which is a Ode = 0 flat universe. In this case + # Integrate(1/E(x),{x,0,z}) = 2 ( sqrt((1+Or z)/(1+z)) - 1 )/(Or - 1) + # Recall that c/H0 * Integrate(1/E) is FLRW.comoving_distance. + Ogamma0h2 = 4 * 5.670373e-8 / 299792458.0 ** 3 * 2.725 ** 4 / 1.87837e-26 + Onu0h2 = Ogamma0h2 * 7.0 / 8.0 * (4.0 / 11.0) ** (4.0 / 3.0) * 3.04 + Or0 = (Ogamma0h2 + Onu0h2) / 0.7 ** 2 + Om0 = 1.0 - Or0 + hubdis = 299792.458 / 70.0 + cosmo = core.FlatLambdaCDM(H0=70, Om0=Om0, Tcmb0=2.725, Neff=3.04) + targvals = 2.0 * hubdis * \ + (np.sqrt((1.0 + Or0 * z) / (1.0 + z)) - 1.0) / (Or0 - 1.0) + assert np.allclose(cosmo.comoving_distance(z).value, targvals, rtol=1e-5) + + # And integers for z + assert np.allclose(cosmo.comoving_distance(z.astype(np.int)).value, + targvals, rtol=1e-5) + + # Try Tcmb0 = 4 + Or0 *= (4.0 / 2.725) ** 4 + Om0 = 1.0 - Or0 + cosmo = core.FlatLambdaCDM(H0=70, Om0=Om0, Tcmb0=4.0, Neff=3.04) + targvals = 2.0 * hubdis * \ + (np.sqrt((1.0 + Or0 * z) / (1.0 + z)) - 1.0) / (Or0 - 1.0) + assert np.allclose(cosmo.comoving_distance(z).value, targvals, rtol=1e-5) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_tcmb(): + cosmo = core.FlatLambdaCDM(70.4, 0.272, Tcmb0=2.5) + assert np.allclose(cosmo.Tcmb0.value, 2.5) + assert np.allclose(cosmo.Tcmb(2).value, 7.5) + z = [0.0, 1.0, 2.0, 3.0, 9.0] + assert 
np.allclose(cosmo.Tcmb(z).value, + [2.5, 5.0, 7.5, 10.0, 25.0], rtol=1e-6) + # Make sure it's the same for integers + z = [0, 1, 2, 3, 9] + assert np.allclose(cosmo.Tcmb(z).value, + [2.5, 5.0, 7.5, 10.0, 25.0], rtol=1e-6) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_tnu(): + cosmo = core.FlatLambdaCDM(70.4, 0.272, Tcmb0=3.0) + assert np.allclose(cosmo.Tnu0.value, 2.1412975665108247, rtol=1e-6) + assert np.allclose(cosmo.Tnu(2).value, 6.423892699532474, rtol=1e-6) + z = [0.0, 1.0, 2.0, 3.0] + expected = [2.14129757, 4.28259513, 6.4238927, 8.56519027] + assert np.allclose(cosmo.Tnu(z), expected, rtol=1e-6) + + # Test for integers + z = [0, 1, 2, 3] + assert np.allclose(cosmo.Tnu(z), expected, rtol=1e-6) + + +def test_efunc_vs_invefunc(): + # Test that efunc and inv_efunc give the same values + z0 = 0.5 + z = np.array([0.5, 1.0, 2.0, 5.0]) + + # Below are the 'standard' included cosmologies + # We do the non-standard case in test_efunc_vs_invefunc_flrw, + # since it requires scipy + cosmo = core.LambdaCDM(70, 0.3, 0.5) + assert np.allclose(cosmo.efunc(z0), 1.0 / cosmo.inv_efunc(z0)) + assert np.allclose(cosmo.efunc(z), 1.0 / cosmo.inv_efunc(z)) + cosmo = core.LambdaCDM(70, 0.3, 0.5, m_nu=u.Quantity(0.01, u.eV)) + assert np.allclose(cosmo.efunc(z0), 1.0 / cosmo.inv_efunc(z0)) + assert np.allclose(cosmo.efunc(z), 1.0 / cosmo.inv_efunc(z)) + cosmo = core.FlatLambdaCDM(50.0, 0.27) + assert np.allclose(cosmo.efunc(z0), 1.0 / cosmo.inv_efunc(z0)) + assert np.allclose(cosmo.efunc(z), 1.0 / cosmo.inv_efunc(z)) + cosmo = core.wCDM(60.0, 0.27, 0.6, w0=-0.8) + assert np.allclose(cosmo.efunc(z0), 1.0 / cosmo.inv_efunc(z0)) + assert np.allclose(cosmo.efunc(z), 1.0 / cosmo.inv_efunc(z)) + cosmo = core.FlatwCDM(65.0, 0.27, w0=-0.6) + assert np.allclose(cosmo.efunc(z0), 1.0 / cosmo.inv_efunc(z0)) + assert np.allclose(cosmo.efunc(z), 1.0 / cosmo.inv_efunc(z)) + cosmo = core.w0waCDM(60.0, 0.25, 0.4, w0=-0.6, wa=0.1) + assert np.allclose(cosmo.efunc(z0), 1.0 / 
cosmo.inv_efunc(z0)) + assert np.allclose(cosmo.efunc(z), 1.0 / cosmo.inv_efunc(z)) + cosmo = core.Flatw0waCDM(55.0, 0.35, w0=-0.9, wa=-0.2) + assert np.allclose(cosmo.efunc(z0), 1.0 / cosmo.inv_efunc(z0)) + assert np.allclose(cosmo.efunc(z), 1.0 / cosmo.inv_efunc(z)) + cosmo = core.wpwaCDM(50.0, 0.3, 0.3, wp=-0.9, wa=-0.2, zp=0.3) + assert np.allclose(cosmo.efunc(z0), 1.0 / cosmo.inv_efunc(z0)) + assert np.allclose(cosmo.efunc(z), 1.0 / cosmo.inv_efunc(z)) + cosmo = core.w0wzCDM(55.0, 0.4, 0.8, w0=-1.05, wz=-0.2) + assert np.allclose(cosmo.efunc(z0), 1.0 / cosmo.inv_efunc(z0)) + assert np.allclose(cosmo.efunc(z), 1.0 / cosmo.inv_efunc(z)) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_efunc_vs_invefunc_flrw(): + z0 = 0.5 + z = np.array([0.5, 1.0, 2.0, 5.0]) + + # FLRW is abstract, so requires test_cos_sub defined earlier + # This requires scipy, unlike the built-ins + cosmo = test_cos_sub() + assert np.allclose(cosmo.efunc(z0), 1.0 / cosmo.inv_efunc(z0)) + assert np.allclose(cosmo.efunc(z), 1.0 / cosmo.inv_efunc(z)) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_kpc_methods(): + cosmo = core.FlatLambdaCDM(70.4, 0.272, Tcmb0=0.0) + assert np.allclose(cosmo.arcsec_per_kpc_comoving(3).value, 0.0317179) + assert np.allclose(cosmo.arcsec_per_kpc_proper(3).value, 0.1268716668) + assert np.allclose(cosmo.kpc_comoving_per_arcmin(3).value, 1891.6753126) + assert np.allclose(cosmo.kpc_proper_per_arcmin(3).value, 472.918828) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_convenience(): + # these are all for WMAP7 with Tcmb = 0 + tcos = core.FlatLambdaCDM(70.4, 0.272, Tcmb0=0.0) + core.set_current(tcos) + + # scalars + assert np.allclose(funcs.arcsec_per_kpc_comoving(3).value, 0.0317179) + assert funcs.arcsec_per_kpc_comoving(3).unit == u.arcsec / u.kpc + assert np.allclose(funcs.arcsec_per_kpc_proper(3).value, 0.1268716668) + assert funcs.arcsec_per_kpc_proper(3).unit == u.arcsec / u.kpc + assert np.allclose(funcs.kpc_comoving_per_arcmin(3).value, 
1891.6753126) + assert funcs.kpc_comoving_per_arcmin(3).unit == u.kpc / u.arcmin + assert np.allclose(funcs.kpc_proper_per_arcmin(3).value, 472.918828) + assert funcs.kpc_proper_per_arcmin(3).unit == u.kpc / u.arcmin + assert np.allclose(funcs.distmod(3).value, 47.075902) + assert funcs.distmod(3).unit == u.mag + assert np.allclose(funcs.H(3).value, 299.80813491298068) + assert funcs.H(3).unit == u.km / (u.Mpc * u.s) + assert np.allclose(funcs.scale_factor(3), 0.25) + assert np.allclose(funcs.scale_factor([3, 4]), [0.25, 0.2]) + assert np.allclose(funcs.critical_density(3).value, 1.6884621680232328e-28) + assert funcs.critical_density(3).unit == u.g / u.cm ** 3 + assert np.allclose(funcs.lookback_time(3).value, 11.555469926558361) + assert funcs.lookback_time(3).unit == u.Gyr + assert np.allclose(funcs.lookback_time([3, 4]).value, + [11.555469927, 12.17718555], rtol=1e-5) + assert np.allclose(funcs.comoving_distance(3).value, 6503.100697385924) + assert funcs.comoving_distance(3).unit == u.Mpc + assert np.allclose(funcs.angular_diameter_distance(3).value, + 1625.775174346481) + assert funcs.angular_diameter_distance(3).unit == u.Mpc + assert np.allclose(funcs.luminosity_distance(3).value, 26012.402789543696) + assert funcs.luminosity_distance(3).unit == u.Mpc + + # arrays + assert np.allclose(funcs.arcsec_per_kpc_comoving([0.1, 0.5]).value, + [0.4946986, 0.10876163]) + assert np.allclose(funcs.arcsec_per_kpc_proper([0.1, 0.5]).value, + [0.54416846354697479, 0.16314245192751084]) + assert np.allclose(funcs.kpc_comoving_per_arcmin([0.1, 0.5]).value, + [121.2859701, 551.66511804]) + assert np.allclose(funcs.kpc_proper_per_arcmin([0.1, 0.5]).value, + [110.25997282, 367.77674536]) + assert np.allclose(funcs.distmod([0.1, 0.5]).value, + [38.30738567, 42.27020333]) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_comoving_volume(): + + c_flat = core.LambdaCDM(H0=70, Om0=0.27, Ode0=0.73, Tcmb0=0.0) + c_open = core.LambdaCDM(H0=70, Om0=0.27, Ode0=0.0, Tcmb0=0.0) + 
c_closed = core.LambdaCDM(H0=70, Om0=2, Ode0=0.0, Tcmb0=0.0) + + # test against ned wright's calculator (cubic Gpc) + redshifts = np.array([0.5, 1, 2, 3, 5, 9]) + wright_flat = np.array([29.123, 159.529, 630.427, 1178.531, 2181.485, + 3654.802]) * 1e9 # convert to Mpc**3 + wright_open = np.array([20.501, 99.019, 380.278, 747.049, 1558.363, + 3123.814]) * 1e9 + wright_closed = np.array([12.619, 44.708, 114.904, 173.709, 258.82, + 358.992]) * 1e9 + # The wright calculator isn't very accurate, so we use a rather + # modest precision + assert np.allclose(c_flat.comoving_volume(redshifts).value, wright_flat, + rtol=1e-2) + assert np.allclose(c_open.comoving_volume(redshifts).value, + wright_open, rtol=1e-2) + assert np.allclose(c_closed.comoving_volume(redshifts).value, + wright_closed, rtol=1e-2) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_differential_comoving_volume(): + from scipy.integrate import quad + + c_flat = core.LambdaCDM(H0=70, Om0=0.27, Ode0=0.73, Tcmb0=0.0) + c_open = core.LambdaCDM(H0=70, Om0=0.27, Ode0=0.0, Tcmb0=0.0) + c_closed = core.LambdaCDM(H0=70, Om0=2, Ode0=0.0, Tcmb0=0.0) + + # test that integration of differential_comoving_volume() + # yields same as comoving_volume() + redshifts = np.array([0.5, 1, 2, 3, 5, 9]) + wright_flat = np.array([29.123, 159.529, 630.427, 1178.531, 2181.485, + 3654.802]) * 1e9 # convert to Mpc**3 + wright_open = np.array([20.501, 99.019, 380.278, 747.049, 1558.363, + 3123.814]) * 1e9 + wright_closed = np.array([12.619, 44.708, 114.904, 173.709, 258.82, + 358.992]) * 1e9 + # The wright calculator isn't very accurate, so we use a rather + # modest precision. 
+ ftemp = lambda x: c_flat.differential_comoving_volume(x).value + otemp = lambda x: c_open.differential_comoving_volume(x).value + ctemp = lambda x: c_closed.differential_comoving_volume(x).value + # Multiply by solid_angle (4 * pi) + assert np.allclose(np.array([4.0 * np.pi * quad(ftemp, 0, redshift)[0] + for redshift in redshifts]), + wright_flat, rtol=1e-2) + assert np.allclose(np.array([4.0 * np.pi * quad(otemp, 0, redshift)[0] + for redshift in redshifts]), + wright_open, rtol=1e-2) + assert np.allclose(np.array([4.0 * np.pi * quad(ctemp, 0, redshift)[0] + for redshift in redshifts]), + wright_closed, rtol=1e-2) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_flat_open_closed_icosmo(): + """ Test against the tabulated values generated from icosmo.org + with three example cosmologies (flat, open and closed). + """ + + cosmo_flat = """\ +# from icosmo (icosmo.org) +# Om 0.3 w -1 h 0.7 Ol 0.7 +# z comoving_transvers_dist angular_diameter_dist luminosity_dist + 0.0000000 0.0000000 0.0000000 0.0000000 + 0.16250000 669.77536 576.15085 778.61386 + 0.32500000 1285.5964 970.26143 1703.4152 + 0.50000000 1888.6254 1259.0836 2832.9381 + 0.66250000 2395.5489 1440.9317 3982.6000 + 0.82500000 2855.5732 1564.6976 5211.4210 + 1.0000000 3303.8288 1651.9144 6607.6577 + 1.1625000 3681.1867 1702.2829 7960.5663 + 1.3250000 4025.5229 1731.4077 9359.3408 + 1.5000000 4363.8558 1745.5423 10909.640 + 1.6625000 4651.4830 1747.0359 12384.573 + 1.8250000 4916.5970 1740.3883 13889.387 + 2.0000000 5179.8621 1726.6207 15539.586 + 2.1625000 5406.0204 1709.4136 17096.540 + 2.3250000 5616.5075 1689.1752 18674.888 + 2.5000000 5827.5418 1665.0120 20396.396 + 2.6625000 6010.4886 1641.0890 22013.414 + 2.8250000 6182.1688 1616.2533 23646.796 + 3.0000000 6355.6855 1588.9214 25422.742 + 3.1625000 6507.2491 1563.3031 27086.425 + 3.3250000 6650.4520 1537.6768 28763.205 + 3.5000000 6796.1499 1510.2555 30582.674 + 3.6625000 6924.2096 1485.0852 32284.127 + 3.8250000 7045.8876 1460.2876 33996.408 + 
4.0000000 7170.3664 1434.0733 35851.832 + 4.1625000 7280.3423 1410.2358 37584.767 + 4.3250000 7385.3277 1386.9160 39326.870 + 4.5000000 7493.2222 1362.4040 41212.722 + 4.6625000 7588.9589 1340.2135 42972.480 +""" + + cosmo_open = """\ +# from icosmo (icosmo.org) +# Om 0.3 w -1 h 0.7 Ol 0.1 +# z comoving_transvers_dist angular_diameter_dist luminosity_dist + 0.0000000 0.0000000 0.0000000 0.0000000 + 0.16250000 643.08185 553.18868 747.58265 + 0.32500000 1200.9858 906.40441 1591.3062 + 0.50000000 1731.6262 1154.4175 2597.4393 + 0.66250000 2174.3252 1307.8648 3614.8157 + 0.82500000 2578.7616 1413.0201 4706.2399 + 1.0000000 2979.3460 1489.6730 5958.6920 + 1.1625000 3324.2002 1537.2024 7188.5829 + 1.3250000 3646.8432 1568.5347 8478.9104 + 1.5000000 3972.8407 1589.1363 9932.1017 + 1.6625000 4258.1131 1599.2913 11337.226 + 1.8250000 4528.5346 1603.0211 12793.110 + 2.0000000 4804.9314 1601.6438 14414.794 + 2.1625000 5049.2007 1596.5852 15968.097 + 2.3250000 5282.6693 1588.7727 17564.875 + 2.5000000 5523.0914 1578.0261 19330.820 + 2.6625000 5736.9813 1566.4113 21011.694 + 2.8250000 5942.5803 1553.6158 22730.370 + 3.0000000 6155.4289 1538.8572 24621.716 + 3.1625000 6345.6997 1524.4924 26413.975 + 3.3250000 6529.3655 1509.6799 28239.506 + 3.5000000 6720.2676 1493.3928 30241.204 + 3.6625000 6891.5474 1478.0799 32131.840 + 3.8250000 7057.4213 1462.6780 34052.058 + 4.0000000 7230.3723 1446.0745 36151.862 + 4.1625000 7385.9998 1430.7021 38130.224 + 4.3250000 7537.1112 1415.4199 40135.117 + 4.5000000 7695.0718 1399.1040 42322.895 + 4.6625000 7837.5510 1384.1150 44380.133 +""" + + cosmo_closed = """\ +# from icosmo (icosmo.org) +# Om 2 w -1 h 0.7 Ol 0.1 +# z comoving_transvers_dist angular_diameter_dist luminosity_dist + 0.0000000 0.0000000 0.0000000 0.0000000 + 0.16250000 601.80160 517.67879 699.59436 + 0.32500000 1057.9502 798.45297 1401.7840 + 0.50000000 1438.2161 958.81076 2157.3242 + 0.66250000 1718.6778 1033.7912 2857.3019 + 0.82500000 1948.2400 1067.5288 3555.5381 + 1.0000000 
2152.7954 1076.3977 4305.5908 + 1.1625000 2312.3427 1069.2914 5000.4410 + 1.3250000 2448.9755 1053.3228 5693.8681 + 1.5000000 2575.6795 1030.2718 6439.1988 + 1.6625000 2677.9671 1005.8092 7130.0873 + 1.8250000 2768.1157 979.86398 7819.9270 + 2.0000000 2853.9222 951.30739 8561.7665 + 2.1625000 2924.8116 924.84161 9249.7167 + 2.3250000 2988.5333 898.80701 9936.8732 + 2.5000000 3050.3065 871.51614 10676.073 + 2.6625000 3102.1909 847.01459 11361.774 + 2.8250000 3149.5043 823.39982 12046.854 + 3.0000000 3195.9966 798.99915 12783.986 + 3.1625000 3235.5334 777.30533 13467.908 + 3.3250000 3271.9832 756.52790 14151.327 + 3.5000000 3308.1758 735.15017 14886.791 + 3.6625000 3339.2521 716.19347 15569.263 + 3.8250000 3368.1489 698.06195 16251.319 + 4.0000000 3397.0803 679.41605 16985.401 + 4.1625000 3422.1142 662.87926 17666.664 + 4.3250000 3445.5542 647.05243 18347.576 + 4.5000000 3469.1805 630.76008 19080.493 + 4.6625000 3489.7534 616.29199 19760.729 +""" + + redshifts, dm, da, dl = np.loadtxt(StringIO(cosmo_flat), unpack=1) + cosmo = core.LambdaCDM(H0=70, Om0=0.3, Ode0=0.70, Tcmb0=0.0) + assert np.allclose(cosmo.comoving_transverse_distance(redshifts).value, dm) + assert np.allclose(cosmo.angular_diameter_distance(redshifts).value, da) + assert np.allclose(cosmo.luminosity_distance(redshifts).value, dl) + + redshifts, dm, da, dl = np.loadtxt(StringIO(cosmo_open), unpack=1) + cosmo = core.LambdaCDM(H0=70, Om0=0.3, Ode0=0.1, Tcmb0=0.0) + assert np.allclose(cosmo.comoving_transverse_distance(redshifts).value, dm) + assert np.allclose(cosmo.angular_diameter_distance(redshifts).value, da) + assert np.allclose(cosmo.luminosity_distance(redshifts).value, dl) + + redshifts, dm, da, dl = np.loadtxt(StringIO(cosmo_closed), unpack=1) + cosmo = core.LambdaCDM(H0=70, Om0=2, Ode0=0.1, Tcmb0=0.0) + assert np.allclose(cosmo.comoving_transverse_distance(redshifts).value, dm) + assert np.allclose(cosmo.angular_diameter_distance(redshifts).value, da) + assert 
np.allclose(cosmo.luminosity_distance(redshifts).value, dl) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_integral(): + # Test integer vs. floating point inputs + cosmo = core.LambdaCDM(H0=73.2, Om0=0.3, Ode0=0.50) + assert np.allclose(cosmo.comoving_distance(3), + cosmo.comoving_distance(3.0), rtol=1e-7) + assert np.allclose(cosmo.comoving_distance([1, 2, 3, 5]), + cosmo.comoving_distance([1.0, 2.0, 3.0, 5.0]), + rtol=1e-7) + assert np.allclose(cosmo.efunc(6), cosmo.efunc(6.0), rtol=1e-7) + assert np.allclose(cosmo.efunc([1, 2, 6]), + cosmo.efunc([1.0, 2.0, 6.0]), rtol=1e-7) + assert np.allclose(cosmo.inv_efunc([1, 2, 6]), + cosmo.inv_efunc([1.0, 2.0, 6.0]), rtol=1e-7) + + +def test_current(): + with core.default_cosmology.set('WMAP7'): + cosmo = core.get_current() + assert cosmo == core.WMAP7 + with core.default_cosmology.set('WMAP5'): + core.set_current('WMAP5') + assert core.get_current() == core.WMAP5 + with core.default_cosmology.set('WMAP9'): + core.set_current('WMAP9') + assert core.get_current() == core.WMAP9 + with core.default_cosmology.set('Planck13'): + core.set_current('Planck13') + assert core.get_current() == core.Planck13 + + +def test_wz(): + cosmo = core.LambdaCDM(H0=70, Om0=0.3, Ode0=0.70) + assert np.allclose(cosmo.w([0.1, 0.2, 0.5, 1.5, 2.5, 11.5]), + [-1., -1, -1, -1, -1, -1]) + + cosmo = core.wCDM(H0=70, Om0=0.3, Ode0=0.70, w0=-0.5) + assert np.allclose(cosmo.w([0.1, 0.2, 0.5, 1.5, 2.5, 11.5]), + [-0.5, -0.5, -0.5, -0.5, -0.5, -0.5]) + assert np.allclose(cosmo.w0, -0.5) + + cosmo = core.w0wzCDM(H0=70, Om0=0.3, Ode0=0.70, w0=-1, wz=0.5) + assert np.allclose(cosmo.w([0.0, 0.5, 1.0, 1.5, 2.3]), + [-1.0, -0.75, -0.5, -0.25, 0.15]) + assert np.allclose(cosmo.w0, -1.0) + assert np.allclose(cosmo.wz, 0.5) + + cosmo = core.w0waCDM(H0=70, Om0=0.3, Ode0=0.70, w0=-1, wa=-0.5) + assert np.allclose(cosmo.w0, -1.0) + assert np.allclose(cosmo.wa, -0.5) + assert np.allclose(cosmo.w([0.0, 0.5, 1.0, 1.5, 2.3]), + [-1, -1.16666667, -1.25, -1.3, 
-1.34848485]) + + cosmo = core.wpwaCDM(H0=70, Om0=0.3, Ode0=0.70, wp=-0.9, + wa=0.2, zp=0.5) + assert np.allclose(cosmo.wp, -0.9) + assert np.allclose(cosmo.wa, 0.2) + assert np.allclose(cosmo.zp, 0.5) + assert np.allclose(cosmo.w([0.1, 0.2, 0.5, 1.5, 2.5, 11.5]), + [-0.94848485, -0.93333333, -0.9, -0.84666667, + -0.82380952, -0.78266667]) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_de_densityscale(): + cosmo = core.LambdaCDM(H0=70, Om0=0.3, Ode0=0.70) + z = np.array([0.1, 0.2, 0.5, 1.5, 2.5]) + assert np.allclose(cosmo.de_density_scale(z), + [1.0, 1.0, 1.0, 1.0, 1.0]) + # Integer check + assert np.allclose(cosmo.de_density_scale(3), + cosmo.de_density_scale(3.0), rtol=1e-7) + assert np.allclose(cosmo.de_density_scale([1, 2, 3]), + cosmo.de_density_scale([1., 2., 3.]), rtol=1e-7) + + cosmo = core.wCDM(H0=70, Om0=0.3, Ode0=0.60, w0=-0.5) + assert np.allclose(cosmo.de_density_scale(z), + [1.15369, 1.31453, 1.83712, 3.95285, 6.5479], + rtol=1e-4) + assert np.allclose(cosmo.de_density_scale(3), + cosmo.de_density_scale(3.0), rtol=1e-7) + assert np.allclose(cosmo.de_density_scale([1, 2, 3]), + cosmo.de_density_scale([1., 2., 3.]), rtol=1e-7) + + cosmo = core.w0wzCDM(H0=70, Om0=0.3, Ode0=0.50, w0=-1, wz=0.5) + assert np.allclose(cosmo.de_density_scale(z), + [0.746048, 0.5635595, 0.25712378, 0.026664129, + 0.0035916468], rtol=1e-4) + assert np.allclose(cosmo.de_density_scale(3), + cosmo.de_density_scale(3.0), rtol=1e-7) + assert np.allclose(cosmo.de_density_scale([1, 2, 3]), + cosmo.de_density_scale([1., 2., 3.]), rtol=1e-7) + + cosmo = core.w0waCDM(H0=70, Om0=0.3, Ode0=0.70, w0=-1, wa=-0.5) + assert np.allclose(cosmo.de_density_scale(z), + [0.9934201, 0.9767912, 0.897450, + 0.622236, 0.4458753], rtol=1e-4) + assert np.allclose(cosmo.de_density_scale(3), + cosmo.de_density_scale(3.0), rtol=1e-7) + assert np.allclose(cosmo.de_density_scale([1, 2, 3]), + cosmo.de_density_scale([1., 2., 3.]), rtol=1e-7) + + cosmo = core.wpwaCDM(H0=70, Om0=0.3, Ode0=0.70, wp=-0.9, + 
wa=0.2, zp=0.5) + assert np.allclose(cosmo.de_density_scale(z), + [1.012246048, 1.0280102, 1.087439, + 1.324988, 1.565746], rtol=1e-4) + assert np.allclose(cosmo.de_density_scale(3), + cosmo.de_density_scale(3.0), rtol=1e-7) + assert np.allclose(cosmo.de_density_scale([1, 2, 3]), + cosmo.de_density_scale([1., 2., 3.]), rtol=1e-7) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_age(): + # WMAP7 but with Omega_relativisitic = 0 + tcos = core.FlatLambdaCDM(70.4, 0.272, Tcmb0=0.0) + assert np.allclose(tcos.hubble_time.value, 13.889094057856937) + assert np.allclose(tcos.age([1., 5.]).value, [5.97113193, 1.20553129]) + assert np.allclose(tcos.age([1, 5]).value, [5.97113193, 1.20553129]) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_distmod(): + # WMAP7 but with Omega_relativisitic = 0 + tcos = core.FlatLambdaCDM(70.4, 0.272, Tcmb0=0.0) + core.set_current(tcos) + assert np.allclose(tcos.hubble_distance.value, 4258.415596590909) + assert np.allclose(tcos.distmod([1, 5]).value, [44.124857, 48.40167258]) + assert np.allclose(tcos.distmod([1., 5.]).value, [44.124857, 48.40167258]) + assert np.allclose(funcs.distmod([1, 5], cosmo=tcos).value, + [44.124857, 48.40167258]) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_neg_distmod(): + # Cosmology with negative luminosity distances (perfectly okay, + # if obscure) + tcos = core.LambdaCDM(70, 0.2, 1.3, Tcmb0=0) + assert np.allclose(tcos.luminosity_distance([50, 100]).value, + [16612.44047622, -46890.79092244]) + assert np.allclose(tcos.distmod([50, 100]).value, + [46.102167189, 48.355437790944]) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_critical_density(): + # WMAP7 but with Omega_relativisitic = 0 + # These tests will fail if astropy.const starts returning non-mks + # units by default; see the comment at the top of core.py + tcos = core.FlatLambdaCDM(70.4, 0.272, Tcmb0=0.0) + assert np.allclose(tcos.critical_density0.value, + 9.31000324385361e-30) + assert np.allclose(tcos.critical_density0.value, + 
tcos.critical_density(0).value) + assert np.allclose(tcos.critical_density([1, 5]).value, + [2.70362491e-29, 5.53758986e-28]) + assert np.allclose(tcos.critical_density([1., 5.]).value, + [2.70362491e-29, 5.53758986e-28]) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_angular_diameter_distance_z1z2(): + + with pytest.raises(core.CosmologyError): # test neg Ok fail + tcos = core.LambdaCDM(H0=70.4, Om0=0.272, Ode0=0.8, Tcmb0=0.0) + tcos.angular_diameter_distance_z1z2(1, 2) + + tcos = core.FlatLambdaCDM(70.4, 0.272, Tcmb0=0.0) + with pytest.raises(ValueError): # test diff size z1, z2 fail + tcos.angular_diameter_distance_z1z2([1, 2], [3, 4, 5]) + with pytest.raises(ValueError): # test z1 > z2 fail + tcos.angular_diameter_distance_z1z2(4, 3) + # Tests that should actually work + assert np.allclose(tcos.angular_diameter_distance_z1z2(1, 2).value, + 646.22968662822018) + z1 = 0, 0, 1, 0.5, 1 + z2 = 2, 1, 2, 2.5, 1.1 + results = (1760.0628637762106, + 1670.7497657219858, + 646.22968662822018, + 1159.0970895962193, + 115.72768186186921) + + assert np.allclose(tcos.angular_diameter_distance_z1z2(z1, z2).value, + results) + + # Non-flat (positive Ocurv) test + tcos = core.LambdaCDM(H0=70.4, Om0=0.2, Ode0=0.5, Tcmb0=0.0) + assert np.allclose(tcos.angular_diameter_distance_z1z2(1, 2).value, + 620.1175337852428) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_absorption_distance(): + tcos = core.FlatLambdaCDM(70.4, 0.272, Tcmb0=0.0) + assert np.allclose(tcos.absorption_distance([1, 3]), + [1.72576635, 7.98685853]) + assert np.allclose(tcos.absorption_distance([1., 3.]), + [1.72576635, 7.98685853]) + assert np.allclose(tcos.absorption_distance(3), 7.98685853) + assert np.allclose(tcos.absorption_distance(3.), 7.98685853) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_massivenu_basic(): + # Test no neutrinos case + tcos = core.FlatLambdaCDM(70.4, 0.272, Neff=4.05, m_nu=u.Quantity(0, u.eV)) + assert np.allclose(tcos.Neff, 4.05) + assert not tcos.has_massive_nu + mnu 
= tcos.m_nu + assert len(mnu) == 4 + assert mnu.unit == u.eV + assert np.allclose(mnu.value, [0.0, 0.0, 0.0, 0.0]) + assert np.allclose(tcos.nu_relative_density(1.), 0.22710731766 * 4.05, + rtol=1e-6) + assert np.allclose(tcos.nu_relative_density(1), 0.22710731766 * 4.05, + rtol=1e-6) + + # Test basic setting, retrieval of values + tcos = core.FlatLambdaCDM(70.4, 0.272, + m_nu=u.Quantity([0.0, 0.01, 0.02], u.eV)) + assert tcos.has_massive_nu + mnu = tcos.m_nu + assert len(mnu) == 3 + assert mnu.unit == u.eV + assert np.allclose(mnu.value, [0.0, 0.01, 0.02]) + + # All massive neutrinos case + tcos = core.FlatLambdaCDM(70.4, 0.272, m_nu=u.Quantity(0.1, u.eV), + Neff=3.1) + assert np.allclose(tcos.Neff, 3.1) + assert tcos.has_massive_nu + mnu = tcos.m_nu + assert len(mnu) == 3 + assert mnu.unit == u.eV + assert np.allclose(mnu.value, [0.1, 0.1, 0.1]) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_massivenu_density(): + # Testing neutrino density calculation + + # Simple test cosmology, where we compare rho_nu and rho_gamma + # against the exact formula (eq 24/25 of Komatsu et al. 2011) + # computed using Mathematica. The approximation we use for f(y) + # is only good to ~ 0.5% (with some redshift dependence), so that's + # what we test to. + ztest = np.array([0.0, 1.0, 2.0, 10.0, 1000.0]) + nuprefac = 7.0 / 8.0 * (4.0 / 11.0) ** (4.0 / 3.0) + # First try 3 massive neutrinos, all 100 eV -- note this is a universe + # seriously dominated by neutrinos! 
+ tcos = core.FlatLambdaCDM(75.0, 0.25, Tcmb0=3.0, Neff=3, + m_nu=u.Quantity(100.0, u.eV)) + assert tcos.has_massive_nu + assert tcos.Neff == 3 + nurel_exp = nuprefac * tcos.Neff * np.array([171969, 85984.5, 57323, + 15633.5, 171.801]) + assert np.allclose(tcos.nu_relative_density(ztest), nurel_exp, rtol=5e-3) + assert np.allclose(tcos.efunc([0.0, 1.0]), [1.0, 7.46144727668], rtol=5e-3) + + # Next, slightly less massive + tcos = core.FlatLambdaCDM(75.0, 0.25, Tcmb0=3.0, Neff=3, + m_nu=u.Quantity(0.25, u.eV)) + nurel_exp = nuprefac * tcos.Neff * np.array([429.924, 214.964, 143.312, + 39.1005, 1.11086]) + assert np.allclose(tcos.nu_relative_density(ztest), nurel_exp, + rtol=5e-3) + + # For this one also test Onu directly + onu_exp = np.array([0.01890217, 0.05244681, 0.0638236, + 0.06999286, 0.1344951]) + assert np.allclose(tcos.Onu(ztest), onu_exp, rtol=5e-3) + + # And fairly light + tcos = core.FlatLambdaCDM(80.0, 0.30, Tcmb0=3.0, Neff=3, + m_nu=u.Quantity(0.01, u.eV)) + + nurel_exp = nuprefac * tcos.Neff * np.array([17.2347, 8.67345, 5.84348, + 1.90671, 1.00021]) + assert np.allclose(tcos.nu_relative_density(ztest), nurel_exp, + rtol=5e-3) + onu_exp = np.array([0.00066599, 0.00172677, 0.0020732, + 0.00268404, 0.0978313]) + assert np.allclose(tcos.Onu(ztest), onu_exp, rtol=5e-3) + assert np.allclose(tcos.efunc([1.0, 2.0]), [1.76225893, 2.97022048], + rtol=1e-4) + assert np.allclose(tcos.inv_efunc([1.0, 2.0]), [0.5674535, 0.33667534], + rtol=1e-4) + + # Now a mixture of neutrino masses, with non-integer Neff + tcos = core.FlatLambdaCDM(80.0, 0.30, Tcmb0=3.0, Neff=3.04, + m_nu=u.Quantity([0.0, 0.01, 0.25], u.eV)) + nurel_exp = nuprefac * tcos.Neff * np.array([149.386233, 74.87915, 50.0518, + 14.002403, 1.03702333]) + assert np.allclose(tcos.nu_relative_density(ztest), nurel_exp, + rtol=5e-3) + onu_exp = np.array([0.00584959, 0.01493142, 0.01772291, + 0.01963451, 0.10227728]) + assert np.allclose(tcos.Onu(ztest), onu_exp, rtol=5e-3) + + # Integer redshifts + ztest = 
ztest.astype(np.int) + assert np.allclose(tcos.nu_relative_density(ztest), nurel_exp, + rtol=5e-3) + assert np.allclose(tcos.Onu(ztest), onu_exp, rtol=5e-3) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_z_at_value(): + z_at_value = funcs.z_at_value + cosmo = core.Planck13 + assert np.allclose(z_at_value(cosmo.age, 2 * u.Gyr), 3.1981191749374) + assert np.allclose(z_at_value(cosmo.luminosity_distance, 1e4 * u.Mpc), + 1.3685792789133948) + assert np.allclose(z_at_value(cosmo.lookback_time, 7 * u.Gyr), + 0.7951983674601507) + assert np.allclose(z_at_value(cosmo.angular_diameter_distance, 1500*u.Mpc, + zmax=2), 0.681277696252886) + assert np.allclose(z_at_value(cosmo.angular_diameter_distance, 1500*u.Mpc, + zmin=2.5), 3.7914918534022011) + assert np.allclose(z_at_value(cosmo.distmod, 46 * u.mag), + 1.9913870174451891) + + # test behaviour when the solution is outside z limits (should + # raise a CosmologyError) + with pytest.raises(core.CosmologyError): + z_at_value(cosmo.angular_diameter_distance, 1500*u.Mpc, zmax=0.5) + with pytest.raises(core.CosmologyError): + z_at_value(cosmo.angular_diameter_distance, 1500*u.Mpc, zmin=4.) + + +@pytest.mark.skipif('not HAS_SCIPY') +def test_z_at_value_roundtrip(): + """ + Calculate values from a known redshift, and then check that + z_at_value returns the right answer. + """ + z = 0.5 + + skip = ('z_at_value', 'angular_diameter_distance_z1z2', 'CosmologyError', + 'deprecated') + + core.set_current('Planck13') + for name in funcs.__all__: + if name.startswith('_') or name in skip: + continue + f = getattr(funcs, name) + if not hasattr(f, '__call__'): + continue + print('Round-trip testing {0}'.format(name)) + fval = f(z) + # we need zmax here to pick the right solution for + # angular_diameter_distance and related methods. + assert np.allclose(z, funcs.z_at_value(f, fval, zmax=1.5)) + + +def test_default_reset(): + # Check that the default is being reset after tests. 
This test should be + # updated if the default cosmology is updated. + assert core.get_current() == core.WMAP9 diff --git a/astropy/cython_version.py b/astropy/cython_version.py new file mode 100644 index 0000000..9c5ac9f --- /dev/null +++ b/astropy/cython_version.py @@ -0,0 +1,2 @@ +# Generated file; do not modify +cython_version = '0.18' diff --git a/astropy/extern/__init__.py b/astropy/extern/__init__.py new file mode 100644 index 0000000..4c54f84 --- /dev/null +++ b/astropy/extern/__init__.py @@ -0,0 +1,10 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" +This packages contains python packages that are bundled with Astropy but are +external to Astropy, and hence are developed in a separate source tree. Note +that this package is distinct from the /cextern directory of the source code +distribution, as that directory only contains C extension code. + +See the README.rst in this directory of the Astropy source repository for more +details. +""" diff --git a/astropy/extern/bundled/__init__.py b/astropy/extern/bundled/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/astropy/extern/bundled/six.py b/astropy/extern/bundled/six.py new file mode 100644 index 0000000..7ec7f1b --- /dev/null +++ b/astropy/extern/bundled/six.py @@ -0,0 +1,632 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2014 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.5.2" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + # This is a bit ugly, but it avoids running this again. 
+ delattr(obj.__class__, self.name) + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + # Hack around the Django autoreloader. The reloader tries to get + # __file__ or __name__ of every module in sys.modules. This doesn't work + # well if this MovedModule is for an module that is unavailable on this + # machine (like winreg on Unix systems). Thus, we pretend __file__ and + # __name__ don't exist if the module hasn't been loaded yet. See issues + # #51 and #53. + if attr in ("__file__", "__name__") and self.mod not in sys.modules: + raise AttributeError + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + + +class _MovedItems(_LazyModule): + """Lazy loading of moved objects""" + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + 
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + 
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("winreg", "_winreg"), +] +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + sys.modules[__name__ + ".moves." 
+ attr.name] = attr +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +sys.modules[__name__ + ".moves.urllib_parse"] = sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse") + + +class Module_six_moves_urllib_error(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + 
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +sys.modules[__name__ + ".moves.urllib_error"] = sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", 
"urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +sys.modules[__name__ + ".moves.urllib_request"] = sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +sys.modules[__name__ + ".moves.urllib_response"] = sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", 
"urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +sys.modules[__name__ + ".moves.urllib_robotparser"] = sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + parse = sys.modules[__name__ + ".moves.urllib_parse"] + error = sys.modules[__name__ + ".moves.urllib_error"] + request = sys.modules[__name__ + ".moves.urllib_request"] + response = sys.modules[__name__ + ".moves.urllib_response"] + robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"] + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + + +sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" + + _iterkeys = "keys" + _itervalues = "values" + _iteritems = "items" + _iterlists = "lists" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + _iterkeys = "iterkeys" + _itervalues = "itervalues" + _iteritems = "iteritems" + _iterlists = "iterlists" + + +try: + 
advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +def iterkeys(d, **kw): + """Return an iterator over the keys of a dictionary.""" + return iter(getattr(d, _iterkeys)(**kw)) + +def itervalues(d, **kw): + """Return an iterator over the values of a dictionary.""" + return iter(getattr(d, _itervalues)(**kw)) + +def iteritems(d, **kw): + """Return an iterator over the (key, value) pairs of a dictionary.""" + return iter(getattr(d, _iteritems)(**kw)) + +def iterlists(d, **kw): + """Return an iterator over the (key, [values]) pairs of a dictionary.""" + return iter(getattr(d, _iterlists)(**kw)) + + +if PY3: + def b(s): + return s.encode("latin-1") + def u(s): + return s + unichr = chr + if sys.version_info[1] <= 1: + def int2byte(i): + return bytes((i,)) + else: + # This is about 2x faster than the implementation above on 3.2+ + int2byte = operator.methodcaller("to_bytes", 1, "big") + byte2int = operator.itemgetter(0) + indexbytes = 
operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO +else: + def b(s): + return s + # Workaround for standalone backslash + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + def byte2int(bs): + return ord(bs[0]) + def indexbytes(buf, i): + return ord(buf[i]) + def iterbytes(buf): + return (ord(byte) for byte in buf) + import StringIO + StringIO = BytesIO = StringIO.StringIO +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) + +_add_doc(reraise, """Reraise an exception.""") + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + return meta("NewBase", bases, {}) + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper diff --git a/astropy/extern/configobj.py b/astropy/extern/configobj.py new file mode 100644 index 0000000..5a5e39d --- /dev/null +++ b/astropy/extern/configobj.py @@ -0,0 +1,17 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +This module just pulls in the appropriate `configobj` 
package, depending on the +currently installed version of python. + +Also, this should actually never actually show up as a docstring, because +it should get overwritten by the appropriate configobj docstring. +""" +from sys import version_info + +if version_info[0] > 2: + from .configobj_py3 import configobj, validate, __doc__ +else: + from .configobj_py2 import configobj, validate, __doc__ + +del version_info #cleans up the namespace diff --git a/astropy/extern/configobj_py2/__init__.py b/astropy/extern/configobj_py2/__init__.py new file mode 100644 index 0000000..f5a38a0 --- /dev/null +++ b/astropy/extern/configobj_py2/__init__.py @@ -0,0 +1,100 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +This is a copy of the main portions of the `configobj +`_ package. This is used +internally in the Astropy configuration system. The license for configobj is +available in the ``licenses/CONFIGOBJ_LICENSE.rst`` file in the Astropy +source distribution. + +This is the original 4.7.2 version of configobj, which is only compatible with +python 2.x - the 3.x version is in ``astropy/extern/configobj-py3``. +""" + +#this holds the contents of the setup.py file used by configobj +_configobj_setup_dot_py=""" +# setup.py +# Install script for ConfigObj +# Copyright (C) 2005-2010 Michael Foord, Mark Andrews, Nicola Larosa +# E-mail: fuzzyman AT voidspace DOT org DOT uk +# mark AT la-la DOT com +# nico AT tekNico DOT net + +# This software is licensed under the terms of the BSD license. +# http://www.voidspace.org.uk/python/license.shtml + +import sys +from distutils.core import setup +from configobj import __version__ as VERSION + +NAME = 'configobj' + +MODULES = 'configobj', 'validate' + +DESCRIPTION = 'Config file reading, writing and validation.' 
+ +URL = 'http://www.voidspace.org.uk/python/configobj.html' + +DOWNLOAD_URL = "http://www.voidspace.org.uk/downloads/configobj-%s.zip" % VERSION + +LONG_DESCRIPTION = ""#"**ConfigObj** is a simple but powerful config file reader and writer: an *ini +file round tripper*. Its main feature is that it is very easy to use, with a +straightforward programmer's interface and a simple syntax for config files. +It has lots of other features though : + +* Nested sections (subsections), to any level +* List values +* Multiple line values +* Full Unicode support +* String interpolation (substitution) +* Integrated with a powerful validation system + + - including automatic type checking/conversion + - and allowing default values + - repeated sections + +* All comments in the file are preserved +* The order of keys/sections is preserved +* Powerful ``unrepr`` mode for storing/retrieving Python data-types + +| Release 4.7.2 fixes several bugs in 4.7.1 +| Release 4.7.1 fixes a bug with the deprecated options keyword in +| 4.7.0. 
+| Release 4.7.0 improves performance adds features for validation and +| fixes some bugs.""#" + +CLASSIFIERS = [ + 'Development Status :: 6 - Mature', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: BSD License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2.3', + 'Programming Language :: Python :: 2.4', + 'Programming Language :: Python :: 2.5', + 'Programming Language :: Python :: 2.6', + 'Operating System :: OS Independent', + 'Topic :: Software Development :: Libraries', + 'Topic :: Software Development :: Libraries :: Python Modules', +] + +AUTHOR = 'Michael Foord & Nicola Larosa' + +AUTHOR_EMAIL = 'fuzzyman@voidspace.org.uk' + +KEYWORDS = "config, ini, dictionary, application, admin, sysadmin, configuration, validation".split(', ') + + +setup(name=NAME, + version=VERSION, + description=DESCRIPTION, + long_description=LONG_DESCRIPTION, + download_url=DOWNLOAD_URL, + author=AUTHOR, + author_email=AUTHOR_EMAIL, + url=URL, + py_modules=MODULES, + classifiers=CLASSIFIERS, + keywords=KEYWORDS + ) +""".replace('""#"','"""') +#the replacement is necessary because """ would otherwise terminate the string diff --git a/astropy/extern/configobj_py2/configobj.py b/astropy/extern/configobj_py2/configobj.py new file mode 100644 index 0000000..c1f6e6d --- /dev/null +++ b/astropy/extern/configobj_py2/configobj.py @@ -0,0 +1,2468 @@ +# configobj.py +# A config file reader/writer that supports nested sections in config files. 
+# Copyright (C) 2005-2010 Michael Foord, Nicola Larosa +# E-mail: fuzzyman AT voidspace DOT org DOT uk +# nico AT tekNico DOT net + +# ConfigObj 4 +# http://www.voidspace.org.uk/python/configobj.html + +# Released subject to the BSD License +# Please see http://www.voidspace.org.uk/python/license.shtml + +# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml +# For information about bugfixes, updates and support, please join the +# ConfigObj mailing list: +# http://lists.sourceforge.net/lists/listinfo/configobj-develop +# Comments, suggestions and bug reports welcome. + +from __future__ import generators + +import os +import re +import sys + +from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE + + +# imported lazily to avoid startup performance hit if it isn't used +compiler = None + +# A dictionary mapping BOM to +# the encoding to decode with, and what to set the +# encoding attribute to. +BOMS = { + BOM_UTF8: ('utf_8', None), + BOM_UTF16_BE: ('utf16_be', 'utf_16'), + BOM_UTF16_LE: ('utf16_le', 'utf_16'), + BOM_UTF16: ('utf_16', 'utf_16'), + } +# All legal variants of the BOM codecs. +# TODO: the list of aliases is not meant to be exhaustive, is there a +# better way ? +BOM_LIST = { + 'utf_16': 'utf_16', + 'u16': 'utf_16', + 'utf16': 'utf_16', + 'utf-16': 'utf_16', + 'utf16_be': 'utf16_be', + 'utf_16_be': 'utf16_be', + 'utf-16be': 'utf16_be', + 'utf16_le': 'utf16_le', + 'utf_16_le': 'utf16_le', + 'utf-16le': 'utf16_le', + 'utf_8': 'utf_8', + 'u8': 'utf_8', + 'utf': 'utf_8', + 'utf8': 'utf_8', + 'utf-8': 'utf_8', + } + +# Map of encodings to the BOM to write. 
+BOM_SET = { + 'utf_8': BOM_UTF8, + 'utf_16': BOM_UTF16, + 'utf16_be': BOM_UTF16_BE, + 'utf16_le': BOM_UTF16_LE, + None: BOM_UTF8 + } + + +def match_utf8(encoding): + return BOM_LIST.get(encoding.lower()) == 'utf_8' + + +# Quote strings used for writing values +squot = "'%s'" +dquot = '"%s"' +noquot = "%s" +wspace_plus = ' \r\n\v\t\'"' +tsquot = '"""%s"""' +tdquot = "'''%s'''" + +# Sentinel for use in getattr calls to replace hasattr +MISSING = object() + +__version__ = '4.7.2' + +try: + any +except NameError: + def any(iterable): + for entry in iterable: + if entry: + return True + return False + + +__all__ = ( + '__version__', + 'DEFAULT_INDENT_TYPE', + 'DEFAULT_INTERPOLATION', + 'ConfigObjError', + 'NestingError', + 'ParseError', + 'DuplicateError', + 'ConfigspecError', + 'ConfigObj', + 'SimpleVal', + 'InterpolationError', + 'InterpolationLoopError', + 'MissingInterpolationOption', + 'RepeatSectionError', + 'ReloadError', + 'UnreprError', + 'UnknownType', + 'flatten_errors', + 'get_extra_values' +) + +DEFAULT_INTERPOLATION = 'configparser' +DEFAULT_INDENT_TYPE = ' ' +MAX_INTERPOL_DEPTH = 10 + +OPTION_DEFAULTS = { + 'interpolation': True, + 'raise_errors': False, + 'list_values': True, + 'create_empty': False, + 'file_error': False, + 'configspec': None, + 'stringify': True, + # option may be set to one of ('', ' ', '\t') + 'indent_type': None, + 'encoding': None, + 'default_encoding': None, + 'unrepr': False, + 'write_empty_values': False, +} + + + +def getObj(s): + global compiler + if compiler is None: + import compiler + s = "a=" + s + p = compiler.parse(s) + return p.getChildren()[1].getChildren()[0].getChildren()[1] + + +class UnknownType(Exception): + pass + + +class Builder(object): + + def build(self, o): + m = getattr(self, 'build_' + o.__class__.__name__, None) + if m is None: + raise UnknownType(o.__class__.__name__) + return m(o) + + def build_List(self, o): + return map(self.build, o.getChildren()) + + def build_Const(self, o): + return o.value + + 
def build_Dict(self, o): + d = {} + i = iter(map(self.build, o.getChildren())) + for el in i: + d[el] = i.next() + return d + + def build_Tuple(self, o): + return tuple(self.build_List(o)) + + def build_Name(self, o): + if o.name == 'None': + return None + if o.name == 'True': + return True + if o.name == 'False': + return False + + # An undefined Name + raise UnknownType('Undefined Name') + + def build_Add(self, o): + real, imag = map(self.build_Const, o.getChildren()) + try: + real = float(real) + except TypeError: + raise UnknownType('Add') + if not isinstance(imag, complex) or imag.real != 0.0: + raise UnknownType('Add') + return real+imag + + def build_Getattr(self, o): + parent = self.build(o.expr) + return getattr(parent, o.attrname) + + def build_UnarySub(self, o): + return -self.build_Const(o.getChildren()[0]) + + def build_UnaryAdd(self, o): + return self.build_Const(o.getChildren()[0]) + + +_builder = Builder() + + +def unrepr(s): + if not s: + return s + return _builder.build(getObj(s)) + + + +class ConfigObjError(SyntaxError): + """ + This is the base class for all errors that ConfigObj raises. + It is a subclass of SyntaxError. + """ + def __init__(self, message='', line_number=None, line=''): + self.line = line + self.line_number = line_number + SyntaxError.__init__(self, message) + + +class NestingError(ConfigObjError): + """ + This error indicates a level of nesting that doesn't match. + """ + + +class ParseError(ConfigObjError): + """ + This error indicates that a line is badly written. + It is neither a valid ``key = value`` line, + nor a valid section marker line. + """ + + +class ReloadError(IOError): + """ + A 'reload' operation failed. + This exception is a subclass of ``IOError``. + """ + def __init__(self): + IOError.__init__(self, 'reload failed, filename is not set.') + + +class DuplicateError(ConfigObjError): + """ + The keyword or section specified already exists. 
+ """ + + +class ConfigspecError(ConfigObjError): + """ + An error occured whilst parsing a configspec. + """ + + +class InterpolationError(ConfigObjError): + """Base class for the two interpolation errors.""" + + +class InterpolationLoopError(InterpolationError): + """Maximum interpolation depth exceeded in string interpolation.""" + + def __init__(self, option): + InterpolationError.__init__( + self, + 'interpolation loop detected in value "%s".' % option) + + +class RepeatSectionError(ConfigObjError): + """ + This error indicates additional sections in a section with a + ``__many__`` (repeated) section. + """ + + +class MissingInterpolationOption(InterpolationError): + """A value specified for interpolation was missing.""" + def __init__(self, option): + msg = 'missing option "%s" in interpolation.' % option + InterpolationError.__init__(self, msg) + + +class UnreprError(ConfigObjError): + """An error parsing in unrepr mode.""" + + + +class InterpolationEngine(object): + """ + A helper class to help perform string interpolation. + + This class is an abstract base class; its descendants perform + the actual work. + """ + + # compiled regexp to use in self.interpolate() + _KEYCRE = re.compile(r"%\(([^)]*)\)s") + _cookie = '%' + + def __init__(self, section): + # the Section instance that "owns" this engine + self.section = section + + + def interpolate(self, key, value): + # short-cut + if not self._cookie in value: + return value + + def recursive_interpolate(key, value, section, backtrail): + """The function that does the actual work. + + ``value``: the string we're trying to interpolate. + ``section``: the section in which that string was found + ``backtrail``: a dict to keep track of where we've been, + to detect and prevent infinite recursion loops + + This is similar to a depth-first-search algorithm. + """ + # Have we been here already? 
+ if (key, section.name) in backtrail: + # Yes - infinite loop detected + raise InterpolationLoopError(key) + # Place a marker on our backtrail so we won't come back here again + backtrail[(key, section.name)] = 1 + + # Now start the actual work + match = self._KEYCRE.search(value) + while match: + # The actual parsing of the match is implementation-dependent, + # so delegate to our helper function + k, v, s = self._parse_match(match) + if k is None: + # That's the signal that no further interpolation is needed + replacement = v + else: + # Further interpolation may be needed to obtain final value + replacement = recursive_interpolate(k, v, s, backtrail) + # Replace the matched string with its final value + start, end = match.span() + value = ''.join((value[:start], replacement, value[end:])) + new_search_start = start + len(replacement) + # Pick up the next interpolation key, if any, for next time + # through the while loop + match = self._KEYCRE.search(value, new_search_start) + + # Now safe to come back here again; remove marker from backtrail + del backtrail[(key, section.name)] + + return value + + # Back in interpolate(), all we have to do is kick off the recursive + # function with appropriate starting values + value = recursive_interpolate(key, value, self.section, {}) + return value + + + def _fetch(self, key): + """Helper function to fetch values from owning section. + + Returns a 2-tuple: the value, and the section where it was found. + """ + # switch off interpolation before we try and fetch anything ! 
+ save_interp = self.section.main.interpolation + self.section.main.interpolation = False + + # Start at section that "owns" this InterpolationEngine + current_section = self.section + while True: + # try the current section first + val = current_section.get(key) + if val is not None and not isinstance(val, Section): + break + # try "DEFAULT" next + val = current_section.get('DEFAULT', {}).get(key) + if val is not None and not isinstance(val, Section): + break + # move up to parent and try again + # top-level's parent is itself + if current_section.parent is current_section: + # reached top level, time to give up + break + current_section = current_section.parent + + # restore interpolation to previous value before returning + self.section.main.interpolation = save_interp + if val is None: + raise MissingInterpolationOption(key) + return val, current_section + + + def _parse_match(self, match): + """Implementation-dependent helper function. + + Will be passed a match object corresponding to the interpolation + key we just found (e.g., "%(foo)s" or "$foo"). Should look up that + key in the appropriate config file section (using the ``_fetch()`` + helper function) and return a 3-tuple: (key, value, section) + + ``key`` is the name of the key we're looking for + ``value`` is the value found for that key + ``section`` is a reference to the section where it was found + + ``key`` and ``section`` should be None if no further + interpolation should be performed on the resulting value + (e.g., if we interpolated "$$" and returned "$"). 
+ """ + raise NotImplementedError() + + + +class ConfigParserInterpolation(InterpolationEngine): + """Behaves like ConfigParser.""" + _cookie = '%' + _KEYCRE = re.compile(r"%\(([^)]*)\)s") + + def _parse_match(self, match): + key = match.group(1) + value, section = self._fetch(key) + return key, value, section + + + +class TemplateInterpolation(InterpolationEngine): + """Behaves like string.Template.""" + _cookie = '$' + _delimiter = '$' + _KEYCRE = re.compile(r""" + \$(?: + (?P\$) | # Two $ signs + (?P[_a-z][_a-z0-9]*) | # $name format + {(?P[^}]*)} # ${name} format + ) + """, re.IGNORECASE | re.VERBOSE) + + def _parse_match(self, match): + # Valid name (in or out of braces): fetch value from section + key = match.group('named') or match.group('braced') + if key is not None: + value, section = self._fetch(key) + return key, value, section + # Escaped delimiter (e.g., $$): return single delimiter + if match.group('escaped') is not None: + # Return None for key and section to indicate it's time to stop + return None, self._delimiter, None + # Anything else: ignore completely, just return it unchanged + return None, match.group(), None + + +interpolation_engines = { + 'configparser': ConfigParserInterpolation, + 'template': TemplateInterpolation, +} + + +def __newobj__(cls, *args): + # Hack for pickle + return cls.__new__(cls, *args) + +class Section(dict): + """ + A dictionary-like object that represents a section in a config file. + + It does string interpolation if the 'interpolation' attribute + of the 'main' object is set to True. + + Interpolation is tried first from this object, then from the 'DEFAULT' + section of this object, next from the parent and its 'DEFAULT' section, + and so on until the main object is reached. + + A Section will behave like an ordered dictionary - following the + order of the ``scalars`` and ``sections`` attributes. + You can use this to change the order of members. + + Iteration follows the order: scalars, then sections. 
+ """ + + + def __setstate__(self, state): + dict.update(self, state[0]) + self.__dict__.update(state[1]) + + def __reduce__(self): + state = (dict(self), self.__dict__) + return (__newobj__, (self.__class__,), state) + + + def __init__(self, parent, depth, main, indict=None, name=None): + """ + * parent is the section above + * depth is the depth level of this section + * main is the main ConfigObj + * indict is a dictionary to initialise the section with + """ + if indict is None: + indict = {} + dict.__init__(self) + # used for nesting level *and* interpolation + self.parent = parent + # used for the interpolation attribute + self.main = main + # level of nesting depth of this Section + self.depth = depth + # purely for information + self.name = name + # + self._initialise() + # we do this explicitly so that __setitem__ is used properly + # (rather than just passing to ``dict.__init__``) + for entry, value in indict.iteritems(): + self[entry] = value + + + def _initialise(self): + # the sequence of scalar values in this Section + self.scalars = [] + # the sequence of sections in this Section + self.sections = [] + # for comments :-) + self.comments = {} + self.inline_comments = {} + # the configspec + self.configspec = None + # for defaults + self.defaults = [] + self.default_values = {} + self.extra_values = [] + self._created = False + + + def _interpolate(self, key, value): + try: + # do we already have an interpolation engine? + engine = self._interpolation_engine + except AttributeError: + # not yet: first time running _interpolate(), so pick the engine + name = self.main.interpolation + if name == True: # note that "if name:" would be incorrect here + # backwards-compatibility: interpolation=True means use default + name = DEFAULT_INTERPOLATION + name = name.lower() # so that "Template", "template", etc. 
all work + class_ = interpolation_engines.get(name, None) + if class_ is None: + # invalid value for self.main.interpolation + self.main.interpolation = False + return value + else: + # save reference to engine so we don't have to do this again + engine = self._interpolation_engine = class_(self) + # let the engine do the actual work + return engine.interpolate(key, value) + + + def __getitem__(self, key): + """Fetch the item and do string interpolation.""" + val = dict.__getitem__(self, key) + if self.main.interpolation: + if isinstance(val, basestring): + return self._interpolate(key, val) + if isinstance(val, list): + def _check(entry): + if isinstance(entry, basestring): + return self._interpolate(key, entry) + return entry + new = [_check(entry) for entry in val] + if new != val: + return new + return val + + + def __setitem__(self, key, value, unrepr=False): + """ + Correctly set a value. + + Making dictionary values Section instances. + (We have to special case 'Section' instances - which are also dicts) + + Keys must be strings. + Values need only be strings (or lists of strings) if + ``main.stringify`` is set. + + ``unrepr`` must be set when setting a value to a dictionary, without + creating a new sub-section. + """ + if not isinstance(key, basestring): + raise ValueError('The key "%s" is not a string.' 
% key) + + # add the comment + if key not in self.comments: + self.comments[key] = [] + self.inline_comments[key] = '' + # remove the entry from defaults + if key in self.defaults: + self.defaults.remove(key) + # + if isinstance(value, Section): + if key not in self: + self.sections.append(key) + dict.__setitem__(self, key, value) + elif isinstance(value, dict) and not unrepr: + # First create the new depth level, + # then create the section + if key not in self: + self.sections.append(key) + new_depth = self.depth + 1 + dict.__setitem__( + self, + key, + Section( + self, + new_depth, + self.main, + indict=value, + name=key)) + else: + if key not in self: + self.scalars.append(key) + if not self.main.stringify: + if isinstance(value, basestring): + pass + elif isinstance(value, (list, tuple)): + for entry in value: + if not isinstance(entry, basestring): + raise TypeError('Value is not a string "%s".' % entry) + else: + raise TypeError('Value is not a string "%s".' % value) + dict.__setitem__(self, key, value) + + + def __delitem__(self, key): + """Remove items from the sequence when deleting.""" + dict. __delitem__(self, key) + if key in self.scalars: + self.scalars.remove(key) + else: + self.sections.remove(key) + del self.comments[key] + del self.inline_comments[key] + + + def get(self, key, default=None): + """A version of ``get`` that doesn't bypass string interpolation.""" + try: + return self[key] + except KeyError: + return default + + + def update(self, indict): + """ + A version of update that uses our ``__setitem__``. + """ + for entry in indict: + self[entry] = indict[entry] + + + def pop(self, key, default=MISSING): + """ + 'D.pop(k[,d]) -> v, remove specified key and return the corresponding value. 
+ If key is not found, d is returned if given, otherwise KeyError is raised' + """ + try: + val = self[key] + except KeyError: + if default is MISSING: + raise + val = default + else: + del self[key] + return val + + + def popitem(self): + """Pops the first (key,val)""" + sequence = (self.scalars + self.sections) + if not sequence: + raise KeyError(": 'popitem(): dictionary is empty'") + key = sequence[0] + val = self[key] + del self[key] + return key, val + + + def clear(self): + """ + A version of clear that also affects scalars/sections + Also clears comments and configspec. + + Leaves other attributes alone : + depth/main/parent are not affected + """ + dict.clear(self) + self.scalars = [] + self.sections = [] + self.comments = {} + self.inline_comments = {} + self.configspec = None + self.defaults = [] + self.extra_values = [] + + + def setdefault(self, key, default=None): + """A version of setdefault that sets sequence if appropriate.""" + try: + return self[key] + except KeyError: + self[key] = default + return self[key] + + + def items(self): + """D.items() -> list of D's (key, value) pairs, as 2-tuples""" + return zip((self.scalars + self.sections), self.values()) + + + def keys(self): + """D.keys() -> list of D's keys""" + return (self.scalars + self.sections) + + + def values(self): + """D.values() -> list of D's values""" + return [self[key] for key in (self.scalars + self.sections)] + + + def iteritems(self): + """D.iteritems() -> an iterator over the (key, value) items of D""" + return iter(self.items()) + + + def iterkeys(self): + """D.iterkeys() -> an iterator over the keys of D""" + return iter((self.scalars + self.sections)) + + __iter__ = iterkeys + + + def itervalues(self): + """D.itervalues() -> an iterator over the values of D""" + return iter(self.values()) + + + def __repr__(self): + """x.__repr__() <==> repr(x)""" + def _getval(key): + try: + return self[key] + except MissingInterpolationOption: + return dict.__getitem__(self, key) + return 
'{%s}' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key)))) + for key in (self.scalars + self.sections)]) + + __str__ = __repr__ + __str__.__doc__ = "x.__str__() <==> str(x)" + + + # Extra methods - not in a normal dictionary + + def dict(self): + """ + Return a deepcopy of self as a dictionary. + + All members that are ``Section`` instances are recursively turned to + ordinary dictionaries - by calling their ``dict`` method. + + >>> n = a.dict() + >>> n == a + 1 + >>> n is a + 0 + """ + newdict = {} + for entry in self: + this_entry = self[entry] + if isinstance(this_entry, Section): + this_entry = this_entry.dict() + elif isinstance(this_entry, list): + # create a copy rather than a reference + this_entry = list(this_entry) + elif isinstance(this_entry, tuple): + # create a copy rather than a reference + this_entry = tuple(this_entry) + newdict[entry] = this_entry + return newdict + + + def merge(self, indict): + """ + A recursive update - useful for merging config files. + + >>> a = '''[section1] + ... option1 = True + ... [[subsection]] + ... more_options = False + ... # end of file'''.splitlines() + >>> b = '''# File is user.ini + ... [section1] + ... option1 = False + ... # end of file'''.splitlines() + >>> c1 = ConfigObj(b) + >>> c2 = ConfigObj(a) + >>> c2.merge(c1) + >>> c2 + ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}}) + """ + for key, val in indict.items(): + if (key in self and isinstance(self[key], dict) and + isinstance(val, dict)): + self[key].merge(val) + else: + self[key] = val + + + def rename(self, oldkey, newkey): + """ + Change a keyname to another, without changing position in sequence. + + Implemented so that transformations can be made on keys, + as well as on values. (used by encode and decode) + + Also renames comments. + """ + if oldkey in self.scalars: + the_list = self.scalars + elif oldkey in self.sections: + the_list = self.sections + else: + raise KeyError('Key "%s" not found.' 
% oldkey) + pos = the_list.index(oldkey) + # + val = self[oldkey] + dict.__delitem__(self, oldkey) + dict.__setitem__(self, newkey, val) + the_list.remove(oldkey) + the_list.insert(pos, newkey) + comm = self.comments[oldkey] + inline_comment = self.inline_comments[oldkey] + del self.comments[oldkey] + del self.inline_comments[oldkey] + self.comments[newkey] = comm + self.inline_comments[newkey] = inline_comment + + + def walk(self, function, raise_errors=True, + call_on_sections=False, **keywargs): + """ + Walk every member and call a function on the keyword and value. + + Return a dictionary of the return values + + If the function raises an exception, raise the errror + unless ``raise_errors=False``, in which case set the return value to + ``False``. + + Any unrecognised keyword arguments you pass to walk, will be pased on + to the function you pass in. + + Note: if ``call_on_sections`` is ``True`` then - on encountering a + subsection, *first* the function is called for the *whole* subsection, + and then recurses into it's members. This means your function must be + able to handle strings, dictionaries and lists. This allows you + to change the key of subsections as well as for ordinary members. The + return value when called on the whole subsection has to be discarded. + + See the encode and decode methods for examples, including functions. + + .. admonition:: caution + + You can use ``walk`` to transform the names of members of a section + but you mustn't add or delete members. + + >>> config = '''[XXXXsection] + ... XXXXkey = XXXXvalue'''.splitlines() + >>> cfg = ConfigObj(config) + >>> cfg + ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}}) + >>> def transform(section, key): + ... val = section[key] + ... newkey = key.replace('XXXX', 'CLIENT1') + ... section.rename(key, newkey) + ... if isinstance(val, (tuple, list, dict)): + ... pass + ... else: + ... val = val.replace('XXXX', 'CLIENT1') + ... 
section[newkey] = val + >>> cfg.walk(transform, call_on_sections=True) + {'CLIENT1section': {'CLIENT1key': None}} + >>> cfg + ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}}) + """ + out = {} + # scalars first + for i in range(len(self.scalars)): + entry = self.scalars[i] + try: + val = function(self, entry, **keywargs) + # bound again in case name has changed + entry = self.scalars[i] + out[entry] = val + except Exception: + if raise_errors: + raise + else: + entry = self.scalars[i] + out[entry] = False + # then sections + for i in range(len(self.sections)): + entry = self.sections[i] + if call_on_sections: + try: + function(self, entry, **keywargs) + except Exception: + if raise_errors: + raise + else: + entry = self.sections[i] + out[entry] = False + # bound again in case name has changed + entry = self.sections[i] + # previous result is discarded + out[entry] = self[entry].walk( + function, + raise_errors=raise_errors, + call_on_sections=call_on_sections, + **keywargs) + return out + + + def as_bool(self, key): + """ + Accepts a key as input. The corresponding value must be a string or + the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to + retain compatibility with Python 2.2. + + If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns + ``True``. + + If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns + ``False``. + + ``as_bool`` is not case sensitive. + + Any other input will raise a ``ValueError``. + + >>> a = ConfigObj() + >>> a['a'] = 'fish' + >>> a.as_bool('a') + Traceback (most recent call last): + ValueError: Value "fish" is neither True nor False + >>> a['b'] = 'True' + >>> a.as_bool('b') + 1 + >>> a['b'] = 'off' + >>> a.as_bool('b') + 0 + """ + val = self[key] + if val == True: + return True + elif val == False: + return False + else: + try: + if not isinstance(val, basestring): + # TODO: Why do we raise a KeyError here? 
+ raise KeyError() + else: + return self.main._bools[val.lower()] + except KeyError: + raise ValueError('Value "%s" is neither True nor False' % val) + + + def as_int(self, key): + """ + A convenience method which coerces the specified value to an integer. + + If the value is an invalid literal for ``int``, a ``ValueError`` will + be raised. + + >>> a = ConfigObj() + >>> a['a'] = 'fish' + >>> a.as_int('a') + Traceback (most recent call last): + ValueError: invalid literal for int() with base 10: 'fish' + >>> a['b'] = '1' + >>> a.as_int('b') + 1 + >>> a['b'] = '3.2' + >>> a.as_int('b') + Traceback (most recent call last): + ValueError: invalid literal for int() with base 10: '3.2' + """ + return int(self[key]) + + + def as_float(self, key): + """ + A convenience method which coerces the specified value to a float. + + If the value is an invalid literal for ``float``, a ``ValueError`` will + be raised. + + >>> a = ConfigObj() + >>> a['a'] = 'fish' + >>> a.as_float('a') + Traceback (most recent call last): + ValueError: invalid literal for float(): fish + >>> a['b'] = '1' + >>> a.as_float('b') + 1.0 + >>> a['b'] = '3.2' + >>> a.as_float('b') + 3.2000000000000002 + """ + return float(self[key]) + + + def as_list(self, key): + """ + A convenience method which fetches the specified value, guaranteeing + that it is a list. + + >>> a = ConfigObj() + >>> a['a'] = 1 + >>> a.as_list('a') + [1] + >>> a['a'] = (1,) + >>> a.as_list('a') + [1] + >>> a['a'] = [1] + >>> a.as_list('a') + [1] + """ + result = self[key] + if isinstance(result, (tuple, list)): + return list(result) + return [result] + + + def restore_default(self, key): + """ + Restore (and return) default value for the specified key. + + This method will only work for a ConfigObj that was created + with a configspec and has been validated. + + If there is no default value for this key, ``KeyError`` is raised. 
+ """ + default = self.default_values[key] + dict.__setitem__(self, key, default) + if key not in self.defaults: + self.defaults.append(key) + return default + + + def restore_defaults(self): + """ + Recursively restore default values to all members + that have them. + + This method will only work for a ConfigObj that was created + with a configspec and has been validated. + + It doesn't delete or modify entries without default values. + """ + for key in self.default_values: + self.restore_default(key) + + for section in self.sections: + self[section].restore_defaults() + + +class ConfigObj(Section): + """An object to read, create, and write config files.""" + + _keyword = re.compile(r'''^ # line start + (\s*) # indentation + ( # keyword + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'"=].*?) # no quotes + ) + \s*=\s* # divider + (.*) # value (including list values and comments) + $ # line end + ''', + re.VERBOSE) + + _sectionmarker = re.compile(r'''^ + (\s*) # 1: indentation + ((?:\[\s*)+) # 2: section marker open + ( # 3: section name open + (?:"\s*\S.*?\s*")| # at least one non-space with double quotes + (?:'\s*\S.*?\s*')| # at least one non-space with single quotes + (?:[^'"\s].*?) # at least one non-space unquoted + ) # section name close + ((?:\s*\])+) # 4: section marker close + \s*(\#.*)? # 5: optional comment + $''', + re.VERBOSE) + + # this regexp pulls list values out as a single string + # or single values and comments + # FIXME: this regex adds a '' to the end of comma terminated lists + # workaround in ``_handle_value`` + _valueexp = re.compile(r'''^ + (?: + (?: + ( + (?: + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\#][^,\#]*?) # unquoted + ) + \s*,\s* # comma + )* # match all list items ending in a comma (if any) + ) + ( + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\#\s][^,]*?)| # unquoted + (?:(? 
1: + msg = "Parsing failed with several errors.\nFirst error %s" % info + error = ConfigObjError(msg) + else: + error = self._errors[0] + # set the errors attribute; it's a list of tuples: + # (error_type, message, line_number) + error.errors = self._errors + # set the config attribute + error.config = self + raise error + # delete private attributes + del self._errors + + if configspec is None: + self.configspec = None + else: + self._handle_configspec(configspec) + + + def _initialise(self, options=None): + if options is None: + options = OPTION_DEFAULTS + + # initialise a few variables + self.filename = None + self._errors = [] + self.raise_errors = options['raise_errors'] + self.interpolation = options['interpolation'] + self.list_values = options['list_values'] + self.create_empty = options['create_empty'] + self.file_error = options['file_error'] + self.stringify = options['stringify'] + self.indent_type = options['indent_type'] + self.encoding = options['encoding'] + self.default_encoding = options['default_encoding'] + self.BOM = False + self.newlines = None + self.write_empty_values = options['write_empty_values'] + self.unrepr = options['unrepr'] + + self.initial_comment = [] + self.final_comment = [] + self.configspec = None + + if self._inspec: + self.list_values = False + + # Clear section attributes as well + Section._initialise(self) + + + def __repr__(self): + def _getval(key): + try: + return self[key] + except MissingInterpolationOption: + return dict.__getitem__(self, key) + return ('ConfigObj({%s})' % + ', '.join([('%s: %s' % (repr(key), repr(_getval(key)))) + for key in (self.scalars + self.sections)])) + + + def _handle_bom(self, infile): + """ + Handle any BOM, and decode if necessary. + + If an encoding is specified, that *must* be used - but the BOM should + still be removed (and the BOM attribute set). + + (If the encoding is wrongly specified, then a BOM for an alternative + encoding won't be discovered or removed.) 
+ + If an encoding is not specified, UTF8 or UTF16 BOM will be detected and + removed. The BOM attribute will be set. UTF16 will be decoded to + unicode. + + NOTE: This method must not be called with an empty ``infile``. + + Specifying the *wrong* encoding is likely to cause a + ``UnicodeDecodeError``. + + ``infile`` must always be returned as a list of lines, but may be + passed in as a single string. + """ + if ((self.encoding is not None) and + (self.encoding.lower() not in BOM_LIST)): + # No need to check for a BOM + # the encoding specified doesn't have one + # just decode + return self._decode(infile, self.encoding) + + if isinstance(infile, (list, tuple)): + line = infile[0] + else: + line = infile + if self.encoding is not None: + # encoding explicitly supplied + # And it could have an associated BOM + # TODO: if encoding is just UTF16 - we ought to check for both + # TODO: big endian and little endian versions. + enc = BOM_LIST[self.encoding.lower()] + if enc == 'utf_16': + # For UTF16 we try big endian and little endian + for BOM, (encoding, final_encoding) in BOMS.items(): + if not final_encoding: + # skip UTF8 + continue + if infile.startswith(BOM): + ### BOM discovered + ##self.BOM = True + # Don't need to remove BOM + return self._decode(infile, encoding) + + # If we get this far, will *probably* raise a DecodeError + # As it doesn't appear to start with a BOM + return self._decode(infile, self.encoding) + + # Must be UTF8 + BOM = BOM_SET[enc] + if not line.startswith(BOM): + return self._decode(infile, self.encoding) + + newline = line[len(BOM):] + + # BOM removed + if isinstance(infile, (list, tuple)): + infile[0] = newline + else: + infile = newline + self.BOM = True + return self._decode(infile, self.encoding) + + # No encoding specified - so we need to check for UTF8/UTF16 + for BOM, (encoding, final_encoding) in BOMS.items(): + if not line.startswith(BOM): + continue + else: + # BOM discovered + self.encoding = final_encoding + if not 
final_encoding: + self.BOM = True + # UTF8 + # remove BOM + newline = line[len(BOM):] + if isinstance(infile, (list, tuple)): + infile[0] = newline + else: + infile = newline + # UTF8 - don't decode + if isinstance(infile, basestring): + return infile.splitlines(True) + else: + return infile + # UTF16 - have to decode + return self._decode(infile, encoding) + + # No BOM discovered and no encoding specified, just return + if isinstance(infile, basestring): + # infile read from a file will be a single string + return infile.splitlines(True) + return infile + + + def _a_to_u(self, aString): + """Decode ASCII strings to unicode if a self.encoding is specified.""" + if self.encoding: + return aString.decode('ascii') + else: + return aString + + + def _decode(self, infile, encoding): + """ + Decode infile to unicode. Using the specified encoding. + + if is a string, it also needs converting to a list. + """ + if isinstance(infile, basestring): + # can't be unicode + # NOTE: Could raise a ``UnicodeDecodeError`` + return infile.decode(encoding).splitlines(True) + for i, line in enumerate(infile): + if not isinstance(line, unicode): + # NOTE: The isinstance test here handles mixed lists of unicode/string + # NOTE: But the decode will break on any non-string values + # NOTE: Or could raise a ``UnicodeDecodeError`` + infile[i] = line.decode(encoding) + return infile + + + def _decode_element(self, line): + """Decode element to unicode if necessary.""" + if not self.encoding: + return line + if isinstance(line, str) and self.default_encoding: + return line.decode(self.default_encoding) + return line + + + def _str(self, value): + """ + Used by ``stringify`` within validate, to turn non-string values + into strings. 
+ """ + if not isinstance(value, basestring): + return str(value) + else: + return value + + + def _parse(self, infile): + """Actually parse the config file.""" + temp_list_values = self.list_values + if self.unrepr: + self.list_values = False + + comment_list = [] + done_start = False + this_section = self + maxline = len(infile) - 1 + cur_index = -1 + reset_comment = False + + while cur_index < maxline: + if reset_comment: + comment_list = [] + cur_index += 1 + line = infile[cur_index] + sline = line.strip() + # do we have anything on the line ? + if not sline or sline.startswith('#'): + reset_comment = False + comment_list.append(line) + continue + + if not done_start: + # preserve initial comment + self.initial_comment = comment_list + comment_list = [] + done_start = True + + reset_comment = True + # first we check if it's a section marker + mat = self._sectionmarker.match(line) + if mat is not None: + # is a section line + (indent, sect_open, sect_name, sect_close, comment) = mat.groups() + if indent and (self.indent_type is None): + self.indent_type = indent + cur_depth = sect_open.count('[') + if cur_depth != sect_close.count(']'): + self._handle_error("Cannot compute the section depth at line %s.", + NestingError, infile, cur_index) + continue + + if cur_depth < this_section.depth: + # the new section is dropping back to a previous level + try: + parent = self._match_depth(this_section, + cur_depth).parent + except SyntaxError: + self._handle_error("Cannot compute nesting level at line %s.", + NestingError, infile, cur_index) + continue + elif cur_depth == this_section.depth: + # the new section is a sibling of the current section + parent = this_section.parent + elif cur_depth == this_section.depth + 1: + # the new section is a child the current section + parent = this_section + else: + self._handle_error("Section too nested at line %s.", + NestingError, infile, cur_index) + + sect_name = self._unquote(sect_name) + if sect_name in parent: + 
self._handle_error('Duplicate section name at line %s.', + DuplicateError, infile, cur_index) + continue + + # create the new section + this_section = Section( + parent, + cur_depth, + self, + name=sect_name) + parent[sect_name] = this_section + parent.inline_comments[sect_name] = comment + parent.comments[sect_name] = comment_list + continue + # + # it's not a section marker, + # so it should be a valid ``key = value`` line + mat = self._keyword.match(line) + if mat is None: + # it neither matched as a keyword + # or a section marker + self._handle_error( + 'Invalid line at line "%s".', + ParseError, infile, cur_index) + else: + # is a keyword value + # value will include any inline comment + (indent, key, value) = mat.groups() + if indent and (self.indent_type is None): + self.indent_type = indent + # check for a multiline value + if value[:3] in ['"""', "'''"]: + try: + value, comment, cur_index = self._multiline( + value, infile, cur_index, maxline) + except SyntaxError: + self._handle_error( + 'Parse error in value at line %s.', + ParseError, infile, cur_index) + continue + else: + if self.unrepr: + comment = '' + try: + value = unrepr(value) + except Exception, e: + if type(e) == UnknownType: + msg = 'Unknown name or type in value at line %s.' + else: + msg = 'Parse error in value at line %s.' + self._handle_error(msg, UnreprError, infile, + cur_index) + continue + else: + if self.unrepr: + comment = '' + try: + value = unrepr(value) + except Exception, e: + if isinstance(e, UnknownType): + msg = 'Unknown name or type in value at line %s.' + else: + msg = 'Parse error in value at line %s.' 
+ self._handle_error(msg, UnreprError, infile, + cur_index) + continue + else: + # extract comment and lists + try: + (value, comment) = self._handle_value(value) + except SyntaxError: + self._handle_error( + 'Parse error in value at line %s.', + ParseError, infile, cur_index) + continue + # + key = self._unquote(key) + if key in this_section: + self._handle_error( + 'Duplicate keyword name at line %s.', + DuplicateError, infile, cur_index) + continue + # add the key. + # we set unrepr because if we have got this far we will never + # be creating a new section + this_section.__setitem__(key, value, unrepr=True) + this_section.inline_comments[key] = comment + this_section.comments[key] = comment_list + continue + # + if self.indent_type is None: + # no indentation used, set the type accordingly + self.indent_type = '' + + # preserve the final comment + if not self and not self.initial_comment: + self.initial_comment = comment_list + elif not reset_comment: + self.final_comment = comment_list + self.list_values = temp_list_values + + + def _match_depth(self, sect, depth): + """ + Given a section and a depth level, walk back through the sections + parents to see if the depth level matches a previous section. + + Return a reference to the right section, + or raise a SyntaxError. + """ + while depth < sect.depth: + if sect is sect.parent: + # we've reached the top level already + raise SyntaxError() + sect = sect.parent + if sect.depth == depth: + return sect + # shouldn't get here + raise SyntaxError() + + + def _handle_error(self, text, ErrorClass, infile, cur_index): + """ + Handle an error according to the error settings. + + Either raise the error or store it. 
+ The error will have occured at ``cur_index`` + """ + line = infile[cur_index] + cur_index += 1 + message = text % cur_index + error = ErrorClass(message, cur_index, line) + if self.raise_errors: + # raise the error - parsing stops here + raise error + # store the error + # reraise when parsing has finished + self._errors.append(error) + + + def _unquote(self, value): + """Return an unquoted version of a value""" + if not value: + # should only happen during parsing of lists + raise SyntaxError + if (value[0] == value[-1]) and (value[0] in ('"', "'")): + value = value[1:-1] + return value + + + def _quote(self, value, multiline=True): + """ + Return a safely quoted version of a value. + + Raise a ConfigObjError if the value cannot be safely quoted. + If multiline is ``True`` (default) then use triple quotes + if necessary. + + * Don't quote values that don't need it. + * Recursively quote members of a list and return a comma joined list. + * Multiline is ``False`` for lists. + * Obey list syntax for empty and single member lists. + + If ``list_values=False`` then the value is only quoted if it contains + a ``\\n`` (is multiline) or '#'. + + If ``write_empty_values`` is set, and the value is an empty string, it + won't be quoted. + """ + if multiline and self.write_empty_values and value == '': + # Only if multiline is set, so that it is used for values not + # keys, and not values that are part of a list + return '' + + if multiline and isinstance(value, (list, tuple)): + if not value: + return ',' + elif len(value) == 1: + return self._quote(value[0], multiline=False) + ',' + return ', '.join([self._quote(val, multiline=False) + for val in value]) + if not isinstance(value, basestring): + if self.stringify: + value = str(value) + else: + raise TypeError('Value "%s" is not a string.' 
% value) + + if not value: + return '""' + + no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value + need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value )) + hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value) + check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote + + if check_for_single: + if not self.list_values: + # we don't quote if ``list_values=False`` + quot = noquot + # for normal values either single or double quotes will do + elif '\n' in value: + # will only happen if multiline is off - e.g. '\n' in key + raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) + elif ((value[0] not in wspace_plus) and + (value[-1] not in wspace_plus) and + (',' not in value)): + quot = noquot + else: + quot = self._get_single_quote(value) + else: + # if value has '\n' or "'" *and* '"', it will need triple quotes + quot = self._get_triple_quote(value) + + if quot == noquot and '#' in value and self.list_values: + quot = self._get_single_quote(value) + + return quot % value + + + def _get_single_quote(self, value): + if ("'" in value) and ('"' in value): + raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) + elif '"' in value: + quot = squot + else: + quot = dquot + return quot + + + def _get_triple_quote(self, value): + if (value.find('"""') != -1) and (value.find("'''") != -1): + raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) + if value.find('"""') == -1: + quot = tdquot + else: + quot = tsquot + return quot + + + def _handle_value(self, value): + """ + Given a value string, unquote, remove comment, + handle lists. (including empty and single member lists) + """ + if self._inspec: + # Parsing a configspec so don't handle comments + return (value, '') + # do we look for lists in values ? 
+ if not self.list_values: + mat = self._nolistvalue.match(value) + if mat is None: + raise SyntaxError() + # NOTE: we don't unquote here + return mat.groups() + # + mat = self._valueexp.match(value) + if mat is None: + # the value is badly constructed, probably badly quoted, + # or an invalid list + raise SyntaxError() + (list_values, single, empty_list, comment) = mat.groups() + if (list_values == '') and (single is None): + # change this if you want to accept empty values + raise SyntaxError() + # NOTE: note there is no error handling from here if the regex + # is wrong: then incorrect values will slip through + if empty_list is not None: + # the single comma - meaning an empty list + return ([], comment) + if single is not None: + # handle empty values + if list_values and not single: + # FIXME: the '' is a workaround because our regex now matches + # '' at the end of a list if it has a trailing comma + single = None + else: + single = single or '""' + single = self._unquote(single) + if list_values == '': + # not a list value + return (single, comment) + the_list = self._listvalueexp.findall(list_values) + the_list = [self._unquote(val) for val in the_list] + if single is not None: + the_list += [single] + return (the_list, comment) + + + def _multiline(self, value, infile, cur_index, maxline): + """Extract the value, where we are in a multiline situation.""" + quot = value[:3] + newvalue = value[3:] + single_line = self._triple_quote[quot][0] + multi_line = self._triple_quote[quot][1] + mat = single_line.match(value) + if mat is not None: + retval = list(mat.groups()) + retval.append(cur_index) + return retval + elif newvalue.find(quot) != -1: + # somehow the triple quote is missing + raise SyntaxError() + # + while cur_index < maxline: + cur_index += 1 + newvalue += '\n' + line = infile[cur_index] + if line.find(quot) == -1: + newvalue += line + else: + # end of multiline, process it + break + else: + # we've got to the end of the config, oops... 
+ raise SyntaxError() + mat = multi_line.match(line) + if mat is None: + # a badly formed line + raise SyntaxError() + (value, comment) = mat.groups() + return (newvalue + value, comment, cur_index) + + + def _handle_configspec(self, configspec): + """Parse the configspec.""" + # FIXME: Should we check that the configspec was created with the + # correct settings ? (i.e. ``list_values=False``) + if not isinstance(configspec, ConfigObj): + try: + configspec = ConfigObj(configspec, + raise_errors=True, + file_error=True, + _inspec=True) + except ConfigObjError, e: + # FIXME: Should these errors have a reference + # to the already parsed ConfigObj ? + raise ConfigspecError('Parsing configspec failed: %s' % e) + except IOError, e: + raise IOError('Reading configspec failed: %s' % e) + + self.configspec = configspec + + + + def _set_configspec(self, section, copy): + """ + Called by validate. Handles setting the configspec on subsections + including sections to be validated by __many__ + """ + configspec = section.configspec + many = configspec.get('__many__') + if isinstance(many, dict): + for entry in section.sections: + if entry not in configspec: + section[entry].configspec = many + + for entry in configspec.sections: + if entry == '__many__': + continue + if entry not in section: + section[entry] = {} + section[entry]._created = True + if copy: + # copy comments + section.comments[entry] = configspec.comments.get(entry, []) + section.inline_comments[entry] = configspec.inline_comments.get(entry, '') + + # Could be a scalar when we expect a section + if isinstance(section[entry], Section): + section[entry].configspec = configspec[entry] + + + def _write_line(self, indent_string, entry, this_entry, comment): + """Write an individual line, for the write method""" + # NOTE: the calls to self._quote here handles non-StringType values. 
+ if not self.unrepr: + val = self._decode_element(self._quote(this_entry)) + else: + val = repr(this_entry) + return '%s%s%s%s%s' % (indent_string, + self._decode_element(self._quote(entry, multiline=False)), + self._a_to_u(' = '), + val, + self._decode_element(comment)) + + + def _write_marker(self, indent_string, depth, entry, comment): + """Write a section marker line""" + return '%s%s%s%s%s' % (indent_string, + self._a_to_u('[' * depth), + self._quote(self._decode_element(entry), multiline=False), + self._a_to_u(']' * depth), + self._decode_element(comment)) + + + def _handle_comment(self, comment): + """Deal with a comment.""" + if not comment: + return '' + start = self.indent_type + if not comment.startswith('#'): + start += self._a_to_u(' # ') + return (start + comment) + + + # Public methods + + def write(self, outfile=None, section=None): + """ + Write the current ConfigObj as a file + + tekNico: FIXME: use StringIO instead of real files + + >>> filename = a.filename + >>> a.filename = 'test.ini' + >>> a.write() + >>> a.filename = filename + >>> a == ConfigObj('test.ini', raise_errors=True) + 1 + >>> import os + >>> os.remove('test.ini') + """ + if self.indent_type is None: + # this can be true if initialised from a dictionary + self.indent_type = DEFAULT_INDENT_TYPE + + out = [] + cs = self._a_to_u('#') + csp = self._a_to_u('# ') + if section is None: + int_val = self.interpolation + self.interpolation = False + section = self + for line in self.initial_comment: + line = self._decode_element(line) + stripped_line = line.strip() + if stripped_line and not stripped_line.startswith(cs): + line = csp + line + out.append(line) + + indent_string = self.indent_type * section.depth + for entry in (section.scalars + section.sections): + if entry in section.defaults: + # don't write out default values + continue + for comment_line in section.comments[entry]: + comment_line = self._decode_element(comment_line.lstrip()) + if comment_line and not 
comment_line.startswith(cs): + comment_line = csp + comment_line + out.append(indent_string + comment_line) + this_entry = section[entry] + comment = self._handle_comment(section.inline_comments[entry]) + + if isinstance(this_entry, dict): + # a section + out.append(self._write_marker( + indent_string, + this_entry.depth, + entry, + comment)) + out.extend(self.write(section=this_entry)) + else: + out.append(self._write_line( + indent_string, + entry, + this_entry, + comment)) + + if section is self: + for line in self.final_comment: + line = self._decode_element(line) + stripped_line = line.strip() + if stripped_line and not stripped_line.startswith(cs): + line = csp + line + out.append(line) + self.interpolation = int_val + + if section is not self: + return out + + if (self.filename is None) and (outfile is None): + # output a list of lines + # might need to encode + # NOTE: This will *screw* UTF16, each line will start with the BOM + if self.encoding: + out = [l.encode(self.encoding) for l in out] + if (self.BOM and ((self.encoding is None) or + (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))): + # Add the UTF8 BOM + if not out: + out.append('') + out[0] = BOM_UTF8 + out[0] + return out + + # Turn the list to a string, joined with correct newlines + newline = self.newlines or os.linesep + if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w' + and sys.platform == 'win32' and newline == '\r\n'): + # Windows specific hack to avoid writing '\r\r\n' + newline = '\n' + output = self._a_to_u(newline).join(out) + if self.encoding: + output = output.encode(self.encoding) + if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)): + # Add the UTF8 BOM + output = BOM_UTF8 + output + + if not output.endswith(newline): + output += newline + if outfile is not None: + outfile.write(output) + else: + h = open(self.filename, 'wb') + h.write(output) + h.close() + + + def validate(self, validator, preserve_errors=False, copy=False, + 
section=None): + """ + Test the ConfigObj against a configspec. + + It uses the ``validator`` object from *validate.py*. + + To run ``validate`` on the current ConfigObj, call: :: + + test = config.validate(validator) + + (Normally having previously passed in the configspec when the ConfigObj + was created - you can dynamically assign a dictionary of checks to the + ``configspec`` attribute of a section though). + + It returns ``True`` if everything passes, or a dictionary of + pass/fails (True/False). If every member of a subsection passes, it + will just have the value ``True``. (It also returns ``False`` if all + members fail). + + In addition, it converts the values from strings to their native + types if their checks pass (and ``stringify`` is set). + + If ``preserve_errors`` is ``True`` (``False`` is default) then instead + of a marking a fail with a ``False``, it will preserve the actual + exception object. This can contain info about the reason for failure. + For example the ``VdtValueTooSmallError`` indicates that the value + supplied was too small. If a value (or section) is missing it will + still be marked as ``False``. + + You must have the validate module to use ``preserve_errors=True``. + + You can then use the ``flatten_errors`` function to turn your nested + results dictionary into a flattened list of failures - useful for + displaying meaningful error messages. 
+ """ + if section is None: + if self.configspec is None: + raise ValueError('No configspec supplied.') + if preserve_errors: + # We do this once to remove a top level dependency on the validate module + # Which makes importing configobj faster + from validate import VdtMissingValue + self._vdtMissingValue = VdtMissingValue + + section = self + + if copy: + section.initial_comment = section.configspec.initial_comment + section.final_comment = section.configspec.final_comment + section.encoding = section.configspec.encoding + section.BOM = section.configspec.BOM + section.newlines = section.configspec.newlines + section.indent_type = section.configspec.indent_type + + # + # section.default_values.clear() #?? + configspec = section.configspec + self._set_configspec(section, copy) + + + def validate_entry(entry, spec, val, missing, ret_true, ret_false): + section.default_values.pop(entry, None) + + try: + section.default_values[entry] = validator.get_default_value(configspec[entry]) + except (KeyError, AttributeError, validator.baseErrorClass): + # No default, bad default or validator has no 'get_default_value' + # (e.g. 
SimpleVal) + pass + + try: + check = validator.check(spec, + val, + missing=missing + ) + except validator.baseErrorClass, e: + if not preserve_errors or isinstance(e, self._vdtMissingValue): + out[entry] = False + else: + # preserve the error + out[entry] = e + ret_false = False + ret_true = False + else: + ret_false = False + out[entry] = True + if self.stringify or missing: + # if we are doing type conversion + # or the value is a supplied default + if not self.stringify: + if isinstance(check, (list, tuple)): + # preserve lists + check = [self._str(item) for item in check] + elif missing and check is None: + # convert the None from a default to a '' + check = '' + else: + check = self._str(check) + if (check != val) or missing: + section[entry] = check + if not copy and missing and entry not in section.defaults: + section.defaults.append(entry) + return ret_true, ret_false + + # + out = {} + ret_true = True + ret_false = True + + unvalidated = [k for k in section.scalars if k not in configspec] + incorrect_sections = [k for k in configspec.sections if k in section.scalars] + incorrect_scalars = [k for k in configspec.scalars if k in section.sections] + + for entry in configspec.scalars: + if entry in ('__many__', '___many___'): + # reserved names + continue + if (not entry in section.scalars) or (entry in section.defaults): + # missing entries + # or entries from defaults + missing = True + val = None + if copy and entry not in section.scalars: + # copy comments + section.comments[entry] = ( + configspec.comments.get(entry, [])) + section.inline_comments[entry] = ( + configspec.inline_comments.get(entry, '')) + # + else: + missing = False + val = section[entry] + + ret_true, ret_false = validate_entry(entry, configspec[entry], val, + missing, ret_true, ret_false) + + many = None + if '__many__' in configspec.scalars: + many = configspec['__many__'] + elif '___many___' in configspec.scalars: + many = configspec['___many___'] + + if many is not None: + for entry 
in unvalidated: + val = section[entry] + ret_true, ret_false = validate_entry(entry, many, val, False, + ret_true, ret_false) + unvalidated = [] + + for entry in incorrect_scalars: + ret_true = False + if not preserve_errors: + out[entry] = False + else: + ret_false = False + msg = 'Value %r was provided as a section' % entry + out[entry] = validator.baseErrorClass(msg) + for entry in incorrect_sections: + ret_true = False + if not preserve_errors: + out[entry] = False + else: + ret_false = False + msg = 'Section %r was provided as a single value' % entry + out[entry] = validator.baseErrorClass(msg) + + # Missing sections will have been created as empty ones when the + # configspec was read. + for entry in section.sections: + # FIXME: this means DEFAULT is not copied in copy mode + if section is self and entry == 'DEFAULT': + continue + if section[entry].configspec is None: + unvalidated.append(entry) + continue + if copy: + section.comments[entry] = configspec.comments.get(entry, []) + section.inline_comments[entry] = configspec.inline_comments.get(entry, '') + check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry]) + out[entry] = check + if check == False: + ret_true = False + elif check == True: + ret_false = False + else: + ret_true = False + + section.extra_values = unvalidated + if preserve_errors and not section._created: + # If the section wasn't created (i.e. it wasn't missing) + # then we can't return False, we need to preserve errors + ret_false = False + # + if ret_false and preserve_errors and out: + # If we are preserving errors, but all + # the failures are from missing sections / values + # then we can return False. Otherwise there is a + # real failure that we need to preserve. 
+ ret_false = not any(out.values()) + if ret_true: + return True + elif ret_false: + return False + return out + + + def reset(self): + """Clear ConfigObj instance and restore to 'freshly created' state.""" + self.clear() + self._initialise() + # FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload) + # requires an empty dictionary + self.configspec = None + # Just to be sure ;-) + self._original_configspec = None + + + def reload(self): + """ + Reload a ConfigObj from file. + + This method raises a ``ReloadError`` if the ConfigObj doesn't have + a filename attribute pointing to a file. + """ + if not isinstance(self.filename, basestring): + raise ReloadError() + + filename = self.filename + current_options = {} + for entry in OPTION_DEFAULTS: + if entry == 'configspec': + continue + current_options[entry] = getattr(self, entry) + + configspec = self._original_configspec + current_options['configspec'] = configspec + + self.clear() + self._initialise(current_options) + self._load(filename, configspec) + + + +class SimpleVal(object): + """ + A simple validator. + Can be used to check that all members expected are present. + + To use it, provide a configspec with all your members in (the value given + will be ignored). Pass an instance of ``SimpleVal`` to the ``validate`` + method of your ``ConfigObj``. ``validate`` will return ``True`` if all + members are present, or a dictionary with True/False meaning + present/missing. (Whole missing sections will be replaced with ``False``) + """ + + def __init__(self): + self.baseErrorClass = ConfigObjError + + def check(self, check, member, missing=False): + """A dummy check method, always returns the value unchanged.""" + if missing: + raise self.baseErrorClass() + return member + + +def flatten_errors(cfg, res, levels=None, results=None): + """ + An example function that will turn a nested dictionary of results + (as returned by ``ConfigObj.validate``) into a flat list. 
+ + ``cfg`` is the ConfigObj instance being checked, ``res`` is the results + dictionary returned by ``validate``. + + (This is a recursive function, so you shouldn't use the ``levels`` or + ``results`` arguments - they are used by the function.) + + Returns a list of keys that failed. Each member of the list is a tuple:: + + ([list of sections...], key, result) + + If ``validate`` was called with ``preserve_errors=False`` (the default) + then ``result`` will always be ``False``. + + *list of sections* is a flattened list of sections that the key was found + in. + + If the section was missing (or a section was expected and a scalar provided + - or vice-versa) then key will be ``None``. + + If the value (or section) was missing then ``result`` will be ``False``. + + If ``validate`` was called with ``preserve_errors=True`` and a value + was present, but failed the check, then ``result`` will be the exception + object returned. You can use this as a string that describes the failure. + + For example *The value "3" is of the wrong type*. + """ + if levels is None: + # first time called + levels = [] + results = [] + if res == True: + return results + if res == False or isinstance(res, Exception): + results.append((levels[:], None, res)) + if levels: + levels.pop() + return results + for (key, val) in res.items(): + if val == True: + continue + if isinstance(cfg.get(key), dict): + # Go down one level + levels.append(key) + flatten_errors(cfg[key], val, levels, results) + continue + results.append((levels[:], key, val)) + # + # Go up one level + if levels: + levels.pop() + # + return results + + +def get_extra_values(conf, _prepend=()): + """ + Find all the values and sections not in the configspec from a validated + ConfigObj. + + ``get_extra_values`` returns a list of tuples where each tuple represents + either an extra section, or an extra value. + + The tuples contain two values, a tuple representing the section the value + is in and the name of the extra values. 
For extra values in the top level + section the first member will be an empty tuple. For values in the 'foo' + section the first member will be ``('foo',)``. For members in the 'bar' + subsection of the 'foo' section the first member will be ``('foo', 'bar')``. + + NOTE: If you call ``get_extra_values`` on a ConfigObj instance that hasn't + been validated it will return an empty list. + """ + out = [] + + out.extend([(_prepend, name) for name in conf.extra_values]) + for name in conf.sections: + if name not in conf.extra_values: + out.extend(get_extra_values(conf[name], _prepend + (name,))) + return out + + +"""*A programming language is a medium of expression.* - Paul Graham""" diff --git a/astropy/extern/configobj_py2/validate.py b/astropy/extern/configobj_py2/validate.py new file mode 100644 index 0000000..73dbdb8 --- /dev/null +++ b/astropy/extern/configobj_py2/validate.py @@ -0,0 +1,1450 @@ +# validate.py +# A Validator object +# Copyright (C) 2005-2010 Michael Foord, Mark Andrews, Nicola Larosa +# E-mail: fuzzyman AT voidspace DOT org DOT uk +# mark AT la-la DOT com +# nico AT tekNico DOT net + +# This software is licensed under the terms of the BSD license. +# http://www.voidspace.org.uk/python/license.shtml +# Basically you're free to copy, modify, distribute and relicense it, +# So long as you keep a copy of the license with it. + +# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml +# For information about bugfixes, updates and support, please join the +# ConfigObj mailing list: +# http://lists.sourceforge.net/lists/listinfo/configobj-develop +# Comments, suggestions and bug reports welcome. + +""" + The Validator object is used to check that supplied values + conform to a specification. + + The value can be supplied as a string - e.g. from a config file. + In this case the check will also *convert* the value to + the required type. This allows you to add validation + as a transparent layer to access data stored as strings. 
+ The validation checks that the data is correct *and* + converts it to the expected type. + + Some standard checks are provided for basic data types. + Additional checks are easy to write. They can be + provided when the ``Validator`` is instantiated or + added afterwards. + + The standard functions work with the following basic data types : + + * integers + * floats + * booleans + * strings + * ip_addr + + plus lists of these datatypes + + Adding additional checks is done through coding simple functions. + + The full set of standard checks are : + + * 'integer': matches integer values (including negative) + Takes optional 'min' and 'max' arguments : :: + + integer() + integer(3, 9) # any value from 3 to 9 + integer(min=0) # any positive value + integer(max=9) + + * 'float': matches float values + Has the same parameters as the integer check. + + * 'boolean': matches boolean values - ``True`` or ``False`` + Acceptable string values for True are : + true, on, yes, 1 + Acceptable string values for False are : + false, off, no, 0 + + Any other value raises an error. + + * 'ip_addr': matches an Internet Protocol address, v.4, represented + by a dotted-quad string, i.e. '1.2.3.4'. + + * 'string': matches any string. + Takes optional keyword args 'min' and 'max' + to specify min and max lengths of the string. + + * 'list': matches any list. + Takes optional keyword args 'min', and 'max' to specify min and + max sizes of the list. (Always returns a list.) + + * 'tuple': matches any tuple. + Takes optional keyword args 'min', and 'max' to specify min and + max sizes of the tuple. (Always returns a tuple.) + + * 'int_list': Matches a list of integers. + Takes the same arguments as list. + + * 'float_list': Matches a list of floats. + Takes the same arguments as list. + + * 'bool_list': Matches a list of boolean values. + Takes the same arguments as list. + + * 'ip_addr_list': Matches a list of IP addresses. + Takes the same arguments as list. 
+ + * 'string_list': Matches a list of strings. + Takes the same arguments as list. + + * 'mixed_list': Matches a list with different types in + specific positions. List size must match + the number of arguments. + + Each position can be one of : + 'integer', 'float', 'ip_addr', 'string', 'boolean' + + So to specify a list with two strings followed + by two integers, you write the check as : :: + + mixed_list('string', 'string', 'integer', 'integer') + + * 'pass': This check matches everything ! It never fails + and the value is unchanged. + + It is also the default if no check is specified. + + * 'option': This check matches any from a list of options. + You specify this check with : :: + + option('option 1', 'option 2', 'option 3') + + You can supply a default value (returned if no value is supplied) + using the default keyword argument. + + You specify a list argument for default using a list constructor syntax in + the check : :: + + checkname(arg1, arg2, default=list('val 1', 'val 2', 'val 3')) + + A badly formatted set of arguments will raise a ``VdtParamError``. +""" + +__version__ = '1.0.1' + + +__all__ = ( + '__version__', + 'dottedQuadToNum', + 'numToDottedQuad', + 'ValidateError', + 'VdtUnknownCheckError', + 'VdtParamError', + 'VdtTypeError', + 'VdtValueError', + 'VdtValueTooSmallError', + 'VdtValueTooBigError', + 'VdtValueTooShortError', + 'VdtValueTooLongError', + 'VdtMissingValue', + 'Validator', + 'is_integer', + 'is_float', + 'is_boolean', + 'is_list', + 'is_tuple', + 'is_ip_addr', + 'is_string', + 'is_int_list', + 'is_bool_list', + 'is_float_list', + 'is_string_list', + 'is_ip_addr_list', + 'is_mixed_list', + 'is_option', + '__docformat__', +) + + +import re + + +_list_arg = re.compile(r''' + (?: + ([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*list\( + ( + (?: + \s* + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s\)][^,\)]*?) 
# unquoted + ) + \s*,\s* + )* + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s\)][^,\)]*?) # unquoted + )? # last one + ) + \) + ) +''', re.VERBOSE | re.DOTALL) # two groups + +_list_members = re.compile(r''' + ( + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s=][^,=]*?) # unquoted + ) + (?: + (?:\s*,\s*)|(?:\s*$) # comma + ) +''', re.VERBOSE | re.DOTALL) # one group + +_paramstring = r''' + (?: + ( + (?: + [a-zA-Z_][a-zA-Z0-9_]*\s*=\s*list\( + (?: + \s* + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s\)][^,\)]*?) # unquoted + ) + \s*,\s* + )* + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s\)][^,\)]*?) # unquoted + )? # last one + \) + )| + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s=][^,=]*?)| # unquoted + (?: # keyword argument + [a-zA-Z_][a-zA-Z0-9_]*\s*=\s* + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s=][^,=]*?) # unquoted + ) + ) + ) + ) + (?: + (?:\s*,\s*)|(?:\s*$) # comma + ) + ) + ''' + +_matchstring = '^%s*' % _paramstring + +# Python pre 2.2.1 doesn't have bool +try: + bool +except NameError: + def bool(val): + """Simple boolean equivalent function. 
""" + if val: + return 1 + else: + return 0 + + +def dottedQuadToNum(ip): + """ + Convert decimal dotted quad string to long integer + + >>> int(dottedQuadToNum('1 ')) + 1 + >>> int(dottedQuadToNum(' 1.2')) + 16777218 + >>> int(dottedQuadToNum(' 1.2.3 ')) + 16908291 + >>> int(dottedQuadToNum('1.2.3.4')) + 16909060 + >>> dottedQuadToNum('255.255.255.255') + 4294967295L + >>> dottedQuadToNum('255.255.255.256') + Traceback (most recent call last): + ValueError: Not a good dotted-quad IP: 255.255.255.256 + """ + + # import here to avoid it when ip_addr values are not used + import socket, struct + + try: + return struct.unpack('!L', + socket.inet_aton(ip.strip()))[0] + except socket.error: + # bug in inet_aton, corrected in Python 2.4 + if ip.strip() == '255.255.255.255': + return 0xFFFFFFFFL + else: + raise ValueError('Not a good dotted-quad IP: %s' % ip) + return + + +def numToDottedQuad(num): + """ + Convert long int to dotted quad string + + >>> numToDottedQuad(-1L) + Traceback (most recent call last): + ValueError: Not a good numeric IP: -1 + >>> numToDottedQuad(1L) + '0.0.0.1' + >>> numToDottedQuad(16777218L) + '1.0.0.2' + >>> numToDottedQuad(16908291L) + '1.2.0.3' + >>> numToDottedQuad(16909060L) + '1.2.3.4' + >>> numToDottedQuad(4294967295L) + '255.255.255.255' + >>> numToDottedQuad(4294967296L) + Traceback (most recent call last): + ValueError: Not a good numeric IP: 4294967296 + """ + + # import here to avoid it when ip_addr values are not used + import socket, struct + + # no need to intercept here, 4294967295L is fine + if num > 4294967295L or num < 0: + raise ValueError('Not a good numeric IP: %s' % num) + try: + return socket.inet_ntoa( + struct.pack('!L', long(num))) + except (socket.error, struct.error, OverflowError): + raise ValueError('Not a good numeric IP: %s' % num) + + +class ValidateError(Exception): + """ + This error indicates that the check failed. + It can be the base class for more specific errors. 
+ + Any check function that fails ought to raise this error. + (or a subclass) + + >>> raise ValidateError + Traceback (most recent call last): + ValidateError + """ + + +class VdtMissingValue(ValidateError): + """No value was supplied to a check that needed one.""" + + +class VdtUnknownCheckError(ValidateError): + """An unknown check function was requested""" + + def __init__(self, value): + """ + >>> raise VdtUnknownCheckError('yoda') + Traceback (most recent call last): + VdtUnknownCheckError: the check "yoda" is unknown. + """ + ValidateError.__init__(self, 'the check "%s" is unknown.' % (value,)) + + +class VdtParamError(SyntaxError): + """An incorrect parameter was passed""" + + def __init__(self, name, value): + """ + >>> raise VdtParamError('yoda', 'jedi') + Traceback (most recent call last): + VdtParamError: passed an incorrect value "jedi" for parameter "yoda". + """ + SyntaxError.__init__(self, 'passed an incorrect value "%s" for parameter "%s".' % (value, name)) + + +class VdtTypeError(ValidateError): + """The value supplied was of the wrong type""" + + def __init__(self, value): + """ + >>> raise VdtTypeError('jedi') + Traceback (most recent call last): + VdtTypeError: the value "jedi" is of the wrong type. + """ + ValidateError.__init__(self, 'the value "%s" is of the wrong type.' % (value,)) + + +class VdtValueError(ValidateError): + """The value supplied was of the correct type, but was not an allowed value.""" + + def __init__(self, value): + """ + >>> raise VdtValueError('jedi') + Traceback (most recent call last): + VdtValueError: the value "jedi" is unacceptable. + """ + ValidateError.__init__(self, 'the value "%s" is unacceptable.' % (value,)) + + +class VdtValueTooSmallError(VdtValueError): + """The value supplied was of the correct type, but was too small.""" + + def __init__(self, value): + """ + >>> raise VdtValueTooSmallError('0') + Traceback (most recent call last): + VdtValueTooSmallError: the value "0" is too small. 
+ """ + ValidateError.__init__(self, 'the value "%s" is too small.' % (value,)) + + +class VdtValueTooBigError(VdtValueError): + """The value supplied was of the correct type, but was too big.""" + + def __init__(self, value): + """ + >>> raise VdtValueTooBigError('1') + Traceback (most recent call last): + VdtValueTooBigError: the value "1" is too big. + """ + ValidateError.__init__(self, 'the value "%s" is too big.' % (value,)) + + +class VdtValueTooShortError(VdtValueError): + """The value supplied was of the correct type, but was too short.""" + + def __init__(self, value): + """ + >>> raise VdtValueTooShortError('jed') + Traceback (most recent call last): + VdtValueTooShortError: the value "jed" is too short. + """ + ValidateError.__init__( + self, + 'the value "%s" is too short.' % (value,)) + + +class VdtValueTooLongError(VdtValueError): + """The value supplied was of the correct type, but was too long.""" + + def __init__(self, value): + """ + >>> raise VdtValueTooLongError('jedie') + Traceback (most recent call last): + VdtValueTooLongError: the value "jedie" is too long. + """ + ValidateError.__init__(self, 'the value "%s" is too long.' % (value,)) + + +class Validator(object): + """ + Validator is an object that allows you to register a set of 'checks'. + These checks take input and test that it conforms to the check. + + This can also involve converting the value from a string into + the correct datatype. + + The ``check`` method takes an input string which configures which + check is to be used and applies that check to a supplied value. + + An example input string would be: + 'int_range(param1, param2)' + + You would then provide something like: + + >>> def int_range_check(value, min, max): + ... # turn min and max from strings to integers + ... min = int(min) + ... max = int(max) + ... # check that value is of the correct type. + ... # possible valid inputs are integers or strings + ... # that represent integers + ... 
if not isinstance(value, (int, long, basestring)): + ... raise VdtTypeError(value) + ... elif isinstance(value, basestring): + ... # if we are given a string + ... # attempt to convert to an integer + ... try: + ... value = int(value) + ... except ValueError: + ... raise VdtValueError(value) + ... # check the value is between our constraints + ... if not min <= value: + ... raise VdtValueTooSmallError(value) + ... if not value <= max: + ... raise VdtValueTooBigError(value) + ... return value + + >>> fdict = {'int_range': int_range_check} + >>> vtr1 = Validator(fdict) + >>> vtr1.check('int_range(20, 40)', '30') + 30 + >>> vtr1.check('int_range(20, 40)', '60') + Traceback (most recent call last): + VdtValueTooBigError: the value "60" is too big. + + New functions can be added with : :: + + >>> vtr2 = Validator() + >>> vtr2.functions['int_range'] = int_range_check + + Or by passing in a dictionary of functions when Validator + is instantiated. + + Your functions *can* use keyword arguments, + but the first argument should always be 'value'. + + If the function doesn't take additional arguments, + the parentheses are optional in the check. + It can be written with either of : :: + + keyword = function_name + keyword = function_name() + + The first program to utilise Validator() was Michael Foord's + ConfigObj, an alternative to ConfigParser which supports lists and + can validate a config file using a config schema. + For more details on using Validator with ConfigObj see: + http://www.voidspace.org.uk/python/configobj.html + """ + + # this regex does the initial parsing of the checks + _func_re = re.compile(r'(.+?)\((.*)\)', re.DOTALL) + + # this regex takes apart keyword arguments + _key_arg = re.compile(r'^([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.*)$', re.DOTALL) + + + # this regex finds keyword=list(....) 
def __init__(self, functions=None):
    """
    >>> vtri = Validator()
    """
    # Registry mapping check names (as they appear in check strings like
    # 'integer(0, 9)') to the callables that implement them.
    self.functions = {
        # the empty check name is a no-op, same as 'pass'
        '': self._pass,
        'integer': is_integer,
        'float': is_float,
        'boolean': is_boolean,
        'ip_addr': is_ip_addr,
        'string': is_string,
        'list': is_list,
        'tuple': is_tuple,
        'int_list': is_int_list,
        'float_list': is_float_list,
        'bool_list': is_bool_list,
        'ip_addr_list': is_ip_addr_list,
        'string_list': is_string_list,
        'mixed_list': is_mixed_list,
        'pass': self._pass,
        'option': is_option,
        'force_list': force_list,
    }
    if functions is not None:
        # Caller-supplied checks override built-ins on name clashes.
        self.functions.update(functions)
    # tekNico: for use by ConfigObj
    self.baseErrorClass = ValidateError
    # Cache of parsed check strings:
    # check string -> (fun_name, fun_args, fun_kwargs, default).
    self._cache = {}
def _handle_none(self, value):
    # Convert the sentinel string 'None' into a real None; a *quoted*
    # "None" means the literal string and only has its quotes stripped.
    if value == 'None':
        return None
    elif value in ("'None'", '"None"'):
        # Special case a quoted None
        value = self._unquote(value)
    return value


def _parse_with_caching(self, check):
    # Parse a check string, memoising the result per check string.
    if check in self._cache:
        fun_name, fun_args, fun_kwargs, default = self._cache[check]
        # We call list and dict below to work with *copies* of the data
        # rather than the original (which are mutable of course)
        fun_args = list(fun_args)
        fun_kwargs = dict(fun_kwargs)
    else:
        fun_name, fun_args, fun_kwargs, default = self._parse_check(check)
        # str() the keyword names so they can be passed as **kwargs
        # (they may arrive as unicode - see the unicode doctests in _test).
        fun_kwargs = dict([(str(key), value) for (key, value) in fun_kwargs.items()])
        # Store copies in the cache for the same mutability reason as above.
        self._cache[check] = fun_name, list(fun_args), dict(fun_kwargs), default
    return fun_name, fun_args, fun_kwargs, default


def _check_value(self, value, fun_name, fun_args, fun_kwargs):
    # Look up the named check and apply it; an unknown name raises
    # VdtUnknownCheckError rather than a bare KeyError.
    try:
        fun = self.functions[fun_name]
    except KeyError:
        raise VdtUnknownCheckError(fun_name)
    else:
        return fun(value, *fun_args, **fun_kwargs)
def _unquote(self, val):
    """Unquote a value if necessary."""
    # Strip exactly one pair of matching single or double quotes;
    # anything else (unmatched, too short) is returned untouched.
    if len(val) >= 2 and val[0] == val[-1] and val[0] in ('"', "'"):
        return val[1:-1]
    return val


def _list_handle(self, listmatch):
    """Take apart a ``keyword=list('val, 'val')`` type string."""
    # group(1) is the keyword, group(2) the comma-separated members.
    keyword = listmatch.group(1)
    members = [
        self._unquote(item)
        for item in self._list_members.findall(listmatch.group(2))
    ]
    return keyword, members


def _pass(self, value):
    """
    Dummy check that always passes

    >>> vtor.check('', 0)
    0
    >>> vtor.check('', '0')
    '0'
    """
    return value
def _is_num_param(names, values, to_float=False):
    """
    Return numbers from inputs or raise VdtParamError.

    Lets ``None`` pass through.
    Pass in keyword argument ``to_float=True`` to
    use float for the conversion rather than int.

    >>> _is_num_param(('', ''), (0, 1.0))
    [0, 1]
    >>> _is_num_param(('', ''), (0, 1.0), to_float=True)
    [0.0, 1.0]
    >>> _is_num_param(('a'), ('a'))
    Traceback (most recent call last):
    VdtParamError: passed an incorrect value "a" for parameter "a".
    """
    # Choose the conversion function explicitly; the original
    # ``to_float and float or int`` and/or trick is fragile and harder
    # to read (it only works because float/int are truthy).
    if to_float:
        fun = float
    else:
        fun = int
    out_params = []
    for (name, val) in zip(names, values):
        if val is None:
            # None is the "no bound given" marker - pass it through.
            out_params.append(val)
        elif isinstance(val, (int, long, float, basestring)):
            try:
                out_params.append(fun(val))
            except ValueError:
                # The exception object was previously bound to an unused
                # name with py2-only ``except ValueError, e`` syntax; the
                # parameter name and offending value carry everything the
                # caller needs.
                raise VdtParamError(name, val)
        else:
            raise VdtParamError(name, val)
    return out_params
+ >>> vtor.check('integer(10)', '20') + 20 + >>> vtor.check('integer(max=20)', '15') + 15 + >>> vtor.check('integer(10)', '9') + Traceback (most recent call last): + VdtValueTooSmallError: the value "9" is too small. + >>> vtor.check('integer(10)', 9) + Traceback (most recent call last): + VdtValueTooSmallError: the value "9" is too small. + >>> vtor.check('integer(max=20)', '35') + Traceback (most recent call last): + VdtValueTooBigError: the value "35" is too big. + >>> vtor.check('integer(max=20)', 35) + Traceback (most recent call last): + VdtValueTooBigError: the value "35" is too big. + >>> vtor.check('integer(0, 9)', False) + 0 + """ + (min_val, max_val) = _is_num_param(('min', 'max'), (min, max)) + if not isinstance(value, (int, long, basestring)): + raise VdtTypeError(value) + if isinstance(value, basestring): + # if it's a string - does it represent an integer ? + try: + value = int(value) + except ValueError: + raise VdtTypeError(value) + if (min_val is not None) and (value < min_val): + raise VdtValueTooSmallError(value) + if (max_val is not None) and (value > max_val): + raise VdtValueTooBigError(value) + return value + + +def is_float(value, min=None, max=None): + """ + A check that tests that a given value is a float + (an integer will be accepted), and optionally - that it is between bounds. + + If the value is a string, then the conversion is done - if possible. + Otherwise a VdtError is raised. + + This can accept negative values. + + >>> vtor.check('float', '2') + 2.0 + + From now on we multiply the value to avoid comparing decimals + + >>> vtor.check('float', '-6.8') * 10 + -68.0 + >>> vtor.check('float', '12.2') * 10 + 122.0 + >>> vtor.check('float', 8.4) * 10 + 84.0 + >>> vtor.check('float', 'a') + Traceback (most recent call last): + VdtTypeError: the value "a" is of the wrong type. 
# Truth table for is_boolean: maps the accepted spellings (and the bool
# constants themselves) to real booleans.  String keys are matched
# case-insensitively via value.lower() in is_boolean.
bool_dict = {
    True: True, 'on': True, '1': True, 'true': True, 'yes': True,
    False: False, 'off': False, '0': False, 'false': False, 'no': False,
}


def is_boolean(value):
    """
    Check if the value represents a boolean.

    >>> vtor.check('boolean', 0)
    0
    >>> vtor.check('boolean', False)
    0
    >>> vtor.check('boolean', '0')
    0
    >>> vtor.check('boolean', 'off')
    0
    >>> vtor.check('boolean', 'false')
    0
    >>> vtor.check('boolean', 'no')
    0
    >>> vtor.check('boolean', 'nO')
    0
    >>> vtor.check('boolean', 'NO')
    0
    >>> vtor.check('boolean', 1)
    1
    >>> vtor.check('boolean', True)
    1
    >>> vtor.check('boolean', '1')
    1
    >>> vtor.check('boolean', 'on')
    1
    >>> vtor.check('boolean', 'true')
    1
    >>> vtor.check('boolean', 'yes')
    1
    >>> vtor.check('boolean', 'Yes')
    1
    >>> vtor.check('boolean', 'YES')
    1
    >>> vtor.check('boolean', '')
    Traceback (most recent call last):
    VdtTypeError: the value "" is of the wrong type.
    >>> vtor.check('boolean', 'up')
    Traceback (most recent call last):
    VdtTypeError: the value "up" is of the wrong type.

    """
    if isinstance(value, basestring):
        try:
            # lower() makes the lookup case-insensitive ('Yes', 'YES', ...)
            return bool_dict[value.lower()]
        except KeyError:
            raise VdtTypeError(value)
    # we do an equality test rather than an identity test
    # this ensures Python 2.2 compatibility
    # and allows 0 and 1 to represent True and False
    if value == False:
        return False
    elif value == True:
        return True
    else:
        raise VdtTypeError(value)


def is_ip_addr(value):
    """
    Check that the supplied value is an Internet Protocol address, v.4,
    represented by a dotted-quad string, i.e. '1.2.3.4'.

    >>> vtor.check('ip_addr', '1 ')
    '1'
    >>> vtor.check('ip_addr', ' 1.2')
    '1.2'
    >>> vtor.check('ip_addr', ' 1.2.3 ')
    '1.2.3'
    >>> vtor.check('ip_addr', '1.2.3.4')
    '1.2.3.4'
    >>> vtor.check('ip_addr', '0.0.0.0')
    '0.0.0.0'
    >>> vtor.check('ip_addr', '255.255.255.255')
    '255.255.255.255'
    >>> vtor.check('ip_addr', '255.255.255.256')
    Traceback (most recent call last):
    VdtValueError: the value "255.255.255.256" is unacceptable.
    >>> vtor.check('ip_addr', '1.2.3.4.5')
    Traceback (most recent call last):
    VdtValueError: the value "1.2.3.4.5" is unacceptable.
    >>> vtor.check('ip_addr', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    """
    if not isinstance(value, basestring):
        raise VdtTypeError(value)
    # Surrounding whitespace is tolerated (see the doctests above).
    value = value.strip()
    try:
        # Only the validation side effect matters here; the converted
        # number is discarded and the stripped string is returned.
        dottedQuadToNum(value)
    except ValueError:
        raise VdtValueError(value)
    return value
def is_tuple(value, min=None, max=None):
    """
    Check that the value is a tuple of values.

    You can optionally specify the minimum and maximum number of members.

    It does no check on members.

    >>> vtor.check('tuple', ())
    ()
    >>> vtor.check('tuple', [])
    ()
    >>> vtor.check('tuple', (1, 2))
    (1, 2)
    >>> vtor.check('tuple', [1, 2])
    (1, 2)
    >>> vtor.check('tuple(3)', (1, 2))
    Traceback (most recent call last):
    VdtValueTooShortError: the value "(1, 2)" is too short.
    >>> vtor.check('tuple(max=5)', (1, 2, 3, 4, 5, 6))
    Traceback (most recent call last):
    VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
    >>> vtor.check('tuple(min=3, max=5)', (1, 2, 3, 4))
    (1, 2, 3, 4)
    >>> vtor.check('tuple', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    >>> vtor.check('tuple', '12')
    Traceback (most recent call last):
    VdtTypeError: the value "12" is of the wrong type.
    """
    # Delegate all validation (type check, min/max length) to is_list and
    # just convert the resulting list to a tuple.
    return tuple(is_list(value, min, max))
def is_int_list(value, min=None, max=None):
    """
    Check that the value is a list of integers.

    You can optionally specify the minimum and maximum number of members.

    Each list member is checked that it is an integer.

    >>> vtor.check('int_list', ())
    []
    >>> vtor.check('int_list', [])
    []
    >>> vtor.check('int_list', (1, 2))
    [1, 2]
    >>> vtor.check('int_list', [1, 2])
    [1, 2]
    >>> vtor.check('int_list', [1, 'a'])
    Traceback (most recent call last):
    VdtTypeError: the value "a" is of the wrong type.
    """
    # is_list enforces the container type and length bounds; is_integer
    # then validates (and converts) each member.
    return [is_integer(mem) for mem in is_list(value, min, max)]
def is_float_list(value, min=None, max=None):
    """
    Check that the value is a list of floats.

    You can optionally specify the minimum and maximum number of members.

    Each list member is checked that it is a float.

    >>> vtor.check('float_list', ())
    []
    >>> vtor.check('float_list', [])
    []
    >>> vtor.check('float_list', (1, 2.0))
    [1.0, 2.0]
    >>> vtor.check('float_list', [1, 2.0])
    [1.0, 2.0]
    >>> vtor.check('float_list', [1, 'a'])
    Traceback (most recent call last):
    VdtTypeError: the value "a" is of the wrong type.
    """
    # is_list enforces the container type and length bounds; is_float
    # then validates (and converts) each member.
    return [is_float(mem) for mem in is_list(value, min, max)]


def is_string_list(value, min=None, max=None):
    """
    Check that the value is a list of strings.

    You can optionally specify the minimum and maximum number of members.

    Each list member is checked that it is a string.

    >>> vtor.check('string_list', ())
    []
    >>> vtor.check('string_list', [])
    []
    >>> vtor.check('string_list', ('a', 'b'))
    ['a', 'b']
    >>> vtor.check('string_list', ['a', 1])
    Traceback (most recent call last):
    VdtTypeError: the value "1" is of the wrong type.
    >>> vtor.check('string_list', 'hello')
    Traceback (most recent call last):
    VdtTypeError: the value "hello" is of the wrong type.
    """
    # NOTE(review): is_list itself already rejects bare strings with the
    # same VdtTypeError, so this guard is redundant but harmless.
    if isinstance(value, basestring):
        raise VdtTypeError(value)
    return [is_string(mem) for mem in is_list(value, min, max)]
def force_list(value, min=None, max=None):
    """
    Check that a value is a list, coercing strings into
    a list with one member. Useful where users forget the
    trailing comma that turns a single value into a list.

    You can optionally specify the minimum and maximum number of members.
    A minimum of greater than one will fail if the user only supplies a
    string.

    >>> vtor.check('force_list', ())
    []
    >>> vtor.check('force_list', [])
    []
    >>> vtor.check('force_list', 'hello')
    ['hello']
    """
    # Wrap any scalar (including strings) in a single-element list before
    # applying the normal list validation.
    if not isinstance(value, (list, tuple)):
        value = [value]
    return is_list(value, min, max)



# Dispatch table used by is_mixed_list to map the type names given as
# positional arguments onto the corresponding check functions.
fun_dict = {
    'integer': is_integer,
    'float': is_float,
    'ip_addr': is_ip_addr,
    'string': is_string,
    'boolean': is_boolean,
}
+ + >>> mix_str = "mixed_list('integer', 'float', 'ip_addr', 'string', 'boolean')" + >>> check_res = vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', True)) + >>> check_res == [1, 2.0, '1.2.3.4', 'a', True] + 1 + >>> check_res = vtor.check(mix_str, ('1', '2.0', '1.2.3.4', 'a', 'True')) + >>> check_res == [1, 2.0, '1.2.3.4', 'a', True] + 1 + >>> vtor.check(mix_str, ('b', 2.0, '1.2.3.4', 'a', True)) + Traceback (most recent call last): + VdtTypeError: the value "b" is of the wrong type. + >>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a')) + Traceback (most recent call last): + VdtValueTooShortError: the value "(1, 2.0, '1.2.3.4', 'a')" is too short. + >>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', 1, 'b')) + Traceback (most recent call last): + VdtValueTooLongError: the value "(1, 2.0, '1.2.3.4', 'a', 1, 'b')" is too long. + >>> vtor.check(mix_str, 0) + Traceback (most recent call last): + VdtTypeError: the value "0" is of the wrong type. + + This test requires an elaborate setup, because of a change in error string + output from the interpreter between Python 2.2 and 2.3 . + + >>> res_seq = ( + ... 'passed an incorrect value "', + ... 'yoda', + ... '" for parameter "mixed_list".', + ... ) + >>> res_str = "'".join(res_seq) + >>> try: + ... vtor.check('mixed_list("yoda")', ('a')) + ... except VdtParamError, err: + ... str(err) == res_str + 1 + """ + try: + length = len(value) + except TypeError: + raise VdtTypeError(value) + if length < len(args): + raise VdtValueTooShortError(value) + elif length > len(args): + raise VdtValueTooLongError(value) + try: + return [fun_dict[arg](val) for arg, val in zip(args, value)] + except KeyError, e: + raise VdtParamError('mixed_list', e) + + +def is_option(value, *options): + """ + This check matches the value to any of a set of options. + + >>> vtor.check('option("yoda", "jedi")', 'yoda') + 'yoda' + >>> vtor.check('option("yoda", "jedi")', 'jed') + Traceback (most recent call last): + VdtValueError: the value "jed" is unacceptable. 
+ >>> vtor.check('option("yoda", "jedi")', 0) + Traceback (most recent call last): + VdtTypeError: the value "0" is of the wrong type. + """ + if not isinstance(value, basestring): + raise VdtTypeError(value) + if not value in options: + raise VdtValueError(value) + return value + + +def _test(value, *args, **keywargs): + """ + A function that exists for test purposes. + + >>> checks = [ + ... '3, 6, min=1, max=3, test=list(a, b, c)', + ... '3', + ... '3, 6', + ... '3,', + ... 'min=1, test="a b c"', + ... 'min=5, test="a, b, c"', + ... 'min=1, max=3, test="a, b, c"', + ... 'min=-100, test=-99', + ... 'min=1, max=3', + ... '3, 6, test="36"', + ... '3, 6, test="a, b, c"', + ... '3, max=3, test=list("a", "b", "c")', + ... '''3, max=3, test=list("'a'", 'b', "x=(c)")''', + ... "test='x=fish(3)'", + ... ] + >>> v = Validator({'test': _test}) + >>> for entry in checks: + ... print v.check(('test(%s)' % entry), 3) + (3, ('3', '6'), {'test': ['a', 'b', 'c'], 'max': '3', 'min': '1'}) + (3, ('3',), {}) + (3, ('3', '6'), {}) + (3, ('3',), {}) + (3, (), {'test': 'a b c', 'min': '1'}) + (3, (), {'test': 'a, b, c', 'min': '5'}) + (3, (), {'test': 'a, b, c', 'max': '3', 'min': '1'}) + (3, (), {'test': '-99', 'min': '-100'}) + (3, (), {'max': '3', 'min': '1'}) + (3, ('3', '6'), {'test': '36'}) + (3, ('3', '6'), {'test': 'a, b, c'}) + (3, ('3',), {'test': ['a', 'b', 'c'], 'max': '3'}) + (3, ('3',), {'test': ["'a'", 'b', 'x=(c)'], 'max': '3'}) + (3, (), {'test': 'x=fish(3)'}) + + >>> v = Validator() + >>> v.check('integer(default=6)', '3') + 3 + >>> v.check('integer(default=6)', None, True) + 6 + >>> v.get_default_value('integer(default=6)') + 6 + >>> v.get_default_value('float(default=6)') + 6.0 + >>> v.get_default_value('pass(default=None)') + >>> v.get_default_value("string(default='None')") + 'None' + >>> v.get_default_value('pass') + Traceback (most recent call last): + KeyError: 'Check "pass" has no default value.' 
+ >>> v.get_default_value('pass(default=list(1, 2, 3, 4))') + ['1', '2', '3', '4'] + + >>> v = Validator() + >>> v.check("pass(default=None)", None, True) + >>> v.check("pass(default='None')", None, True) + 'None' + >>> v.check('pass(default="None")', None, True) + 'None' + >>> v.check('pass(default=list(1, 2, 3, 4))', None, True) + ['1', '2', '3', '4'] + + Bug test for unicode arguments + >>> v = Validator() + >>> v.check(u'string(min=4)', u'test') + u'test' + + >>> v = Validator() + >>> v.get_default_value(u'string(min=4, default="1234")') + u'1234' + >>> v.check(u'string(min=4, default="1234")', u'test') + u'test' + + >>> v = Validator() + >>> default = v.get_default_value('string(default=None)') + >>> default == None + 1 + """ + return (value, args, keywargs) + + +def _test2(): + """ + >>> + >>> v = Validator() + >>> v.get_default_value('string(default="#ff00dd")') + '#ff00dd' + >>> v.get_default_value('integer(default=3) # comment') + 3 + """ + +def _test3(): + r""" + >>> vtor.check('string(default="")', '', missing=True) + '' + >>> vtor.check('string(default="\n")', '', missing=True) + '\n' + >>> print vtor.check('string(default="\n")', '', missing=True), + + >>> vtor.check('string()', '\n') + '\n' + >>> vtor.check('string(default="\n\n\n")', '', missing=True) + '\n\n\n' + >>> vtor.check('string()', 'random \n text goes here\n\n') + 'random \n text goes here\n\n' + >>> vtor.check('string(default=" \nrandom text\ngoes \n here\n\n ")', + ... 
'', missing=True) + ' \nrandom text\ngoes \n here\n\n ' + >>> vtor.check("string(default='\n\n\n')", '', missing=True) + '\n\n\n' + >>> vtor.check("option('\n','a','b',default='\n')", '', missing=True) + '\n' + >>> vtor.check("string_list()", ['foo', '\n', 'bar']) + ['foo', '\n', 'bar'] + >>> vtor.check("string_list(default=list('\n'))", '', missing=True) + ['\n'] + """ + + +if __name__ == '__main__': + # run the code tests in doctest format + import sys + import doctest + m = sys.modules.get('__main__') + globs = m.__dict__.copy() + globs.update({ + 'vtor': Validator(), + }) + doctest.testmod(m, globs=globs) diff --git a/astropy/extern/configobj_py3/__init__.py b/astropy/extern/configobj_py3/__init__.py new file mode 100644 index 0000000..54e9e57 --- /dev/null +++ b/astropy/extern/configobj_py3/__init__.py @@ -0,0 +1,104 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +""" +This is a copy of the main portions of the `configobj +`_ package. This is used +internally in the Astropy configuration system. The license for configobj is +available in the ``licenses/CONFIGOBJ_LICENSE.rst`` file in the Astropy +source distribution. + +This is a version of configobj that has been modified by Zubin Mithra to be +compatible with python 3.x. This version is not official, but has been +"blessed" by configobj's original author. This version of the code was +obtained from https://bitbucket.org/zubin71/configobj-py3 + +For a python 2.x version, see the ``astropy/extern/configobj`` directory. +""" + +#this holds the contents of the setup.py file used by configobj +_configobj_setup_dot_py=""" +# setup.py +# Install script for ConfigObj +# Copyright (C) 2005-2010 Michael Foord, Mark Andrews, Nicola Larosa +# E-mail: fuzzyman AT voidspace DOT org DOT uk +# mark AT la-la DOT com +# nico AT tekNico DOT net + +# This software is licensed under the terms of the BSD license. 
+# http://www.voidspace.org.uk/python/license.shtml + +import sys +from distutils.core import setup +from configobj import __version__ as VERSION + +NAME = 'configobj' + +MODULES = 'configobj', 'validate' + +DESCRIPTION = 'Config file reading, writing and validation.' + +URL = 'http://www.voidspace.org.uk/python/configobj.html' + +DOWNLOAD_URL = "http://www.voidspace.org.uk/downloads/configobj-%s.zip" % VERSION + +LONG_DESCRIPTION = ""#"**ConfigObj** is a simple but powerful config file reader and writer: an *ini +file round tripper*. Its main feature is that it is very easy to use, with a +straightforward programmer's interface and a simple syntax for config files. +It has lots of other features though : + +* Nested sections (subsections), to any level +* List values +* Multiple line values +* Full Unicode support +* String interpolation (substitution) +* Integrated with a powerful validation system + + - including automatic type checking/conversion + - and allowing default values + - repeated sections + +* All comments in the file are preserved +* The order of keys/sections is preserved +* Powerful ``unrepr`` mode for storing/retrieving Python data-types + +| Release 4.7.2 fixes several bugs in 4.7.1 +| Release 4.7.1 fixes a bug with the deprecated options keyword in +| 4.7.0. 
+| Release 4.7.0 improves performance adds features for validation and +| fixes some bugs.""#" + +CLASSIFIERS = [ + 'Development Status :: 6 - Mature', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: BSD License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2.3', + 'Programming Language :: Python :: 2.4', + 'Programming Language :: Python :: 2.5', + 'Programming Language :: Python :: 2.6', + 'Operating System :: OS Independent', + 'Topic :: Software Development :: Libraries', + 'Topic :: Software Development :: Libraries :: Python Modules', +] + +AUTHOR = 'Michael Foord & Nicola Larosa' + +AUTHOR_EMAIL = 'fuzzyman@voidspace.org.uk' + +KEYWORDS = "config, ini, dictionary, application, admin, sysadmin, configuration, validation".split(', ') + + +setup(name=NAME, + version=VERSION, + description=DESCRIPTION, + long_description=LONG_DESCRIPTION, + download_url=DOWNLOAD_URL, + author=AUTHOR, + author_email=AUTHOR_EMAIL, + url=URL, + py_modules=MODULES, + classifiers=CLASSIFIERS, + keywords=KEYWORDS + ) +""".replace('""#"','"""') +#the replacement is necessary because """ would otherwise terminate the string diff --git a/astropy/extern/configobj_py3/configobj.py b/astropy/extern/configobj_py3/configobj.py new file mode 100644 index 0000000..70c89f2 --- /dev/null +++ b/astropy/extern/configobj_py3/configobj.py @@ -0,0 +1,2405 @@ +# configobj.py +# A config file reader/writer that supports nested sections in config files. 
# Copyright (C) 2005-2010 Michael Foord, Nicola Larosa
# E-mail: fuzzyman AT voidspace DOT org DOT uk
#         nico AT tekNico DOT net

# ConfigObj 4
# http://www.voidspace.org.uk/python/configobj.html

# Released subject to the BSD License
# Please see http://www.voidspace.org.uk/python/license.shtml

# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
# For information about bugfixes, updates and support, please join the
# ConfigObj mailing list:
# http://lists.sourceforge.net/lists/listinfo/configobj-develop
# Comments, suggestions and bug reports welcome.

# NOTE(review): 'pdb' looks like a debugging leftover - it is not used in
# the code visible here; confirm it is unused file-wide and remove it.
import pdb

import os
import re
import sys

from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE

# Used by getObj (below) to parse Python literal values.
from ast import parse

# A dictionary mapping BOM to
# the encoding to decode with, and what to set the
# encoding attribute to.
BOMS = {
    BOM_UTF8: ('utf_8', None),
    BOM_UTF16_BE: ('utf16_be', 'utf_16'),
    BOM_UTF16_LE: ('utf16_le', 'utf_16'),
    BOM_UTF16: ('utf_16', 'utf_16'),
    }

# All legal variants of the BOM codecs.
# TODO: the list of aliases is not meant to be exhaustive, is there a
# better way ?
BOM_LIST = {
    'utf_16': 'utf_16',
    'u16': 'utf_16',
    'utf16': 'utf_16',
    'utf-16': 'utf_16',
    'utf16_be': 'utf16_be',
    'utf_16_be': 'utf16_be',
    'utf-16be': 'utf16_be',
    'utf16_le': 'utf16_le',
    'utf_16_le': 'utf16_le',
    'utf-16le': 'utf16_le',
    'utf_8': 'utf_8',
    'u8': 'utf_8',
    'utf': 'utf_8',
    'utf8': 'utf_8',
    'utf-8': 'utf_8',
    }

# Map of encodings to the BOM to write.
+BOM_SET = { + 'utf_8': BOM_UTF8, + 'utf_16': BOM_UTF16, + 'utf16_be': BOM_UTF16_BE, + 'utf16_le': BOM_UTF16_LE, + None: BOM_UTF8 + } + + +def match_utf8(encoding): + return BOM_LIST.get(encoding.lower()) == 'utf_8' + +# Quote strings used for writing values +squot = "'%s'" +dquot = '"%s"' +noquot = "%s" +wspace_plus = ' \r\n\v\t\'"' +tsquot = '"""%s"""' +tdquot = "'''%s'''" + +# Sentinel for use in getattr calls to replace hasattr +MISSING = object() + +__version__ = '4.7.2' + +try: + any +except NameError: + def any(iterable): + for entry in iterable: + if entry: + return True + return False + +__all__ = ( + '__version__', + 'DEFAULT_INDENT_TYPE', + 'DEFAULT_INTERPOLATION', + 'ConfigObjError', + 'NestingError', + 'ParseError', + 'DuplicateError', + 'ConfigspecError', + 'ConfigObj', + 'SimpleVal', + 'InterpolationError', + 'InterpolationLoopError', + 'MissingInterpolationOption', + 'RepeatSectionError', + 'ReloadError', + 'UnreprError', + 'UnknownType', + 'flatten_errors', + 'get_extra_values' +) + +DEFAULT_INTERPOLATION = 'configparser' +DEFAULT_INDENT_TYPE = ' ' +MAX_INTERPOL_DEPTH = 10 + +OPTION_DEFAULTS = { + 'interpolation': True, + 'raise_errors': False, + 'list_values': True, + 'create_empty': False, + 'file_error': False, + 'configspec': None, + 'stringify': True, + # option may be set to one of ('', ' ', '\t') + 'indent_type': None, + 'encoding': None, + 'default_encoding': None, + 'unrepr': False, + 'write_empty_values': False, +} + +def getObj(s): + p = parse("a=" + s) + obj = p.body[0].value + return obj + +class UnknownType(Exception): + pass + +class Builder(object): + + def build(self, o): + + m = getattr(self, 'build_' + o.__class__.__name__, None) + if m is None: + raise UnknownType(o.__class__.__name__) + return m(o) + + def build_List(self, o): + return map(self.build, o.elts) + + def build_Num(self, o): + return o.n + + def build_Str(str, o): + return o.s + + def build_Dict(self, o): + d = {} + items = zip(o.keys, o.values) + for key, value in 
items: + key = self.build(key) + value = self.build(value) + d[key] = value + return d + + def build_Tuple(self, o): + return tuple(self.build_List(o)) + + def build_Name(self, o): + value = o.id + if value == 'None': + return None + if value == 'True': + return True + if value == 'False': + return False + + # An undefined Name + raise UnknownType('Undefined Name') + +_builder = Builder() + +def unrepr(s): + if not s: + return s + return _builder.build(getObj(s)) + + +class ConfigObjError(SyntaxError): + """ + This is the base class for all errors that ConfigObj raises. + It is a subclass of SyntaxError. + """ + def __init__(self, message='', line_number=None, line=''): + self.line = line + self.line_number = line_number + SyntaxError.__init__(self, message) + + +class NestingError(ConfigObjError): + """ + This error indicates a level of nesting that doesn't match. + """ + + +class ParseError(ConfigObjError): + """ + This error indicates that a line is badly written. + It is neither a valid ``key = value`` line, + nor a valid section marker line. + """ + + +class ReloadError(IOError): + """ + A 'reload' operation failed. + This exception is a subclass of ``IOError``. + """ + def __init__(self): + IOError.__init__(self, 'reload failed, filename is not set.') + + +class DuplicateError(ConfigObjError): + """ + The keyword or section specified already exists. + """ + + +class ConfigspecError(ConfigObjError): + """ + An error occured whilst parsing a configspec. + """ + + +class InterpolationError(ConfigObjError): + """Base class for the two interpolation errors.""" + + +class InterpolationLoopError(InterpolationError): + """Maximum interpolation depth exceeded in string interpolation.""" + + def __init__(self, option): + InterpolationError.__init__( + self, + 'interpolation loop detected in value "%s".' % option) + + +class RepeatSectionError(ConfigObjError): + """ + This error indicates additional sections in a section with a + ``__many__`` (repeated) section. 
+ """ + + +class MissingInterpolationOption(InterpolationError): + """A value specified for interpolation was missing.""" + def __init__(self, option): + msg = 'missing option "%s" in interpolation.' % option + InterpolationError.__init__(self, msg) + + +class UnreprError(ConfigObjError): + """An error parsing in unrepr mode.""" + + +class InterpolationEngine(object): + """ + A helper class to help perform string interpolation. + + This class is an abstract base class; its descendants perform + the actual work. + """ + + # compiled regexp to use in self.interpolate() + _KEYCRE = re.compile(r"%\(([^)]*)\)s") + _cookie = '%' + + def __init__(self, section): + # the Section instance that "owns" this engine + self.section = section + + + def interpolate(self, key, value): + # short-cut + if not self._cookie in value: + return value + + def recursive_interpolate(key, value, section, backtrail): + """The function that does the actual work. + + ``value``: the string we're trying to interpolate. + ``section``: the section in which that string was found + ``backtrail``: a dict to keep track of where we've been, + to detect and prevent infinite recursion loops + + This is similar to a depth-first-search algorithm. + """ + # Have we been here already? 
+ if (key, section.name) in backtrail: + # Yes - infinite loop detected + raise InterpolationLoopError(key) + # Place a marker on our backtrail so we won't come back here again + backtrail[(key, section.name)] = 1 + + # Now start the actual work + match = self._KEYCRE.search(value) + while match: + # The actual parsing of the match is implementation-dependent, + # so delegate to our helper function + k, v, s = self._parse_match(match) + if k is None: + # That's the signal that no further interpolation is needed + replacement = v + else: + # Further interpolation may be needed to obtain final value + replacement = recursive_interpolate(k, v, s, backtrail) + # Replace the matched string with its final value + start, end = match.span() + value = ''.join((value[:start], replacement, value[end:])) + new_search_start = start + len(replacement) + # Pick up the next interpolation key, if any, for next time + # through the while loop + match = self._KEYCRE.search(value, new_search_start) + + # Now safe to come back here again; remove marker from backtrail + del backtrail[(key, section.name)] + + return value + + # Back in interpolate(), all we have to do is kick off the recursive + # function with appropriate starting values + value = recursive_interpolate(key, value, self.section, {}) + return value + + + def _fetch(self, key): + """Helper function to fetch values from owning section. + + Returns a 2-tuple: the value, and the section where it was found. + """ + # switch off interpolation before we try and fetch anything ! 
+ save_interp = self.section.main.interpolation + self.section.main.interpolation = False + + # Start at section that "owns" this InterpolationEngine + current_section = self.section + while True: + # try the current section first + val = current_section.get(key) + if val is not None and not isinstance(val, Section): + break + # try "DEFAULT" next + val = current_section.get('DEFAULT', {}).get(key) + if val is not None and not isinstance(val, Section): + break + # move up to parent and try again + # top-level's parent is itself + if current_section.parent is current_section: + # reached top level, time to give up + break + current_section = current_section.parent + + # restore interpolation to previous value before returning + self.section.main.interpolation = save_interp + if val is None: + raise MissingInterpolationOption(key) + return val, current_section + + + def _parse_match(self, match): + """Implementation-dependent helper function. + + Will be passed a match object corresponding to the interpolation + key we just found (e.g., "%(foo)s" or "$foo"). Should look up that + key in the appropriate config file section (using the ``_fetch()`` + helper function) and return a 3-tuple: (key, value, section) + + ``key`` is the name of the key we're looking for + ``value`` is the value found for that key + ``section`` is a reference to the section where it was found + + ``key`` and ``section`` should be None if no further + interpolation should be performed on the resulting value + (e.g., if we interpolated "$$" and returned "$"). 
+ """ + raise NotImplementedError() + + +class ConfigParserInterpolation(InterpolationEngine): + """Behaves like ConfigParser.""" + _cookie = '%' + _KEYCRE = re.compile(r"%\(([^)]*)\)s") + + def _parse_match(self, match): + key = match.group(1) + value, section = self._fetch(key) + return key, value, section + + +class TemplateInterpolation(InterpolationEngine): + """Behaves like string.Template.""" + _cookie = '$' + _delimiter = '$' + _KEYCRE = re.compile(r""" + \$(?: + (?P\$) | # Two $ signs + (?P[_a-z][_a-z0-9]*) | # $name format + {(?P[^}]*)} # ${name} format + ) + """, re.IGNORECASE | re.VERBOSE) + + def _parse_match(self, match): + # Valid name (in or out of braces): fetch value from section + key = match.group('named') or match.group('braced') + if key is not None: + value, section = self._fetch(key) + return key, value, section + # Escaped delimiter (e.g., $$): return single delimiter + if match.group('escaped') is not None: + # Return None for key and section to indicate it's time to stop + return None, self._delimiter, None + # Anything else: ignore completely, just return it unchanged + return None, match.group(), None + + +interpolation_engines = { + 'configparser': ConfigParserInterpolation, + 'template': TemplateInterpolation, +} + +def __newobj__(cls, *args): + # Hack for pickle + return cls.__new__(cls, *args) + +class Section(dict): + """ + A dictionary-like object that represents a section in a config file. + + It does string interpolation if the 'interpolation' attribute + of the 'main' object is set to True. + + Interpolation is tried first from this object, then from the 'DEFAULT' + section of this object, next from the parent and its 'DEFAULT' section, + and so on until the main object is reached. + + A Section will behave like an ordered dictionary - following the + order of the ``scalars`` and ``sections`` attributes. + You can use this to change the order of members. + + Iteration follows the order: scalars, then sections. 
+ """ + + def __setstate__(self, state): + dict.update(self, state[0]) + self.__dict__.update(state[1]) + + def __reduce__(self): + state = (dict(self), self.__dict__) + return (__newobj__, (self.__class__,), state) + + def __init__(self, parent, depth, main, indict=None, name=None): + """ + * parent is the section above + * depth is the depth level of this section + * main is the main ConfigObj + * indict is a dictionary to initialise the section with + """ + if indict is None: + indict = {} + dict.__init__(self) + # used for nesting level *and* interpolation + self.parent = parent + # used for the interpolation attribute + self.main = main + # level of nesting depth of this Section + self.depth = depth + # purely for information + self.name = name + # + self._initialise() + # we do this explicitly so that __setitem__ is used properly + # (rather than just passing to ``dict.__init__``) + for entry, value in list(indict.items()): + self[entry] = value + + def _initialise(self): + # the sequence of scalar values in this Section + self.scalars = [] + # the sequence of sections in this Section + self.sections = [] + # for comments :-) + self.comments = {} + self.inline_comments = {} + # the configspec + self.configspec = None + # for defaults + self.defaults = [] + self.default_values = {} + self.extra_values = [] + self._created = False + + def _interpolate(self, key, value): + try: + # do we already have an interpolation engine? + engine = self._interpolation_engine + except AttributeError: + # not yet: first time running _interpolate(), so pick the engine + name = self.main.interpolation + if name == True: # note that "if name:" would be incorrect here + # backwards-compatibility: interpolation=True means use default + name = DEFAULT_INTERPOLATION + name = name.lower() # so that "Template", "template", etc. 
all work + class_ = interpolation_engines.get(name, None) + if class_ is None: + # invalid value for self.main.interpolation + self.main.interpolation = False + return value + else: + # save reference to engine so we don't have to do this again + engine = self._interpolation_engine = class_(self) + # let the engine do the actual work + return engine.interpolate(key, value) + + def __getitem__(self, key): + """Fetch the item and do string interpolation.""" + val = dict.__getitem__(self, key) + if self.main.interpolation: + if isinstance(val, str): + return self._interpolate(key, val) + if isinstance(val, list): + def _check(entry): + if isinstance(entry, str): + return self._interpolate(key, entry) + return entry + new = [_check(entry) for entry in val] + if new != val: + return new + return val + + def __setitem__(self, key, value, unrepr=False): + """ + Correctly set a value. + + Making dictionary values Section instances. + (We have to special case 'Section' instances - which are also dicts) + + Keys must be strings. + Values need only be strings (or lists of strings) if + ``main.stringify`` is set. + + ``unrepr`` must be set when setting a value to a dictionary, without + creating a new sub-section. + """ + if not isinstance(key, str): + raise ValueError('The key "%s" is not a string.' 
% key) + + # add the comment + if key not in self.comments: + self.comments[key] = [] + self.inline_comments[key] = '' + # remove the entry from defaults + if key in self.defaults: + self.defaults.remove(key) + # + if isinstance(value, Section): + if key not in self: + self.sections.append(key) + dict.__setitem__(self, key, value) + elif isinstance(value, dict) and not unrepr: + # First create the new depth level, + # then create the section + if key not in self: + self.sections.append(key) + new_depth = self.depth + 1 + dict.__setitem__( + self, + key, + Section( + self, + new_depth, + self.main, + indict=value, + name=key)) + else: + if key not in self: + self.scalars.append(key) + if not self.main.stringify: + if isinstance(value, str): + pass + elif isinstance(value, (list, tuple)): + for entry in value: + if not isinstance(entry, str): + raise TypeError('Value is not a string "%s".' % entry) + else: + raise TypeError('Value is not a string "%s".' % value) + dict.__setitem__(self, key, value) + + + def __delitem__(self, key): + """Remove items from the sequence when deleting.""" + dict. __delitem__(self, key) + if key in self.scalars: + self.scalars.remove(key) + else: + self.sections.remove(key) + del self.comments[key] + del self.inline_comments[key] + + def get(self, key, default=None): + """A version of ``get`` that doesn't bypass string interpolation.""" + try: + return self[key] + except KeyError: + return default + + + def update(self, indict): + """ + A version of update that uses our ``__setitem__``. + """ + for entry in indict: + self[entry] = indict[entry] + + + def pop(self, key, default=MISSING): + """ + 'D.pop(k[,d]) -> v, remove specified key and return the corresponding value. 
+ If key is not found, d is returned if given, otherwise KeyError is raised' + """ + try: + val = self[key] + except KeyError: + if default is MISSING: + raise + val = default + else: + del self[key] + return val + + def popitem(self): + """Pops the first (key,val)""" + sequence = (self.scalars + self.sections) + if not sequence: + raise KeyError(": 'popitem(): dictionary is empty'") + key = sequence[0] + val = self[key] + del self[key] + return key, val + + def clear(self): + """ + A version of clear that also affects scalars/sections + Also clears comments and configspec. + + Leaves other attributes alone : + depth/main/parent are not affected + """ + dict.clear(self) + self.scalars = [] + self.sections = [] + self.comments = {} + self.inline_comments = {} + self.configspec = None + self.defaults = [] + self.extra_values = [] + + + def setdefault(self, key, default=None): + """A version of setdefault that sets sequence if appropriate.""" + try: + return self[key] + except KeyError: + self[key] = default + return self[key] + + + def items(self): + """D.items() -> list of D's (key, value) pairs, as 2-tuples""" + return list(zip((self.scalars + self.sections), list(self.values()))) + + + def keys(self): + """D.keys() -> list of D's keys""" + return (self.scalars + self.sections) + + + def values(self): + """D.values() -> list of D's values""" + return [self[key] for key in (self.scalars + self.sections)] + + + def iteritems(self): + """D.iteritems() -> an iterator over the (key, value) items of D""" + return iter(list(self.items())) + + + def iterkeys(self): + """D.iterkeys() -> an iterator over the keys of D""" + return iter((self.scalars + self.sections)) + + __iter__ = iterkeys + + + def itervalues(self): + """D.itervalues() -> an iterator over the values of D""" + return iter(list(self.values())) + + + def __repr__(self): + """x.__repr__() <==> repr(x)""" + def _getval(key): + try: + return self[key] + except MissingInterpolationOption: + return 
dict.__getitem__(self, key) + return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key)))) + for key in (self.scalars + self.sections)]) + + __str__ = __repr__ + __str__.__doc__ = "x.__str__() <==> str(x)" + + + # Extra methods - not in a normal dictionary + + def dict(self): + """ + Return a deepcopy of self as a dictionary. + + All members that are ``Section`` instances are recursively turned to + ordinary dictionaries - by calling their ``dict`` method. + + >>> n = a.dict() + >>> n == a + 1 + >>> n is a + 0 + """ + newdict = {} + for entry in self: + this_entry = self[entry] + if isinstance(this_entry, Section): + this_entry = this_entry.dict() + elif isinstance(this_entry, list): + # create a copy rather than a reference + this_entry = list(this_entry) + elif isinstance(this_entry, tuple): + # create a copy rather than a reference + this_entry = tuple(this_entry) + newdict[entry] = this_entry + return newdict + + + def merge(self, indict): + """ + A recursive update - useful for merging config files. + + >>> a = '''[section1] + ... option1 = True + ... [[subsection]] + ... more_options = False + ... # end of file'''.splitlines() + >>> b = '''# File is user.ini + ... [section1] + ... option1 = False + ... # end of file'''.splitlines() + >>> c1 = ConfigObj(b) + >>> c2 = ConfigObj(a) + >>> c2.merge(c1) + >>> c2 + ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}}) + """ + for key, val in list(indict.items()): + if (key in self and isinstance(self[key], dict) and + isinstance(val, dict)): + self[key].merge(val) + else: + self[key] = val + + + def rename(self, oldkey, newkey): + """ + Change a keyname to another, without changing position in sequence. + + Implemented so that transformations can be made on keys, + as well as on values. (used by encode and decode) + + Also renames comments. 
+ """ + if oldkey in self.scalars: + the_list = self.scalars + elif oldkey in self.sections: + the_list = self.sections + else: + raise KeyError('Key "%s" not found.' % oldkey) + pos = the_list.index(oldkey) + # + val = self[oldkey] + dict.__delitem__(self, oldkey) + dict.__setitem__(self, newkey, val) + the_list.remove(oldkey) + the_list.insert(pos, newkey) + comm = self.comments[oldkey] + inline_comment = self.inline_comments[oldkey] + del self.comments[oldkey] + del self.inline_comments[oldkey] + self.comments[newkey] = comm + self.inline_comments[newkey] = inline_comment + + def walk(self, function, raise_errors=True, + call_on_sections=False, **keywargs): + """ + Walk every member and call a function on the keyword and value. + + Return a dictionary of the return values + + If the function raises an exception, raise the errror + unless ``raise_errors=False``, in which case set the return value to + ``False``. + + Any unrecognised keyword arguments you pass to walk, will be pased on + to the function you pass in. + + Note: if ``call_on_sections`` is ``True`` then - on encountering a + subsection, *first* the function is called for the *whole* subsection, + and then recurses into it's members. This means your function must be + able to handle strings, dictionaries and lists. This allows you + to change the key of subsections as well as for ordinary members. The + return value when called on the whole subsection has to be discarded. + + See the encode and decode methods for examples, including functions. + + .. admonition:: caution + + You can use ``walk`` to transform the names of members of a section + but you mustn't add or delete members. + + >>> config = '''[XXXXsection] + ... XXXXkey = XXXXvalue'''.splitlines() + >>> cfg = ConfigObj(config) + >>> cfg + ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}}) + >>> def transform(section, key): + ... val = section[key] + ... newkey = key.replace('XXXX', 'CLIENT1') + ... section.rename(key, newkey) + ... 
if isinstance(val, (tuple, list, dict)): + ... pass + ... else: + ... val = val.replace('XXXX', 'CLIENT1') + ... section[newkey] = val + >>> cfg.walk(transform, call_on_sections=True) + {'CLIENT1section': {'CLIENT1key': None}} + >>> cfg + ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}}) + """ + out = {} + # scalars first + for i in range(len(self.scalars)): + entry = self.scalars[i] + try: + val = function(self, entry, **keywargs) + # bound again in case name has changed + entry = self.scalars[i] + out[entry] = val + except Exception: + if raise_errors: + raise + else: + entry = self.scalars[i] + out[entry] = False + # then sections + for i in range(len(self.sections)): + entry = self.sections[i] + if call_on_sections: + try: + function(self, entry, **keywargs) + except Exception: + if raise_errors: + raise + else: + entry = self.sections[i] + out[entry] = False + # bound again in case name has changed + entry = self.sections[i] + # previous result is discarded + out[entry] = self[entry].walk( + function, + raise_errors=raise_errors, + call_on_sections=call_on_sections, + **keywargs) + return out + + def as_bool(self, key): + """ + Accepts a key as input. The corresponding value must be a string or + the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to + retain compatibility with Python 2.2. + + If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns + ``True``. + + If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns + ``False``. + + ``as_bool`` is not case sensitive. + + Any other input will raise a ``ValueError``. 
+ + >>> a = ConfigObj() + >>> a['a'] = 'fish' + >>> a.as_bool('a') + Traceback (most recent call last): + ValueError: Value "fish" is neither True nor False + >>> a['b'] = 'True' + >>> a.as_bool('b') + 1 + >>> a['b'] = 'off' + >>> a.as_bool('b') + 0 + """ + val = self[key] + if val == True: + return True + elif val == False: + return False + else: + try: + if not isinstance(val, str): + # TODO: Why do we raise a KeyError here? + raise KeyError() + else: + return self.main._bools[val.lower()] + except KeyError: + raise ValueError('Value "%s" is neither True nor False' % val) + + def as_int(self, key): + """ + A convenience method which coerces the specified value to an integer. + + If the value is an invalid literal for ``int``, a ``ValueError`` will + be raised. + + >>> a = ConfigObj() + >>> a['a'] = 'fish' + >>> a.as_int('a') + Traceback (most recent call last): + ValueError: invalid literal for int() with base 10: 'fish' + >>> a['b'] = '1' + >>> a.as_int('b') + 1 + >>> a['b'] = '3.2' + >>> a.as_int('b') + Traceback (most recent call last): + ValueError: invalid literal for int() with base 10: '3.2' + """ + return int(self[key]) + + def as_float(self, key): + """ + A convenience method which coerces the specified value to a float. + + If the value is an invalid literal for ``float``, a ``ValueError`` will + be raised. + + >>> a = ConfigObj() + >>> a['a'] = 'fish' + >>> a.as_float('a') + Traceback (most recent call last): + ValueError: invalid literal for float(): fish + >>> a['b'] = '1' + >>> a.as_float('b') + 1.0 + >>> a['b'] = '3.2' + >>> a.as_float('b') + 3.2000000000000002 + """ + return float(self[key]) + + def as_list(self, key): + """ + A convenience method which fetches the specified value, guaranteeing + that it is a list. 
+ + >>> a = ConfigObj() + >>> a['a'] = 1 + >>> a.as_list('a') + [1] + >>> a['a'] = (1,) + >>> a.as_list('a') + [1] + >>> a['a'] = [1] + >>> a.as_list('a') + [1] + """ + result = self[key] + if isinstance(result, (tuple, list)): + return list(result) + return [result] + + def restore_default(self, key): + """ + Restore (and return) default value for the specified key. + + This method will only work for a ConfigObj that was created + with a configspec and has been validated. + + If there is no default value for this key, ``KeyError`` is raised. + """ + default = self.default_values[key] + dict.__setitem__(self, key, default) + if key not in self.defaults: + self.defaults.append(key) + return default + + def restore_defaults(self): + """ + Recursively restore default values to all members + that have them. + + This method will only work for a ConfigObj that was created + with a configspec and has been validated. + + It doesn't delete or modify entries without default values. + """ + for key in self.default_values: + self.restore_default(key) + + for section in self.sections: + self[section].restore_defaults() + + +class ConfigObj(Section): + """An object to read, create, and write config files.""" + + _keyword = re.compile(r'''^ # line start + (\s*) # indentation + ( # keyword + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'"=].*?) # no quotes + ) + \s*=\s* # divider + (.*) # value (including list values and comments) + $ # line end + ''', + re.VERBOSE) + + _sectionmarker = re.compile(r'''^ + (\s*) # 1: indentation + ((?:\[\s*)+) # 2: section marker open + ( # 3: section name open + (?:"\s*\S.*?\s*")| # at least one non-space with double quotes + (?:'\s*\S.*?\s*')| # at least one non-space with single quotes + (?:[^'"\s].*?) # at least one non-space unquoted + ) # section name close + ((?:\s*\])+) # 4: section marker close + \s*(\#.*)? 
# 5: optional comment + $''', + re.VERBOSE) + + # this regexp pulls list values out as a single string + # or single values and comments + # FIXME: this regex adds a '' to the end of comma terminated lists + # workaround in ``_handle_value`` + _valueexp = re.compile(r'''^ + (?: + (?: + ( + (?: + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\#][^,\#]*?) # unquoted + ) + \s*,\s* # comma + )* # match all list items ending in a comma (if any) + ) + ( + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\#\s][^,]*?)| # unquoted + (?:(? 1: + msg = "Parsing failed with several errors.\nFirst error %s" % info + error = ConfigObjError(msg) + else: + error = self._errors[0] + # set the errors attribute; it's a list of tuples: + # (error_type, message, line_number) + error.errors = self._errors + # set the config attribute + error.config = self + raise error + # delete private attributes + del self._errors + + if configspec is None: + self.configspec = None + else: + self._handle_configspec(configspec) + + def _initialise(self, options=None): + if options is None: + options = OPTION_DEFAULTS + + # initialise a few variables + self.filename = None + self._errors = [] + self.raise_errors = options['raise_errors'] + self.interpolation = options['interpolation'] + self.list_values = options['list_values'] + self.create_empty = options['create_empty'] + self.file_error = options['file_error'] + self.stringify = options['stringify'] + self.indent_type = options['indent_type'] + self.encoding = options['encoding'] + self.default_encoding = options['default_encoding'] + self.BOM = False + self.newlines = None + self.write_empty_values = options['write_empty_values'] + self.unrepr = options['unrepr'] + + self.initial_comment = [] + self.final_comment = [] + self.configspec = None + + if self._inspec: + self.list_values = False + + # Clear section attributes as well + Section._initialise(self) + + def __repr__(self): + def _getval(key): + try: 
+ return self[key] + except MissingInterpolationOption: + return dict.__getitem__(self, key) + return ('ConfigObj({%s})' % + ', '.join([('%s: %s' % (repr(key), repr(_getval(key)))) + for key in (self.scalars + self.sections)])) + + def _handle_bom(self, infile): + """ + Handle any BOM, and decode if necessary. + + If an encoding is specified, that *must* be used - but the BOM should + still be removed (and the BOM attribute set). + + (If the encoding is wrongly specified, then a BOM for an alternative + encoding won't be discovered or removed.) + + If an encoding is not specified, UTF8 or UTF16 BOM will be detected and + removed. The BOM attribute will be set. UTF16 will be decoded to + unicode. + + NOTE: This method must not be called with an empty ``infile``. + + Specifying the *wrong* encoding is likely to cause a + ``UnicodeDecodeError``. + + ``infile`` must always be returned as a list of lines, but may be + passed in as a single string. + """ + if ((self.encoding is not None) and + (self.encoding.lower() not in BOM_LIST)): + # No need to check for a BOM + # the encoding specified doesn't have one + # just decode + return self._decode(infile, self.encoding) + + if isinstance(infile, (list, tuple)): + line = infile[0] + else: + line = infile + if self.encoding is not None: + # encoding explicitly supplied + # And it could have an associated BOM + # TODO: if encoding is just UTF16 - we ought to check for both + # TODO: big endian and little endian versions. 
+ enc = BOM_LIST[self.encoding.lower()] + if enc == 'utf_16': + # For UTF16 we try big endian and little endian + for BOM, (encoding, final_encoding) in list(BOMS.items()): + if not final_encoding: + # skip UTF8 + continue + if isinstance(infile, bytes) and infile.startswith(BOM): + ### BOM discovered + ##self.BOM = True + # Don't need to remove BOM + return self._decode(infile, encoding) + + # If we get this far, will *probably* raise a DecodeError + # As it doesn't appear to start with a BOM + return self._decode(infile, self.encoding) + + # Must be UTF8 + BOM = BOM_SET[enc] + if isinstance(line, bytes) and not line.startswith(BOM): + return self._decode(infile, self.encoding) + + newline = line[len(BOM):] + + # BOM removed + if isinstance(infile, (list, tuple)): + infile[0] = newline + else: + infile = newline + self.BOM = True + return self._decode(infile, self.encoding) + + # No encoding specified - so we need to check for UTF8/UTF16 + for BOM, (encoding, final_encoding) in list(BOMS.items()): + if isinstance(line, bytes) and not line.startswith(BOM): + continue + else: + # BOM discovered + # self.encoding = final_encoding + if not final_encoding: + self.BOM = True + # UTF8 + # remove BOM + newline = line[len(BOM):] + if isinstance(infile, (list, tuple)): + infile[0] = newline + else: + infile = newline + # UTF8 - don't decode + if isinstance(infile, str): + return infile.splitlines(True) + else: + return infile + + infile = self._decode(infile, encoding) + if isinstance(infile, str): + # infile read from a file will be a single string + return infile.splitlines(True) + return self._decode(infile, encoding) + # UTF16 - have to decode + + + # No BOM discovered and no encoding specified, just return + if isinstance(infile, str): + # infile read from a file will be a single string + return infile.splitlines(True) + + if isinstance(infile, bytes): + return self._decode(infile, self.encoding) + return infile + + def _decode(self, infile, encoding): + """ + Decode 
infile to unicode. Using the specified encoding. + + if is a string, it also needs converting to a list. + """ + + encoding = encoding or 'utf-8' + + # If `infile` is a Unicode string, return as such + if isinstance(infile, str): + return infile + + # If `infile` is bytes type; decode and split + if isinstance(infile, bytes): + return infile.decode(encoding).splitlines(True) + + # If `infile` is a mix of bytes and unicode strings + for i, line in enumerate(infile): + if isinstance(line, bytes): + infile[i] = line.decode(encoding) + return infile + + def _decode_element(self, line): + """Decode element to unicode if necessary.""" + if not self.encoding: + return line + if isinstance(line, bytes) and self.default_encoding: + return line.decode(self.default_encoding) + return line + + def _str(self, value): + """ + Used by ``stringify`` within validate, to turn non-string values + into strings. + """ + + # Bytes type string should NOT be stringified at any cost. + if isinstance(value, bytes): + return value + if not isinstance(value, str): + return str(value) + else: + return value + + def _parse(self, infile): + """Actually parse the config file.""" + + temp_list_values = self.list_values + if self.unrepr: + self.list_values = False + + comment_list = [] + done_start = False + this_section = self + maxline = len(infile) - 1 + cur_index = -1 + reset_comment = False + + while cur_index < maxline: + if reset_comment: + comment_list = [] + cur_index += 1 + line = infile[cur_index] + sline = line.strip() + # do we have anything on the line ? 
+ if not sline or sline.startswith('#'): + reset_comment = False + comment_list.append(line) + continue + + if not done_start: + # preserve initial comment + self.initial_comment = comment_list + comment_list = [] + done_start = True + + reset_comment = True + # first we check if it's a section marker + mat = self._sectionmarker.match(line) + if mat is not None: + # is a section line + (indent, sect_open, sect_name, sect_close, comment) = mat.groups() + if indent and (self.indent_type is None): + self.indent_type = indent + cur_depth = sect_open.count('[') + if cur_depth != sect_close.count(']'): + self._handle_error("Cannot compute the section depth at line %s.", + NestingError, infile, cur_index) + continue + + if cur_depth < this_section.depth: + # the new section is dropping back to a previous level + try: + parent = self._match_depth(this_section, + cur_depth).parent + except SyntaxError: + self._handle_error("Cannot compute nesting level at line %s.", + NestingError, infile, cur_index) + continue + elif cur_depth == this_section.depth: + # the new section is a sibling of the current section + parent = this_section.parent + elif cur_depth == this_section.depth + 1: + # the new section is a child the current section + parent = this_section + else: + self._handle_error("Section too nested at line %s.", + NestingError, infile, cur_index) + + sect_name = self._unquote(sect_name) + if sect_name in parent: + self._handle_error('Duplicate section name at line %s.', + DuplicateError, infile, cur_index) + continue + + # create the new section + this_section = Section( + parent, + cur_depth, + self, + name=sect_name) + parent[sect_name] = this_section + parent.inline_comments[sect_name] = comment + parent.comments[sect_name] = comment_list + continue + # + # it's not a section marker, + # so it should be a valid ``key = value`` line + mat = self._keyword.match(line) + if mat is None: + # it neither matched as a keyword + # or a section marker + self._handle_error( + 
'Invalid line at line "%s".', + ParseError, infile, cur_index) + else: + # is a keyword value + # value will include any inline comment + (indent, key, value) = mat.groups() + if indent and (self.indent_type is None): + self.indent_type = indent + # check for a multiline value + if value[:3] in ['"""', "'''"]: + try: + value, comment, cur_index = self._multiline( + value, infile, cur_index, maxline) + except SyntaxError: + self._handle_error( + 'Parse error in value at line %s.', + ParseError, infile, cur_index) + continue + else: + if self.unrepr: + comment = '' + try: + value = unrepr(value) + except Exception as e: + if type(e) == UnknownType: + msg = 'Unknown name or type in value at line %s.' + else: + msg = 'Parse error in value at line %s.' + self._handle_error(msg, UnreprError, infile, + cur_index) + continue + else: + if self.unrepr: + comment = '' + try: + value = unrepr(value) + except Exception as e: + if isinstance(e, UnknownType): + msg = 'Unknown name or type in value at line %s.' + else: + msg = 'Parse error in value at line %s.' + self._handle_error(msg, UnreprError, infile, + cur_index) + continue + else: + # extract comment and lists + try: + (value, comment) = self._handle_value(value) + except SyntaxError: + self._handle_error( + 'Parse error in value at line %s.', + ParseError, infile, cur_index) + continue + # + key = self._unquote(key) + if key in this_section: + self._handle_error( + 'Duplicate keyword name at line %s.', + DuplicateError, infile, cur_index) + continue + # add the key. 
+ # we set unrepr because if we have got this far we will never + # be creating a new section + this_section.__setitem__(key, value, unrepr=True) + this_section.inline_comments[key] = comment + this_section.comments[key] = comment_list + continue + # + if self.indent_type is None: + # no indentation used, set the type accordingly + self.indent_type = '' + + # preserve the final comment + if not self and not self.initial_comment: + self.initial_comment = comment_list + elif not reset_comment: + self.final_comment = comment_list + self.list_values = temp_list_values + + def _match_depth(self, sect, depth): + """ + Given a section and a depth level, walk back through the sections + parents to see if the depth level matches a previous section. + + Return a reference to the right section, + or raise a SyntaxError. + """ + while depth < sect.depth: + if sect is sect.parent: + # we've reached the top level already + raise SyntaxError() + sect = sect.parent + if sect.depth == depth: + return sect + # shouldn't get here + raise SyntaxError() + + def _handle_error(self, text, ErrorClass, infile, cur_index): + """ + Handle an error according to the error settings. + + Either raise the error or store it. + The error will have occured at ``cur_index`` + """ + line = infile[cur_index] + cur_index += 1 + message = text % cur_index + error = ErrorClass(message, cur_index, line) + if self.raise_errors: + # raise the error - parsing stops here + raise error + # store the error + # reraise when parsing has finished + self._errors.append(error) + + def _unquote(self, value): + """Return an unquoted version of a value""" + if not value: + # should only happen during parsing of lists + raise SyntaxError + if (value[0] == value[-1]) and (value[0] in ('"', "'")): + value = value[1:-1] + return value + + def _quote(self, value, multiline=True): + """ + Return a safely quoted version of a value. + + Raise a ConfigObjError if the value cannot be safely quoted. 
+ If multiline is ``True`` (default) then use triple quotes + if necessary. + + * Don't quote values that don't need it. + * Recursively quote members of a list and return a comma joined list. + * Multiline is ``False`` for lists. + * Obey list syntax for empty and single member lists. + + If ``list_values=False`` then the value is only quoted if it contains + a ``\\n`` (is multiline) or '#'. + + If ``write_empty_values`` is set, and the value is an empty string, it + won't be quoted. + """ + if multiline and self.write_empty_values and value == '': + # Only if multiline is set, so that it is used for values not + # keys, and not values that are part of a list + return '' + + if multiline and isinstance(value, (list, tuple)): + if not value: + return ',' + elif len(value) == 1: + return self._quote(value[0], multiline=False) + ',' + return ', '.join([self._quote(val, multiline=False) + for val in value]) + if not isinstance(value, str): + if self.stringify and not isinstance(value, bytes): + value = str(value) + else: + raise TypeError('Value "%s" is not a string.' % value) + + if not value: + return '""' + + no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value + need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value )) + hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value) + check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote + + if check_for_single: + if not self.list_values: + # we don't quote if ``list_values=False`` + quot = noquot + # for normal values either single or double quotes will do + elif '\n' in value: + # will only happen if multiline is off - e.g. '\n' in key + raise ConfigObjError('Value "%s" cannot be safely quoted.' 
% value) + elif ((value[0] not in wspace_plus) and + (value[-1] not in wspace_plus) and + (',' not in value)): + quot = noquot + else: + quot = self._get_single_quote(value) + else: + # if value has '\n' or "'" *and* '"', it will need triple quotes + quot = self._get_triple_quote(value) + + if quot == noquot and '#' in value and self.list_values: + quot = self._get_single_quote(value) + + return quot % value + + def _get_single_quote(self, value): + if ("'" in value) and ('"' in value): + raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) + elif '"' in value: + quot = squot + else: + quot = dquot + return quot + + def _get_triple_quote(self, value): + if (value.find('"""') != -1) and (value.find("'''") != -1): + raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) + if value.find('"""') == -1: + quot = tdquot + else: + quot = tsquot + return quot + + def _handle_value(self, value): + """ + Given a value string, unquote, remove comment, + handle lists. (including empty and single member lists) + """ + if self._inspec: + # Parsing a configspec so don't handle comments + return (value, '') + # do we look for lists in values ? 
+ if not self.list_values: + mat = self._nolistvalue.match(value) + if mat is None: + raise SyntaxError() + # NOTE: we don't unquote here + return mat.groups() + # + mat = self._valueexp.match(value) + if mat is None: + # the value is badly constructed, probably badly quoted, + # or an invalid list + raise SyntaxError() + (list_values, single, empty_list, comment) = mat.groups() + if (list_values == '') and (single is None): + # change this if you want to accept empty values + raise SyntaxError() + # NOTE: note there is no error handling from here if the regex + # is wrong: then incorrect values will slip through + if empty_list is not None: + # the single comma - meaning an empty list + return ([], comment) + if single is not None: + # handle empty values + if list_values and not single: + # FIXME: the '' is a workaround because our regex now matches + # '' at the end of a list if it has a trailing comma + single = None + else: + single = single or '""' + single = self._unquote(single) + if list_values == '': + # not a list value + return (single, comment) + the_list = self._listvalueexp.findall(list_values) + the_list = [self._unquote(val) for val in the_list] + if single is not None: + the_list += [single] + return (the_list, comment) + + def _multiline(self, value, infile, cur_index, maxline): + """Extract the value, where we are in a multiline situation.""" + quot = value[:3] + newvalue = value[3:] + single_line = self._triple_quote[quot][0] + multi_line = self._triple_quote[quot][1] + mat = single_line.match(value) + if mat is not None: + retval = list(mat.groups()) + retval.append(cur_index) + return retval + elif newvalue.find(quot) != -1: + # somehow the triple quote is missing + raise SyntaxError() + # + while cur_index < maxline: + cur_index += 1 + newvalue += '\n' + line = infile[cur_index] + if line.find(quot) == -1: + newvalue += line + else: + # end of multiline, process it + break + else: + # we've got to the end of the config, oops... 
+ raise SyntaxError() + mat = multi_line.match(line) + if mat is None: + # a badly formed line + raise SyntaxError() + (value, comment) = mat.groups() + return (newvalue + value, comment, cur_index) + + def _handle_configspec(self, configspec): + """Parse the configspec.""" + # FIXME: Should we check that the configspec was created with the + # correct settings ? (i.e. ``list_values=False``) + if not isinstance(configspec, ConfigObj): + try: + configspec = ConfigObj(configspec, + raise_errors=True, + file_error=True, + _inspec=True) + except ConfigObjError as e: + # FIXME: Should these errors have a reference + # to the already parsed ConfigObj ? + raise ConfigspecError('Parsing configspec failed: %s' % e) + except IOError as e: + raise IOError('Reading configspec failed: %s' % e) + + self.configspec = configspec + + def _set_configspec(self, section, copy): + """ + Called by validate. Handles setting the configspec on subsections + including sections to be validated by __many__ + """ + configspec = section.configspec + many = configspec.get('__many__') + if isinstance(many, dict): + for entry in section.sections: + if entry not in configspec: + section[entry].configspec = many + + for entry in configspec.sections: + if entry == '__many__': + continue + if entry not in section: + section[entry] = {} + section[entry]._created = True + if copy: + # copy comments + section.comments[entry] = configspec.comments.get(entry, []) + section.inline_comments[entry] = configspec.inline_comments.get(entry, '') + + # Could be a scalar when we expect a section + if isinstance(section[entry], Section): + section[entry].configspec = configspec[entry] + + def _write_line(self, indent_string, entry, this_entry, comment): + """Write an individual line, for the write method""" + # NOTE: the calls to self._quote here handles non-StringType values. 
+ if not self.unrepr: + val = self._decode_element(self._quote(this_entry)) + else: + val = repr(this_entry) + return '%s%s%s%s%s' % (indent_string, + self._decode_element(self._quote(entry, multiline=False)), + ' = ', + val, + self._decode_element(comment)) + + def _write_marker(self, indent_string, depth, entry, comment): + """Write a section marker line""" + return '%s%s%s%s%s' % (indent_string, + '[' * depth, + self._quote(self._decode_element(entry), multiline=False), + ']' * depth, + self._decode_element(comment)) + + def _handle_comment(self, comment): + """Deal with a comment.""" + if not comment: + return '' + start = self.indent_type + if not comment.startswith('#'): + start += ' # ' + return (start + comment) + + # Public methods + + def write(self, outfile=None, section=None): + """ + Write the current ConfigObj as a file + + tekNico: FIXME: use StringIO instead of real files + + >>> filename = a.filename + >>> a.filename = 'test.ini' + >>> a.write() + >>> a.filename = filename + >>> a == ConfigObj('test.ini', raise_errors=True) + 1 + >>> import os + >>> os.remove('test.ini') + """ + if self.indent_type is None: + # this can be true if initialised from a dictionary + self.indent_type = DEFAULT_INDENT_TYPE + + out = [] + cs = '#' + csp = '# ' + if section is None: + int_val = self.interpolation + self.interpolation = False + section = self + for line in self.initial_comment: + line = self._decode_element(line) + stripped_line = line.strip() + if stripped_line and not stripped_line.startswith(cs): + line = csp + line + out.append(line) + + indent_string = self.indent_type * section.depth + for entry in (section.scalars + section.sections): + if entry in section.defaults: + # don't write out default values + continue + for comment_line in section.comments[entry]: + comment_line = self._decode_element(comment_line.lstrip()) + if comment_line and not comment_line.startswith(cs): + comment_line = csp + comment_line + out.append(indent_string + comment_line) + 
this_entry = section[entry] + comment = self._handle_comment(section.inline_comments[entry]) + + if isinstance(this_entry, dict): + # a section + out.append(self._write_marker( + indent_string, + this_entry.depth, + entry, + comment)) + out.extend(self.write(section=this_entry)) + else: + out.append(self._write_line( + indent_string, + entry, + this_entry, + comment)) + + if section is self: + for line in self.final_comment: + line = self._decode_element(line) + stripped_line = line.strip() + if stripped_line and not stripped_line.startswith(cs): + line = csp + line + out.append(line) + self.interpolation = int_val + + if section is not self: + return out + + if (self.filename is None) and (outfile is None): + + # output a list of lines + # might need to encode + # NOTE: This will *screw* UTF16, each line will start with the BOM + if self.encoding: + out = [l.encode(self.encoding) for l in out] + if (self.BOM and ((self.encoding is None) or + (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))): + # Add the UTF8 BOM + if not out: + out.append('') + out[0] = BOM_UTF8 + out[0] + return out + + + # Turn the list to a string, joined with correct newlines + newline = self.newlines or os.linesep + if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w' + and sys.platform == 'win32' and newline == '\r\n'): + # Windows specific hack to avoid writing '\r\r\n' + newline = '\n' + output = newline.join(out) + if self.encoding: + output = output.encode(self.encoding) + + if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)): + # Add the UTF8 BOM + output = BOM_UTF8 + output + + if not output.endswith(newline): + output += newline + if outfile is not None: + outfile.write(output) + else: + h = open(self.filename, 'wb') + output = output.encode() # encoding the data to bytes + h.write(output) + h.close() + + def validate(self, validator, preserve_errors=False, copy=False, + section=None): + """ + Test the ConfigObj against a configspec. 
+ + It uses the ``validator`` object from *validate.py*. + + To run ``validate`` on the current ConfigObj, call: :: + + test = config.validate(validator) + + (Normally having previously passed in the configspec when the ConfigObj + was created - you can dynamically assign a dictionary of checks to the + ``configspec`` attribute of a section though). + + It returns ``True`` if everything passes, or a dictionary of + pass/fails (True/False). If every member of a subsection passes, it + will just have the value ``True``. (It also returns ``False`` if all + members fail). + + In addition, it converts the values from strings to their native + types if their checks pass (and ``stringify`` is set). + + If ``preserve_errors`` is ``True`` (``False`` is default) then instead + of a marking a fail with a ``False``, it will preserve the actual + exception object. This can contain info about the reason for failure. + For example the ``VdtValueTooSmallError`` indicates that the value + supplied was too small. If a value (or section) is missing it will + still be marked as ``False``. + + You must have the validate module to use ``preserve_errors=True``. + + You can then use the ``flatten_errors`` function to turn your nested + results dictionary into a flattened list of failures - useful for + displaying meaningful error messages. 
+ """ + if section is None: + if self.configspec is None: + raise ValueError('No configspec supplied.') + if preserve_errors: + # We do this once to remove a top level dependency on the validate module + # Which makes importing configobj faster + from validate import VdtMissingValue + self._vdtMissingValue = VdtMissingValue + + section = self + + if copy: + section.initial_comment = section.configspec.initial_comment + section.final_comment = section.configspec.final_comment + section.encoding = section.configspec.encoding + section.BOM = section.configspec.BOM + section.newlines = section.configspec.newlines + section.indent_type = section.configspec.indent_type + + # + # section.default_values.clear() #?? + configspec = section.configspec + self._set_configspec(section, copy) + + def validate_entry(entry, spec, val, missing, ret_true, ret_false): + section.default_values.pop(entry, None) + + try: + section.default_values[entry] = validator.get_default_value(configspec[entry]) + except (KeyError, AttributeError, validator.baseErrorClass): + # No default, bad default or validator has no 'get_default_value' + # (e.g. 
SimpleVal) + pass + + try: + check = validator.check(spec, + val, + missing=missing + ) + except validator.baseErrorClass as e: + if not preserve_errors or isinstance(e, self._vdtMissingValue): + out[entry] = False + else: + # preserve the error + out[entry] = e + ret_false = False + ret_true = False + else: + ret_false = False + out[entry] = True + if self.stringify or missing: + # if we are doing type conversion + # or the value is a supplied default + if not self.stringify: + if isinstance(check, (list, tuple)): + # preserve lists + check = [self._str(item) for item in check] + elif missing and check is None: + # convert the None from a default to a '' + check = '' + else: + check = self._str(check) + if (check != val) or missing: + section[entry] = check + if not copy and missing and entry not in section.defaults: + section.defaults.append(entry) + return ret_true, ret_false + + # + out = {} + ret_true = True + ret_false = True + + unvalidated = [k for k in section.scalars if k not in configspec] + incorrect_sections = [k for k in configspec.sections if k in section.scalars] + incorrect_scalars = [k for k in configspec.scalars if k in section.sections] + + for entry in configspec.scalars: + if entry in ('__many__', '___many___'): + # reserved names + continue + if (not entry in section.scalars) or (entry in section.defaults): + # missing entries + # or entries from defaults + missing = True + val = None + if copy and entry not in section.scalars: + # copy comments + section.comments[entry] = ( + configspec.comments.get(entry, [])) + section.inline_comments[entry] = ( + configspec.inline_comments.get(entry, '')) + # + else: + missing = False + val = section[entry] + + ret_true, ret_false = validate_entry(entry, configspec[entry], val, + missing, ret_true, ret_false) + + many = None + if '__many__' in configspec.scalars: + many = configspec['__many__'] + elif '___many___' in configspec.scalars: + many = configspec['___many___'] + + if many is not None: + for 
entry in unvalidated: + val = section[entry] + ret_true, ret_false = validate_entry(entry, many, val, False, + ret_true, ret_false) + unvalidated = [] + + for entry in incorrect_scalars: + ret_true = False + if not preserve_errors: + out[entry] = False + else: + ret_false = False + msg = 'Value %r was provided as a section' % entry + out[entry] = validator.baseErrorClass(msg) + for entry in incorrect_sections: + ret_true = False + if not preserve_errors: + out[entry] = False + else: + ret_false = False + msg = 'Section %r was provided as a single value' % entry + out[entry] = validator.baseErrorClass(msg) + + # Missing sections will have been created as empty ones when the + # configspec was read. + for entry in section.sections: + # FIXME: this means DEFAULT is not copied in copy mode + if section is self and entry == 'DEFAULT': + continue + if section[entry].configspec is None: + unvalidated.append(entry) + continue + if copy: + section.comments[entry] = configspec.comments.get(entry, []) + section.inline_comments[entry] = configspec.inline_comments.get(entry, '') + check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry]) + out[entry] = check + if check == False: + ret_true = False + elif check == True: + ret_false = False + else: + ret_true = False + + section.extra_values = unvalidated + if preserve_errors and not section._created: + # If the section wasn't created (i.e. it wasn't missing) + # then we can't return False, we need to preserve errors + ret_false = False + # + if ret_false and preserve_errors and out: + # If we are preserving errors, but all + # the failures are from missing sections / values + # then we can return False. Otherwise there is a + # real failure that we need to preserve. 
+ ret_false = not any(out.values()) + if ret_true: + return True + elif ret_false: + return False + return out + + + def reset(self): + """Clear ConfigObj instance and restore to 'freshly created' state.""" + self.clear() + self._initialise() + # FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload) + # requires an empty dictionary + self.configspec = None + # Just to be sure ;-) + self._original_configspec = None + + def reload(self): + """ + Reload a ConfigObj from file. + + This method raises a ``ReloadError`` if the ConfigObj doesn't have + a filename attribute pointing to a file. + """ + if not isinstance(self.filename, str): + raise ReloadError() + + filename = self.filename + current_options = {} + for entry in OPTION_DEFAULTS: + if entry == 'configspec': + continue + current_options[entry] = getattr(self, entry) + + configspec = self._original_configspec + current_options['configspec'] = configspec + + self.clear() + self._initialise(current_options) + self._load(filename, configspec) + +class SimpleVal(object): + """ + A simple validator. + Can be used to check that all members expected are present. + + To use it, provide a configspec with all your members in (the value given + will be ignored). Pass an instance of ``SimpleVal`` to the ``validate`` + method of your ``ConfigObj``. ``validate`` will return ``True`` if all + members are present, or a dictionary with True/False meaning + present/missing. (Whole missing sections will be replaced with ``False``) + """ + + def __init__(self): + self.baseErrorClass = ConfigObjError + + def check(self, check, member, missing=False): + """A dummy check method, always returns the value unchanged.""" + if missing: + raise self.baseErrorClass() + return member + +def flatten_errors(cfg, res, levels=None, results=None): + """ + An example function that will turn a nested dictionary of results + (as returned by ``ConfigObj.validate``) into a flat list. 
+ + ``cfg`` is the ConfigObj instance being checked, ``res`` is the results + dictionary returned by ``validate``. + + (This is a recursive function, so you shouldn't use the ``levels`` or + ``results`` arguments - they are used by the function.) + + Returns a list of keys that failed. Each member of the list is a tuple:: + + ([list of sections...], key, result) + + If ``validate`` was called with ``preserve_errors=False`` (the default) + then ``result`` will always be ``False``. + + *list of sections* is a flattened list of sections that the key was found + in. + + If the section was missing (or a section was expected and a scalar provided + - or vice-versa) then key will be ``None``. + + If the value (or section) was missing then ``result`` will be ``False``. + + If ``validate`` was called with ``preserve_errors=True`` and a value + was present, but failed the check, then ``result`` will be the exception + object returned. You can use this as a string that describes the failure. + + For example *The value "3" is of the wrong type*. + """ + if levels is None: + # first time called + levels = [] + results = [] + if res == True: + return results + if res == False or isinstance(res, Exception): + results.append((levels[:], None, res)) + if levels: + levels.pop() + return results + for (key, val) in list(res.items()): + if val == True: + continue + if isinstance(cfg.get(key), dict): + # Go down one level + levels.append(key) + flatten_errors(cfg[key], val, levels, results) + continue + results.append((levels[:], key, val)) + # + # Go up one level + if levels: + levels.pop() + # + return results + +def get_extra_values(conf, _prepend=()): + """ + Find all the values and sections not in the configspec from a validated + ConfigObj. + + ``get_extra_values`` returns a list of tuples where each tuple represents + either an extra section, or an extra value. + + The tuples contain two values, a tuple representing the section the value + is in and the name of the extra values. 
For extra values in the top level + section the first member will be an empty tuple. For values in the 'foo' + section the first member will be ``('foo',)``. For members in the 'bar' + subsection of the 'foo' section the first member will be ``('foo', 'bar')``. + + NOTE: If you call ``get_extra_values`` on a ConfigObj instance that hasn't + been validated it will return an empty list. + """ + out = [] + + out.extend([(_prepend, name) for name in conf.extra_values]) + for name in conf.sections: + if name not in conf.extra_values: + out.extend(get_extra_values(conf[name], _prepend + (name,))) + return out + + +"""*A programming language is a medium of expression.* - Paul Graham""" \ No newline at end of file diff --git a/astropy/extern/configobj_py3/validate.py b/astropy/extern/configobj_py3/validate.py new file mode 100644 index 0000000..af12814 --- /dev/null +++ b/astropy/extern/configobj_py3/validate.py @@ -0,0 +1,1419 @@ +# validate.py +# A Validator object +# Copyright (C) 2005-2010 Michael Foord, Mark Andrews, Nicola Larosa +# E-mail: fuzzyman AT voidspace DOT org DOT uk +# mark AT la-la DOT com +# nico AT tekNico DOT net + +# This software is licensed under the terms of the BSD license. +# http://www.voidspace.org.uk/python/license.shtml +# Basically you're free to copy, modify, distribute and relicense it, +# So long as you keep a copy of the license with it. + +# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml +# For information about bugfixes, updates and support, please join the +# ConfigObj mailing list: +# http://lists.sourceforge.net/lists/listinfo/configobj-develop +# Comments, suggestions and bug reports welcome. + +""" + The Validator object is used to check that supplied values + conform to a specification. + + The value can be supplied as a string - e.g. from a config file. + In this case the check will also *convert* the value to + the required type. 
This allows you to add validation + as a transparent layer to access data stored as strings. + The validation checks that the data is correct *and* + converts it to the expected type. + + Some standard checks are provided for basic data types. + Additional checks are easy to write. They can be + provided when the ``Validator`` is instantiated or + added afterwards. + + The standard functions work with the following basic data types : + + * integers + * floats + * booleans + * strings + * ip_addr + + plus lists of these datatypes + + Adding additional checks is done through coding simple functions. + + The full set of standard checks are : + + * 'integer': matches integer values (including negative) + Takes optional 'min' and 'max' arguments : :: + + integer() + integer(3, 9) # any value from 3 to 9 + integer(min=0) # any positive value + integer(max=9) + + * 'float': matches float values + Has the same parameters as the integer check. + + * 'boolean': matches boolean values - ``True`` or ``False`` + Acceptable string values for True are : + true, on, yes, 1 + Acceptable string values for False are : + false, off, no, 0 + + Any other value raises an error. + + * 'ip_addr': matches an Internet Protocol address, v.4, represented + by a dotted-quad string, i.e. '1.2.3.4'. + + * 'string': matches any string. + Takes optional keyword args 'min' and 'max' + to specify min and max lengths of the string. + + * 'list': matches any list. + Takes optional keyword args 'min', and 'max' to specify min and + max sizes of the list. (Always returns a list.) + + * 'tuple': matches any tuple. + Takes optional keyword args 'min', and 'max' to specify min and + max sizes of the tuple. (Always returns a tuple.) + + * 'int_list': Matches a list of integers. + Takes the same arguments as list. + + * 'float_list': Matches a list of floats. + Takes the same arguments as list. + + * 'bool_list': Matches a list of boolean values. + Takes the same arguments as list. 
+ + * 'ip_addr_list': Matches a list of IP addresses. + Takes the same arguments as list. + + * 'string_list': Matches a list of strings. + Takes the same arguments as list. + + * 'mixed_list': Matches a list with different types in + specific positions. List size must match + the number of arguments. + + Each position can be one of : + 'integer', 'float', 'ip_addr', 'string', 'boolean' + + So to specify a list with two strings followed + by two integers, you write the check as : :: + + mixed_list('string', 'string', 'integer', 'integer') + + * 'pass': This check matches everything ! It never fails + and the value is unchanged. + + It is also the default if no check is specified. + + * 'option': This check matches any from a list of options. + You specify this check with : :: + + option('option 1', 'option 2', 'option 3') + + You can supply a default value (returned if no value is supplied) + using the default keyword argument. + + You specify a list argument for default using a list constructor syntax in + the check : :: + + checkname(arg1, arg2, default=list('val 1', 'val 2', 'val 3')) + + A badly formatted set of arguments will raise a ``VdtParamError``. +""" + +__version__ = '1.0.1' + + +__all__ = ( + '__version__', + 'dottedQuadToNum', + 'numToDottedQuad', + 'ValidateError', + 'VdtUnknownCheckError', + 'VdtParamError', + 'VdtTypeError', + 'VdtValueError', + 'VdtValueTooSmallError', + 'VdtValueTooBigError', + 'VdtValueTooShortError', + 'VdtValueTooLongError', + 'VdtMissingValue', + 'Validator', + 'is_integer', + 'is_float', + 'is_boolean', + 'is_list', + 'is_tuple', + 'is_ip_addr', + 'is_string', + 'is_int_list', + 'is_bool_list', + 'is_float_list', + 'is_string_list', + 'is_ip_addr_list', + 'is_mixed_list', + 'is_option', + '__docformat__', +) + + + +import re + + +_list_arg = re.compile(r''' + (?: + ([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*list\( + ( + (?: + \s* + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s\)][^,\)]*?) 
# unquoted + ) + \s*,\s* + )* + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s\)][^,\)]*?) # unquoted + )? # last one + ) + \) + ) +''', re.VERBOSE | re.DOTALL) # two groups + +_list_members = re.compile(r''' + ( + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s=][^,=]*?) # unquoted + ) + (?: + (?:\s*,\s*)|(?:\s*$) # comma + ) +''', re.VERBOSE | re.DOTALL) # one group + +_paramstring = r''' + (?: + ( + (?: + [a-zA-Z_][a-zA-Z0-9_]*\s*=\s*list\( + (?: + \s* + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s\)][^,\)]*?) # unquoted + ) + \s*,\s* + )* + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s\)][^,\)]*?) # unquoted + )? # last one + \) + )| + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s=][^,=]*?)| # unquoted + (?: # keyword argument + [a-zA-Z_][a-zA-Z0-9_]*\s*=\s* + (?: + (?:".*?")| # double quotes + (?:'.*?')| # single quotes + (?:[^'",\s=][^,=]*?) # unquoted + ) + ) + ) + ) + (?: + (?:\s*,\s*)|(?:\s*$) # comma + ) + ) + ''' + +_matchstring = '^%s*' % _paramstring + +def dottedQuadToNum(ip): + """ + Convert decimal dotted quad string to long integer + + >>> int(dottedQuadToNum('1 ')) + 1 + >>> int(dottedQuadToNum(' 1.2')) + 16777218 + >>> int(dottedQuadToNum(' 1.2.3 ')) + 16908291 + >>> int(dottedQuadToNum('1.2.3.4')) + 16909060 + >>> dottedQuadToNum('255.255.255.255') + 4294967295L + >>> dottedQuadToNum('255.255.255.256') + Traceback (most recent call last): + ValueError: Not a good dotted-quad IP: 255.255.255.256 + """ + + # import here to avoid it when ip_addr values are not used + import socket, struct + + try: + return struct.unpack('!L', + socket.inet_aton(ip.strip()))[0] + except socket.error: + # bug in inet_aton, corrected in Python 2.4 + if ip.strip() == '255.255.255.255': + return 0xFFFFFFFF + else: + raise ValueError('Not a good dotted-quad IP: %s' % ip) + return + +def numToDottedQuad(num): + """ + Convert long int to 
dotted quad string + + >>> numToDottedQuad(-1L) + Traceback (most recent call last): + ValueError: Not a good numeric IP: -1 + >>> numToDottedQuad(1L) + '0.0.0.1' + >>> numToDottedQuad(16777218L) + '1.0.0.2' + >>> numToDottedQuad(16908291L) + '1.2.0.3' + >>> numToDottedQuad(16909060L) + '1.2.3.4' + >>> numToDottedQuad(4294967295L) + '255.255.255.255' + >>> numToDottedQuad(4294967296L) + Traceback (most recent call last): + ValueError: Not a good numeric IP: 4294967296 + """ + + # import here to avoid it when ip_addr values are not used + import socket, struct + + # no need to intercept here, 4294967295L is fine + if num > 4294967295 or num < 0: + raise ValueError('Not a good numeric IP: %s' % num) + try: + return socket.inet_ntoa( + struct.pack('!L', int(num))) + except (socket.error, struct.error, OverflowError): + raise ValueError('Not a good numeric IP: %s' % num) + +class ValidateError(Exception): + """ + This error indicates that the check failed. + It can be the base class for more specific errors. + + Any check function that fails ought to raise this error. + (or a subclass) + + >>> raise ValidateError + Traceback (most recent call last): + ValidateError + """ + +class VdtMissingValue(ValidateError): + """No value was supplied to a check that needed one.""" + + +class VdtUnknownCheckError(ValidateError): + """An unknown check function was requested""" + + def __init__(self, value): + """ + >>> raise VdtUnknownCheckError('yoda') + Traceback (most recent call last): + VdtUnknownCheckError: the check "yoda" is unknown. + """ + ValidateError.__init__(self, 'the check "%s" is unknown.' % (value,)) + + +class VdtParamError(SyntaxError): + """An incorrect parameter was passed""" + + def __init__(self, name, value): + """ + >>> raise VdtParamError('yoda', 'jedi') + Traceback (most recent call last): + VdtParamError: passed an incorrect value "jedi" for parameter "yoda". + """ + SyntaxError.__init__(self, 'passed an incorrect value "%s" for parameter "%s".' 
% (value, name)) + + +class VdtTypeError(ValidateError): + """The value supplied was of the wrong type""" + + def __init__(self, value): + """ + >>> raise VdtTypeError('jedi') + Traceback (most recent call last): + VdtTypeError: the value "jedi" is of the wrong type. + """ + ValidateError.__init__(self, 'the value "%s" is of the wrong type.' % (value,)) + + +class VdtValueError(ValidateError): + """The value supplied was of the correct type, but was not an allowed value.""" + + def __init__(self, value): + """ + >>> raise VdtValueError('jedi') + Traceback (most recent call last): + VdtValueError: the value "jedi" is unacceptable. + """ + ValidateError.__init__(self, 'the value "%s" is unacceptable.' % (value,)) + + +class VdtValueTooSmallError(VdtValueError): + """The value supplied was of the correct type, but was too small.""" + + def __init__(self, value): + """ + >>> raise VdtValueTooSmallError('0') + Traceback (most recent call last): + VdtValueTooSmallError: the value "0" is too small. + """ + ValidateError.__init__(self, 'the value "%s" is too small.' % (value,)) + + +class VdtValueTooBigError(VdtValueError): + """The value supplied was of the correct type, but was too big.""" + + def __init__(self, value): + """ + >>> raise VdtValueTooBigError('1') + Traceback (most recent call last): + VdtValueTooBigError: the value "1" is too big. + """ + ValidateError.__init__(self, 'the value "%s" is too big.' % (value,)) + + +class VdtValueTooShortError(VdtValueError): + """The value supplied was of the correct type, but was too short.""" + + def __init__(self, value): + """ + >>> raise VdtValueTooShortError('jed') + Traceback (most recent call last): + VdtValueTooShortError: the value "jed" is too short. + """ + ValidateError.__init__( + self, + 'the value "%s" is too short.' 
class VdtValueTooLongError(VdtValueError):
    """The value supplied was of the correct type, but was too long."""

    def __init__(self, value):
        """
        >>> raise VdtValueTooLongError('jedie')
        Traceback (most recent call last):
        VdtValueTooLongError: the value "jedie" is too long.
        """
        ValidateError.__init__(self, 'the value "%s" is too long.' % (value,))


class Validator(object):
    """
    Validator is an object that allows you to register a set of 'checks'.
    These checks take input and test that it conforms to the check.

    This can also involve converting the value from a string into
    the correct datatype.

    The ``check`` method takes an input string which configures which
    check is to be used and applies that check to a supplied value.

    An example input string would be:
    'int_range(param1, param2)'

    You would then provide something like:

    >>> def int_range_check(value, min, max):
    ...     # turn min and max from strings to integers
    ...     min = int(min)
    ...     max = int(max)
    ...     # check that value is of the correct type.
    ...     # possible valid inputs are integers or strings
    ...     # that represent integers
    ...     if not isinstance(value, (int, str)):
    ...         raise VdtTypeError(value)
    ...     elif isinstance(value, str):
    ...         # if we are given a string
    ...         # attempt to convert to an integer
    ...         try:
    ...             value = int(value)
    ...         except ValueError:
    ...             raise VdtValueError(value)
    ...     # check the value is between our constraints
    ...     if not min <= value:
    ...         raise VdtValueTooSmallError(value)
    ...     if not value <= max:
    ...         raise VdtValueTooBigError(value)
    ...     return value

    >>> fdict = {'int_range': int_range_check}
    >>> vtr1 = Validator(fdict)
    >>> vtr1.check('int_range(20, 40)', '30')
    30
    >>> vtr1.check('int_range(20, 40)', '60')
    Traceback (most recent call last):
    VdtValueTooBigError: the value "60" is too big.

    New functions can be added with : ::

    >>> vtr2 = Validator()
    >>> vtr2.functions['int_range'] = int_range_check

    Or by passing in a dictionary of functions when Validator
    is instantiated.

    Your functions *can* use keyword arguments,
    but the first argument should always be 'value'.

    If the function doesn't take additional arguments,
    the parentheses are optional in the check.
    It can be written with either of : ::

        keyword = function_name
        keyword = function_name()

    The first program to utilise Validator() was Michael Foord's
    ConfigObj, an alternative to ConfigParser which supports lists and
    can validate a config file using a config schema.
    For more details on using Validator with ConfigObj see:
    http://www.voidspace.org.uk/python/configobj.html
    """

    # this regex does the initial parsing of the checks
    _func_re = re.compile(r'(.+?)\((.*)\)', re.DOTALL)

    # this regex takes apart keyword arguments
    _key_arg = re.compile(r'^([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.*)$', re.DOTALL)

    # this regex finds keyword=list(....) type values
    _list_arg = _list_arg

    # this regex takes individual values out of lists - in one pass
    _list_members = _list_members

    # These regexes check a set of arguments for validity
    # and then pull the members out
    _paramfinder = re.compile(_paramstring, re.VERBOSE | re.DOTALL)
    _matchfinder = re.compile(_matchstring, re.VERBOSE | re.DOTALL)


    def __init__(self, functions=None):
        """
        Create a Validator with the standard checks registered, plus any
        extra (or overriding) checks supplied in the ``functions`` dict.

        >>> vtri = Validator()
        """
        self.functions = {
            '': self._pass,
            'integer': is_integer,
            'float': is_float,
            'boolean': is_boolean,
            'ip_addr': is_ip_addr,
            'string': is_string,
            'list': is_list,
            'tuple': is_tuple,
            'int_list': is_int_list,
            'float_list': is_float_list,
            'bool_list': is_bool_list,
            'ip_addr_list': is_ip_addr_list,
            'string_list': is_string_list,
            'mixed_list': is_mixed_list,
            'pass': self._pass,
            'option': is_option,
            'force_list': force_list,
        }
        if functions is not None:
            self.functions.update(functions)
        # tekNico: for use by ConfigObj
        self.baseErrorClass = ValidateError
        # maps check strings to their parsed form so each distinct check
        # string is only parsed once
        self._cache = {}


    def check(self, check, value, missing=False):
        """
        Usage: check(check, value)

        Arguments:
            check: string representing check to apply (including arguments)
            value: object to be checked
        Returns value, converted to correct type if necessary

        If the check fails, raises a ``ValidateError`` subclass.

        >>> vtor.check('yoda', '')
        Traceback (most recent call last):
        VdtUnknownCheckError: the check "yoda" is unknown.
        >>> vtor.check('yoda()', '')
        Traceback (most recent call last):
        VdtUnknownCheckError: the check "yoda" is unknown.

        >>> vtor.check('string(default="")', '', missing=True)
        ''
        """
        fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)

        if missing:
            if default is None:
                # no information needed here - to be handled by caller
                raise VdtMissingValue()
            value = self._handle_none(default)

        if value is None:
            return None

        return self._check_value(value, fun_name, fun_args, fun_kwargs)


    def _handle_none(self, value):
        # 'None' (unquoted) means a real None; a *quoted* None means the
        # literal string 'None'.
        if value == 'None':
            return None
        elif value in ("'None'", '"None"'):
            # Special case a quoted None
            value = self._unquote(value)
        return value


    def _parse_with_caching(self, check):
        # Parse a check string, memoising the result per check string.
        if check in self._cache:
            fun_name, fun_args, fun_kwargs, default = self._cache[check]
            # We call list and dict below to work with *copies* of the data
            # rather than the original (which are mutable of course)
            fun_args = list(fun_args)
            fun_kwargs = dict(fun_kwargs)
        else:
            fun_name, fun_args, fun_kwargs, default = self._parse_check(check)
            fun_kwargs = {str(key): val for (key, val) in fun_kwargs.items()}
            self._cache[check] = fun_name, list(fun_args), dict(fun_kwargs), default
        return fun_name, fun_args, fun_kwargs, default


    def _check_value(self, value, fun_name, fun_args, fun_kwargs):
        # Dispatch to the registered check function.
        try:
            fun = self.functions[fun_name]
        except KeyError:
            raise VdtUnknownCheckError(fun_name)
        else:
            return fun(value, *fun_args, **fun_kwargs)


    def _parse_check(self, check):
        """
        Split a check string into (name, args, kwargs, default).

        ``default`` is removed from the keyword arguments so the check
        function never receives a spurious 'default' keyword.
        """
        fun_match = self._func_re.match(check)
        if fun_match:
            fun_name = fun_match.group(1)
            arg_string = fun_match.group(2)
            arg_match = self._matchfinder.match(arg_string)
            if arg_match is None:
                # Bad syntax.
                # NOTE(review): this single-argument raise relies on
                # VdtParamError accepting a bare message.
                raise VdtParamError('Bad syntax in check "%s".' % check)
            fun_args = []
            fun_kwargs = {}
            # pull out args of group 2
            for arg in self._paramfinder.findall(arg_string):
                # args may need whitespace removing (before removing quotes)
                arg = arg.strip()
                listmatch = self._list_arg.match(arg)
                if listmatch:
                    key, val = self._list_handle(listmatch)
                    fun_kwargs[key] = val
                    continue
                keymatch = self._key_arg.match(arg)
                if keymatch:
                    val = keymatch.group(2)
                    if val not in ("'None'", '"None"'):
                        # Special case a quoted None
                        val = self._unquote(val)
                    fun_kwargs[keymatch.group(1)] = val
                    continue

                fun_args.append(self._unquote(arg))
        else:
            # allows for function names without (args)
            return check, (), {}, None

        # Default must be deleted if the value is specified too,
        # otherwise the check function will get a spurious "default" keyword arg
        default = fun_kwargs.pop('default', None)
        return fun_name, fun_args, fun_kwargs, default


    def _unquote(self, val):
        """Unquote a value if necessary."""
        if (len(val) >= 2) and (val[0] in ("'", '"')) and (val[0] == val[-1]):
            val = val[1:-1]
        return val


    def _list_handle(self, listmatch):
        """Take apart a ``keyword=list('val, 'val')`` type string."""
        out = []
        name = listmatch.group(1)
        args = listmatch.group(2)
        for arg in self._list_members.findall(args):
            out.append(self._unquote(arg))
        return name, out


    def _pass(self, value):
        """
        Dummy check that always passes

        >>> vtor.check('', 0)
        0
        >>> vtor.check('', '0')
        '0'
        """
        return value


    def get_default_value(self, check):
        """
        Given a check, return the default value for the check
        (converted to the right type).

        If the check doesn't specify a default value then a
        ``KeyError`` will be raised.
        """
        fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
        if default is None:
            raise KeyError('Check "%s" has no default value.' % check)
        value = self._handle_none(default)
        if value is None:
            return value
        return self._check_value(value, fun_name, fun_args, fun_kwargs)
% check) + value = self._handle_none(default) + if value is None: + return value + return self._check_value(value, fun_name, fun_args, fun_kwargs) + +def _is_num_param(names, values, to_float=False): + """ + Return numbers from inputs or raise VdtParamError. + + Lets ``None`` pass through. + Pass in keyword argument ``to_float=True`` to + use float for the conversion rather than int. + + >>> _is_num_param(('', ''), (0, 1.0)) + [0, 1] + >>> _is_num_param(('', ''), (0, 1.0), to_float=True) + [0.0, 1.0] + >>> _is_num_param(('a'), ('a')) + Traceback (most recent call last): + VdtParamError: passed an incorrect value "a" for parameter "a". + """ + fun = to_float and float or int + out_params = [] + for (name, val) in zip(names, values): + if val is None: + out_params.append(val) + elif isinstance(val, (int, int, float, str)): + try: + out_params.append(fun(val)) + except ValueError as e: + raise VdtParamError(name, val) + else: + raise VdtParamError(name, val) + return out_params + +# built in checks +# you can override these by setting the appropriate name +# in Validator.functions +# note: if the params are specified wrongly in your input string, +# you will also raise errors. + +def is_integer(value, min=None, max=None): + """ + A check that tests that a given value is an integer (int, or long) + and optionally, between bounds. A negative value is accepted, while + a float will fail. + + If the value is a string, then the conversion is done - if possible. + Otherwise a VdtError is raised. + + >>> vtor.check('integer', '-1') + -1 + >>> vtor.check('integer', '0') + 0 + >>> vtor.check('integer', 9) + 9 + >>> vtor.check('integer', 'a') + Traceback (most recent call last): + VdtTypeError: the value "a" is of the wrong type. + >>> vtor.check('integer', '2.2') + Traceback (most recent call last): + VdtTypeError: the value "2.2" is of the wrong type. 
+ >>> vtor.check('integer(10)', '20') + 20 + >>> vtor.check('integer(max=20)', '15') + 15 + >>> vtor.check('integer(10)', '9') + Traceback (most recent call last): + VdtValueTooSmallError: the value "9" is too small. + >>> vtor.check('integer(10)', 9) + Traceback (most recent call last): + VdtValueTooSmallError: the value "9" is too small. + >>> vtor.check('integer(max=20)', '35') + Traceback (most recent call last): + VdtValueTooBigError: the value "35" is too big. + >>> vtor.check('integer(max=20)', 35) + Traceback (most recent call last): + VdtValueTooBigError: the value "35" is too big. + >>> vtor.check('integer(0, 9)', False) + 0 + """ + (min_val, max_val) = _is_num_param(('min', 'max'), (min, max)) + if not isinstance(value, (int, int, str)): + raise VdtTypeError(value) + if isinstance(value, str): + # if it's a string - does it represent an integer ? + try: + value = int(value) + except ValueError: + raise VdtTypeError(value) + if (min_val is not None) and (value < min_val): + raise VdtValueTooSmallError(value) + if (max_val is not None) and (value > max_val): + raise VdtValueTooBigError(value) + return value + +def is_float(value, min=None, max=None): + """ + A check that tests that a given value is a float + (an integer will be accepted), and optionally - that it is between bounds. + + If the value is a string, then the conversion is done - if possible. + Otherwise a VdtError is raised. + + This can accept negative values. + + >>> vtor.check('float', '2') + 2.0 + + From now on we multiply the value to avoid comparing decimals + + >>> vtor.check('float', '-6.8') * 10 + -68.0 + >>> vtor.check('float', '12.2') * 10 + 122.0 + >>> vtor.check('float', 8.4) * 10 + 84.0 + >>> vtor.check('float', 'a') + Traceback (most recent call last): + VdtTypeError: the value "a" is of the wrong type. 
+ >>> vtor.check('float(10.1)', '10.2') * 10 + 102.0 + >>> vtor.check('float(max=20.2)', '15.1') * 10 + 151.0 + >>> vtor.check('float(10.0)', '9.0') + Traceback (most recent call last): + VdtValueTooSmallError: the value "9.0" is too small. + >>> vtor.check('float(max=20.0)', '35.0') + Traceback (most recent call last): + VdtValueTooBigError: the value "35.0" is too big. + """ + (min_val, max_val) = _is_num_param( + ('min', 'max'), (min, max), to_float=True) + if not isinstance(value, (int, int, float, str)): + raise VdtTypeError(value) + if not isinstance(value, float): + # if it's a string - does it represent a float ? + try: + value = float(value) + except ValueError: + raise VdtTypeError(value) + if (min_val is not None) and (value < min_val): + raise VdtValueTooSmallError(value) + if (max_val is not None) and (value > max_val): + raise VdtValueTooBigError(value) + return value + +bool_dict = { + True: True, 'on': True, '1': True, 'true': True, 'yes': True, + False: False, 'off': False, '0': False, 'false': False, 'no': False, +} + +def is_boolean(value): + """ + Check if the value represents a boolean. + + >>> vtor.check('boolean', 0) + 0 + >>> vtor.check('boolean', False) + 0 + >>> vtor.check('boolean', '0') + 0 + >>> vtor.check('boolean', 'off') + 0 + >>> vtor.check('boolean', 'false') + 0 + >>> vtor.check('boolean', 'no') + 0 + >>> vtor.check('boolean', 'nO') + 0 + >>> vtor.check('boolean', 'NO') + 0 + >>> vtor.check('boolean', 1) + 1 + >>> vtor.check('boolean', True) + 1 + >>> vtor.check('boolean', '1') + 1 + >>> vtor.check('boolean', 'on') + 1 + >>> vtor.check('boolean', 'true') + 1 + >>> vtor.check('boolean', 'yes') + 1 + >>> vtor.check('boolean', 'Yes') + 1 + >>> vtor.check('boolean', 'YES') + 1 + >>> vtor.check('boolean', '') + Traceback (most recent call last): + VdtTypeError: the value "" is of the wrong type. + >>> vtor.check('boolean', 'up') + Traceback (most recent call last): + VdtTypeError: the value "up" is of the wrong type. 
+ + """ + if isinstance(value, str): + try: + return bool_dict[value.lower()] + except KeyError: + raise VdtTypeError(value) + # we do an equality test rather than an identity test + # this ensures Python 2.2 compatibilty + # and allows 0 and 1 to represent True and False + if value == False: + return False + elif value == True: + return True + else: + raise VdtTypeError(value) + +def is_ip_addr(value): + """ + Check that the supplied value is an Internet Protocol address, v.4, + represented by a dotted-quad string, i.e. '1.2.3.4'. + + >>> vtor.check('ip_addr', '1 ') + '1' + >>> vtor.check('ip_addr', ' 1.2') + '1.2' + >>> vtor.check('ip_addr', ' 1.2.3 ') + '1.2.3' + >>> vtor.check('ip_addr', '1.2.3.4') + '1.2.3.4' + >>> vtor.check('ip_addr', '0.0.0.0') + '0.0.0.0' + >>> vtor.check('ip_addr', '255.255.255.255') + '255.255.255.255' + >>> vtor.check('ip_addr', '255.255.255.256') + Traceback (most recent call last): + VdtValueError: the value "255.255.255.256" is unacceptable. + >>> vtor.check('ip_addr', '1.2.3.4.5') + Traceback (most recent call last): + VdtValueError: the value "1.2.3.4.5" is unacceptable. + >>> vtor.check('ip_addr', 0) + Traceback (most recent call last): + VdtTypeError: the value "0" is of the wrong type. + """ + if not isinstance(value, str): + raise VdtTypeError(value) + value = value.strip() + try: + dottedQuadToNum(value) + except ValueError: + raise VdtValueError(value) + return value + +def is_list(value, min=None, max=None): + """ + Check that the value is a list of values. + + You can optionally specify the minimum and maximum number of members. + + It does no check on list members. + + >>> vtor.check('list', ()) + [] + >>> vtor.check('list', []) + [] + >>> vtor.check('list', (1, 2)) + [1, 2] + >>> vtor.check('list', [1, 2]) + [1, 2] + >>> vtor.check('list(3)', (1, 2)) + Traceback (most recent call last): + VdtValueTooShortError: the value "(1, 2)" is too short. 
+ >>> vtor.check('list(max=5)', (1, 2, 3, 4, 5, 6)) + Traceback (most recent call last): + VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long. + >>> vtor.check('list(min=3, max=5)', (1, 2, 3, 4)) + [1, 2, 3, 4] + >>> vtor.check('list', 0) + Traceback (most recent call last): + VdtTypeError: the value "0" is of the wrong type. + >>> vtor.check('list', '12') + Traceback (most recent call last): + VdtTypeError: the value "12" is of the wrong type. + """ + (min_len, max_len) = _is_num_param(('min', 'max'), (min, max)) + if isinstance(value, str): + raise VdtTypeError(value) + try: + num_members = len(value) + except TypeError: + raise VdtTypeError(value) + if min_len is not None and num_members < min_len: + raise VdtValueTooShortError(value) + if max_len is not None and num_members > max_len: + raise VdtValueTooLongError(value) + return list(value) + +def is_tuple(value, min=None, max=None): + """ + Check that the value is a tuple of values. + + You can optionally specify the minimum and maximum number of members. + + It does no check on members. + + >>> vtor.check('tuple', ()) + () + >>> vtor.check('tuple', []) + () + >>> vtor.check('tuple', (1, 2)) + (1, 2) + >>> vtor.check('tuple', [1, 2]) + (1, 2) + >>> vtor.check('tuple(3)', (1, 2)) + Traceback (most recent call last): + VdtValueTooShortError: the value "(1, 2)" is too short. + >>> vtor.check('tuple(max=5)', (1, 2, 3, 4, 5, 6)) + Traceback (most recent call last): + VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long. + >>> vtor.check('tuple(min=3, max=5)', (1, 2, 3, 4)) + (1, 2, 3, 4) + >>> vtor.check('tuple', 0) + Traceback (most recent call last): + VdtTypeError: the value "0" is of the wrong type. + >>> vtor.check('tuple', '12') + Traceback (most recent call last): + VdtTypeError: the value "12" is of the wrong type. + """ + return tuple(is_list(value, min, max)) + +def is_string(value, min=None, max=None): + """ + Check that the supplied value is a string. 
+ + You can optionally specify the minimum and maximum number of members. + + >>> vtor.check('string', '0') + '0' + >>> vtor.check('string', 0) + Traceback (most recent call last): + VdtTypeError: the value "0" is of the wrong type. + >>> vtor.check('string(2)', '12') + '12' + >>> vtor.check('string(2)', '1') + Traceback (most recent call last): + VdtValueTooShortError: the value "1" is too short. + >>> vtor.check('string(min=2, max=3)', '123') + '123' + >>> vtor.check('string(min=2, max=3)', '1234') + Traceback (most recent call last): + VdtValueTooLongError: the value "1234" is too long. + """ + if not isinstance(value, str): + raise VdtTypeError(value) + (min_len, max_len) = _is_num_param(('min', 'max'), (min, max)) + try: + num_members = len(value) + except TypeError: + raise VdtTypeError(value) + if min_len is not None and num_members < min_len: + raise VdtValueTooShortError(value) + if max_len is not None and num_members > max_len: + raise VdtValueTooLongError(value) + return value + + +def is_int_list(value, min=None, max=None): + """ + Check that the value is a list of integers. + + You can optionally specify the minimum and maximum number of members. + + Each list member is checked that it is an integer. + + >>> vtor.check('int_list', ()) + [] + >>> vtor.check('int_list', []) + [] + >>> vtor.check('int_list', (1, 2)) + [1, 2] + >>> vtor.check('int_list', [1, 2]) + [1, 2] + >>> vtor.check('int_list', [1, 'a']) + Traceback (most recent call last): + VdtTypeError: the value "a" is of the wrong type. + """ + return [is_integer(mem) for mem in is_list(value, min, max)] + + +def is_bool_list(value, min=None, max=None): + """ + Check that the value is a list of booleans. + + You can optionally specify the minimum and maximum number of members. + + Each list member is checked that it is a boolean. 
+ + >>> vtor.check('bool_list', ()) + [] + >>> vtor.check('bool_list', []) + [] + >>> check_res = vtor.check('bool_list', (True, False)) + >>> check_res == [True, False] + 1 + >>> check_res = vtor.check('bool_list', [True, False]) + >>> check_res == [True, False] + 1 + >>> vtor.check('bool_list', [True, 'a']) + Traceback (most recent call last): + VdtTypeError: the value "a" is of the wrong type. + """ + return [is_boolean(mem) for mem in is_list(value, min, max)] + + +def is_float_list(value, min=None, max=None): + """ + Check that the value is a list of floats. + + You can optionally specify the minimum and maximum number of members. + + Each list member is checked that it is a float. + + >>> vtor.check('float_list', ()) + [] + >>> vtor.check('float_list', []) + [] + >>> vtor.check('float_list', (1, 2.0)) + [1.0, 2.0] + >>> vtor.check('float_list', [1, 2.0]) + [1.0, 2.0] + >>> vtor.check('float_list', [1, 'a']) + Traceback (most recent call last): + VdtTypeError: the value "a" is of the wrong type. + """ + return [is_float(mem) for mem in is_list(value, min, max)] + +def is_string_list(value, min=None, max=None): + """ + Check that the value is a list of strings. + + You can optionally specify the minimum and maximum number of members. + + Each list member is checked that it is a string. + + >>> vtor.check('string_list', ()) + [] + >>> vtor.check('string_list', []) + [] + >>> vtor.check('string_list', ('a', 'b')) + ['a', 'b'] + >>> vtor.check('string_list', ['a', 1]) + Traceback (most recent call last): + VdtTypeError: the value "1" is of the wrong type. + >>> vtor.check('string_list', 'hello') + Traceback (most recent call last): + VdtTypeError: the value "hello" is of the wrong type. + """ + if isinstance(value, str): + raise VdtTypeError(value) + return [is_string(mem) for mem in is_list(value, min, max)] + +def is_ip_addr_list(value, min=None, max=None): + """ + Check that the value is a list of IP addresses. 
+ + You can optionally specify the minimum and maximum number of members. + + Each list member is checked that it is an IP address. + + >>> vtor.check('ip_addr_list', ()) + [] + >>> vtor.check('ip_addr_list', []) + [] + >>> vtor.check('ip_addr_list', ('1.2.3.4', '5.6.7.8')) + ['1.2.3.4', '5.6.7.8'] + >>> vtor.check('ip_addr_list', ['a']) + Traceback (most recent call last): + VdtValueError: the value "a" is unacceptable. + """ + return [is_ip_addr(mem) for mem in is_list(value, min, max)] + +def force_list(value, min=None, max=None): + """ + Check that a value is a list, coercing strings into + a list with one member. Useful where users forget the + trailing comma that turns a single value into a list. + + You can optionally specify the minimum and maximum number of members. + A minumum of greater than one will fail if the user only supplies a + string. + + >>> vtor.check('force_list', ()) + [] + >>> vtor.check('force_list', []) + [] + >>> vtor.check('force_list', 'hello') + ['hello'] + """ + if not isinstance(value, (list, tuple)): + value = [value] + return is_list(value, min, max) + +fun_dict = { + 'integer': is_integer, + 'float': is_float, + 'ip_addr': is_ip_addr, + 'string': is_string, + 'boolean': is_boolean, +} + + +def is_mixed_list(value, *args): + """ + Check that the value is a list. + Allow specifying the type of each member. + Work on lists of specific lengths. + + You specify each member as a positional argument specifying type + + Each type should be one of the following strings : + 'integer', 'float', 'ip_addr', 'string', 'boolean' + + So you can specify a list of two strings, followed by + two integers as : + + mixed_list('string', 'string', 'integer', 'integer') + + The length of the list must match the number of positional + arguments you supply. 
+ + >>> mix_str = "mixed_list('integer', 'float', 'ip_addr', 'string', 'boolean')" + >>> check_res = vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', True)) + >>> check_res == [1, 2.0, '1.2.3.4', 'a', True] + 1 + >>> check_res = vtor.check(mix_str, ('1', '2.0', '1.2.3.4', 'a', 'True')) + >>> check_res == [1, 2.0, '1.2.3.4', 'a', True] + 1 + >>> vtor.check(mix_str, ('b', 2.0, '1.2.3.4', 'a', True)) + Traceback (most recent call last): + VdtTypeError: the value "b" is of the wrong type. + >>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a')) + Traceback (most recent call last): + VdtValueTooShortError: the value "(1, 2.0, '1.2.3.4', 'a')" is too short. + >>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', 1, 'b')) + Traceback (most recent call last): + VdtValueTooLongError: the value "(1, 2.0, '1.2.3.4', 'a', 1, 'b')" is too long. + >>> vtor.check(mix_str, 0) + Traceback (most recent call last): + VdtTypeError: the value "0" is of the wrong type. + + This test requires an elaborate setup, because of a change in error string + output from the interpreter between Python 2.2 and 2.3 . + + >>> res_seq = ( + ... 'passed an incorrect value "', + ... 'yoda', + ... '" for parameter "mixed_list".', + ... ) + >>> res_str = "'".join(res_seq) + >>> try: + ... vtor.check('mixed_list("yoda")', ('a')) + ... except VdtParamError, err: + ... str(err) == res_str + 1 + """ + try: + length = len(value) + except TypeError: + raise VdtTypeError(value) + if length < len(args): + raise VdtValueTooShortError(value) + elif length > len(args): + raise VdtValueTooLongError(value) + try: + return [fun_dict[arg](val) for arg, val in zip(args, value)] + except KeyError as e: + raise VdtParamError('mixed_list', e) + + +def is_option(value, *options): + """ + This check matches the value to any of a set of options. + + >>> vtor.check('option("yoda", "jedi")', 'yoda') + 'yoda' + >>> vtor.check('option("yoda", "jedi")', 'jed') + Traceback (most recent call last): + VdtValueError: the value "jed" is unacceptable. 
+ >>> vtor.check('option("yoda", "jedi")', 0) + Traceback (most recent call last): + VdtTypeError: the value "0" is of the wrong type. + """ + if not isinstance(value, str): + raise VdtTypeError(value) + if not value in options: + raise VdtValueError(value) + return value + +def _test(value, *args, **keywargs): + """ + A function that exists for test purposes. + + >>> checks = [ + ... '3, 6, min=1, max=3, test=list(a, b, c)', + ... '3', + ... '3, 6', + ... '3,', + ... 'min=1, test="a b c"', + ... 'min=5, test="a, b, c"', + ... 'min=1, max=3, test="a, b, c"', + ... 'min=-100, test=-99', + ... 'min=1, max=3', + ... '3, 6, test="36"', + ... '3, 6, test="a, b, c"', + ... '3, max=3, test=list("a", "b", "c")', + ... '''3, max=3, test=list("'a'", 'b', "x=(c)")''', + ... "test='x=fish(3)'", + ... ] + >>> v = Validator({'test': _test}) + >>> for entry in checks: + ... print v.check(('test(%s)' % entry), 3) + (3, ('3', '6'), {'test': ['a', 'b', 'c'], 'max': '3', 'min': '1'}) + (3, ('3',), {}) + (3, ('3', '6'), {}) + (3, ('3',), {}) + (3, (), {'test': 'a b c', 'min': '1'}) + (3, (), {'test': 'a, b, c', 'min': '5'}) + (3, (), {'test': 'a, b, c', 'max': '3', 'min': '1'}) + (3, (), {'test': '-99', 'min': '-100'}) + (3, (), {'max': '3', 'min': '1'}) + (3, ('3', '6'), {'test': '36'}) + (3, ('3', '6'), {'test': 'a, b, c'}) + (3, ('3',), {'test': ['a', 'b', 'c'], 'max': '3'}) + (3, ('3',), {'test': ["'a'", 'b', 'x=(c)'], 'max': '3'}) + (3, (), {'test': 'x=fish(3)'}) + + >>> v = Validator() + >>> v.check('integer(default=6)', '3') + 3 + >>> v.check('integer(default=6)', None, True) + 6 + >>> v.get_default_value('integer(default=6)') + 6 + >>> v.get_default_value('float(default=6)') + 6.0 + >>> v.get_default_value('pass(default=None)') + >>> v.get_default_value("string(default='None')") + 'None' + >>> v.get_default_value('pass') + Traceback (most recent call last): + KeyError: 'Check "pass" has no default value.' 
+ >>> v.get_default_value('pass(default=list(1, 2, 3, 4))') + ['1', '2', '3', '4'] + + >>> v = Validator() + >>> v.check("pass(default=None)", None, True) + >>> v.check("pass(default='None')", None, True) + 'None' + >>> v.check('pass(default="None")', None, True) + 'None' + >>> v.check('pass(default=list(1, 2, 3, 4))', None, True) + ['1', '2', '3', '4'] + + Bug test for unicode arguments + >>> v = Validator() + >>> v.check(u'string(min=4)', u'test') + u'test' + + >>> v = Validator() + >>> v.get_default_value(u'string(min=4, default="1234")') + u'1234' + >>> v.check(u'string(min=4, default="1234")', u'test') + u'test' + + >>> v = Validator() + >>> default = v.get_default_value('string(default=None)') + >>> default == None + 1 + """ + return (value, args, keywargs) + + +def _test2(): + """ + >>> + >>> v = Validator() + >>> v.get_default_value('string(default="#ff00dd")') + '#ff00dd' + >>> v.get_default_value('integer(default=3) # comment') + 3 + """ + +def _test3(): + r""" + >>> vtor.check('string(default="")', '', missing=True) + '' + >>> vtor.check('string(default="\n")', '', missing=True) + '\n' + >>> print vtor.check('string(default="\n")', '', missing=True), + + >>> vtor.check('string()', '\n') + '\n' + >>> vtor.check('string(default="\n\n\n")', '', missing=True) + '\n\n\n' + >>> vtor.check('string()', 'random \n text goes here\n\n') + 'random \n text goes here\n\n' + >>> vtor.check('string(default=" \nrandom text\ngoes \n here\n\n ")', + ... 
'', missing=True) + ' \nrandom text\ngoes \n here\n\n ' + >>> vtor.check("string(default='\n\n\n')", '', missing=True) + '\n\n\n' + >>> vtor.check("option('\n','a','b',default='\n')", '', missing=True) + '\n' + >>> vtor.check("string_list()", ['foo', '\n', 'bar']) + ['foo', '\n', 'bar'] + >>> vtor.check("string_list(default=list('\n'))", '', missing=True) + ['\n'] + """ + +if __name__ == '__main__': + # run the code tests in doctest format + import sys + import doctest + m = sys.modules.get('__main__') + globs = m.__dict__.copy() + globs.update({ + 'vtor': Validator(), + }) + doctest.testmod(m, globs=globs) \ No newline at end of file diff --git a/astropy/extern/js/jquery-1.11.0.js b/astropy/extern/js/jquery-1.11.0.js new file mode 100644 index 0000000..3c88fa8 --- /dev/null +++ b/astropy/extern/js/jquery-1.11.0.js @@ -0,0 +1,10337 @@ +/*! + * jQuery JavaScript Library v1.11.0 + * http://jquery.com/ + * + * Includes Sizzle.js + * http://sizzlejs.com/ + * + * Copyright 2005, 2014 jQuery Foundation, Inc. and other contributors + * Released under the MIT license + * http://jquery.org/license + * + * Date: 2014-01-23T21:02Z + */ + +(function( global, factory ) { + + if ( typeof module === "object" && typeof module.exports === "object" ) { + // For CommonJS and CommonJS-like environments where a proper window is present, + // execute the factory and get jQuery + // For environments that do not inherently posses a window with a document + // (such as Node.js), expose a jQuery-making factory as module.exports + // This accentuates the need for the creation of a real window + // e.g. var jQuery = require("jquery")(window); + // See ticket #14549 for more info + module.exports = global.document ? + factory( global, true ) : + function( w ) { + if ( !w.document ) { + throw new Error( "jQuery requires a window with a document" ); + } + return factory( w ); + }; + } else { + factory( global ); + } + +// Pass this if window is not defined yet +}(typeof window !== "undefined" ? 
window : this, function( window, noGlobal ) { + +// Can't do this because several apps including ASP.NET trace +// the stack via arguments.caller.callee and Firefox dies if +// you try to trace through "use strict" call chains. (#13335) +// Support: Firefox 18+ +// + +var deletedIds = []; + +var slice = deletedIds.slice; + +var concat = deletedIds.concat; + +var push = deletedIds.push; + +var indexOf = deletedIds.indexOf; + +var class2type = {}; + +var toString = class2type.toString; + +var hasOwn = class2type.hasOwnProperty; + +var trim = "".trim; + +var support = {}; + + + +var + version = "1.11.0", + + // Define a local copy of jQuery + jQuery = function( selector, context ) { + // The jQuery object is actually just the init constructor 'enhanced' + // Need init if jQuery is called (just allow error to be thrown if not included) + return new jQuery.fn.init( selector, context ); + }, + + // Make sure we trim BOM and NBSP (here's looking at you, Safari 5.0 and IE) + rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, + + // Matches dashed string for camelizing + rmsPrefix = /^-ms-/, + rdashAlpha = /-([\da-z])/gi, + + // Used by jQuery.camelCase as callback to replace() + fcamelCase = function( all, letter ) { + return letter.toUpperCase(); + }; + +jQuery.fn = jQuery.prototype = { + // The current version of jQuery being used + jquery: version, + + constructor: jQuery, + + // Start with an empty selector + selector: "", + + // The default length of a jQuery object is 0 + length: 0, + + toArray: function() { + return slice.call( this ); + }, + + // Get the Nth element in the matched element set OR + // Get the whole matched element set as a clean array + get: function( num ) { + return num != null ? + + // Return a 'clean' array + ( num < 0 ? 
this[ num + this.length ] : this[ num ] ) : + + // Return just the object + slice.call( this ); + }, + + // Take an array of elements and push it onto the stack + // (returning the new matched element set) + pushStack: function( elems ) { + + // Build a new jQuery matched element set + var ret = jQuery.merge( this.constructor(), elems ); + + // Add the old object onto the stack (as a reference) + ret.prevObject = this; + ret.context = this.context; + + // Return the newly-formed element set + return ret; + }, + + // Execute a callback for every element in the matched set. + // (You can seed the arguments with an array of args, but this is + // only used internally.) + each: function( callback, args ) { + return jQuery.each( this, callback, args ); + }, + + map: function( callback ) { + return this.pushStack( jQuery.map(this, function( elem, i ) { + return callback.call( elem, i, elem ); + })); + }, + + slice: function() { + return this.pushStack( slice.apply( this, arguments ) ); + }, + + first: function() { + return this.eq( 0 ); + }, + + last: function() { + return this.eq( -1 ); + }, + + eq: function( i ) { + var len = this.length, + j = +i + ( i < 0 ? len : 0 ); + return this.pushStack( j >= 0 && j < len ? [ this[j] ] : [] ); + }, + + end: function() { + return this.prevObject || this.constructor(null); + }, + + // For internal use only. + // Behaves like an Array's method, not like a jQuery method. 
+ push: push, + sort: deletedIds.sort, + splice: deletedIds.splice +}; + +jQuery.extend = jQuery.fn.extend = function() { + var src, copyIsArray, copy, name, options, clone, + target = arguments[0] || {}, + i = 1, + length = arguments.length, + deep = false; + + // Handle a deep copy situation + if ( typeof target === "boolean" ) { + deep = target; + + // skip the boolean and the target + target = arguments[ i ] || {}; + i++; + } + + // Handle case when target is a string or something (possible in deep copy) + if ( typeof target !== "object" && !jQuery.isFunction(target) ) { + target = {}; + } + + // extend jQuery itself if only one argument is passed + if ( i === length ) { + target = this; + i--; + } + + for ( ; i < length; i++ ) { + // Only deal with non-null/undefined values + if ( (options = arguments[ i ]) != null ) { + // Extend the base object + for ( name in options ) { + src = target[ name ]; + copy = options[ name ]; + + // Prevent never-ending loop + if ( target === copy ) { + continue; + } + + // Recurse if we're merging plain objects or arrays + if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) { + if ( copyIsArray ) { + copyIsArray = false; + clone = src && jQuery.isArray(src) ? src : []; + + } else { + clone = src && jQuery.isPlainObject(src) ? src : {}; + } + + // Never move original objects, clone them + target[ name ] = jQuery.extend( deep, clone, copy ); + + // Don't bring in undefined values + } else if ( copy !== undefined ) { + target[ name ] = copy; + } + } + } + } + + // Return the modified object + return target; +}; + +jQuery.extend({ + // Unique for each copy of jQuery on the page + expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ), + + // Assume jQuery is ready without the ready module + isReady: true, + + error: function( msg ) { + throw new Error( msg ); + }, + + noop: function() {}, + + // See test/unit/core.js for details concerning isFunction. 
+ // Since version 1.3, DOM methods and functions like alert + // aren't supported. They return false on IE (#2968). + isFunction: function( obj ) { + return jQuery.type(obj) === "function"; + }, + + isArray: Array.isArray || function( obj ) { + return jQuery.type(obj) === "array"; + }, + + isWindow: function( obj ) { + /* jshint eqeqeq: false */ + return obj != null && obj == obj.window; + }, + + isNumeric: function( obj ) { + // parseFloat NaNs numeric-cast false positives (null|true|false|"") + // ...but misinterprets leading-number strings, particularly hex literals ("0x...") + // subtraction forces infinities to NaN + return obj - parseFloat( obj ) >= 0; + }, + + isEmptyObject: function( obj ) { + var name; + for ( name in obj ) { + return false; + } + return true; + }, + + isPlainObject: function( obj ) { + var key; + + // Must be an Object. + // Because of IE, we also have to check the presence of the constructor property. + // Make sure that DOM nodes and window objects don't pass through, as well + if ( !obj || jQuery.type(obj) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) { + return false; + } + + try { + // Not own constructor property must be Object + if ( obj.constructor && + !hasOwn.call(obj, "constructor") && + !hasOwn.call(obj.constructor.prototype, "isPrototypeOf") ) { + return false; + } + } catch ( e ) { + // IE8,9 Will throw exceptions on certain host objects #9897 + return false; + } + + // Support: IE<9 + // Handle iteration over inherited properties before own properties. + if ( support.ownLast ) { + for ( key in obj ) { + return hasOwn.call( obj, key ); + } + } + + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. + for ( key in obj ) {} + + return key === undefined || hasOwn.call( obj, key ); + }, + + type: function( obj ) { + if ( obj == null ) { + return obj + ""; + } + return typeof obj === "object" || typeof obj === "function" ? 
+ class2type[ toString.call(obj) ] || "object" : + typeof obj; + }, + + // Evaluates a script in a global context + // Workarounds based on findings by Jim Driscoll + // http://weblogs.java.net/blog/driscoll/archive/2009/09/08/eval-javascript-global-context + globalEval: function( data ) { + if ( data && jQuery.trim( data ) ) { + // We use execScript on Internet Explorer + // We use an anonymous function so that context is window + // rather than jQuery in Firefox + ( window.execScript || function( data ) { + window[ "eval" ].call( window, data ); + } )( data ); + } + }, + + // Convert dashed to camelCase; used by the css and data modules + // Microsoft forgot to hump their vendor prefix (#9572) + camelCase: function( string ) { + return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); + }, + + nodeName: function( elem, name ) { + return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); + }, + + // args is for internal usage only + each: function( obj, callback, args ) { + var value, + i = 0, + length = obj.length, + isArray = isArraylike( obj ); + + if ( args ) { + if ( isArray ) { + for ( ; i < length; i++ ) { + value = callback.apply( obj[ i ], args ); + + if ( value === false ) { + break; + } + } + } else { + for ( i in obj ) { + value = callback.apply( obj[ i ], args ); + + if ( value === false ) { + break; + } + } + } + + // A special, fast, case for the most common use of each + } else { + if ( isArray ) { + for ( ; i < length; i++ ) { + value = callback.call( obj[ i ], i, obj[ i ] ); + + if ( value === false ) { + break; + } + } + } else { + for ( i in obj ) { + value = callback.call( obj[ i ], i, obj[ i ] ); + + if ( value === false ) { + break; + } + } + } + } + + return obj; + }, + + // Use native String.trim function wherever possible + trim: trim && !trim.call("\uFEFF\xA0") ? + function( text ) { + return text == null ? 
+ "" : + trim.call( text ); + } : + + // Otherwise use our own trimming functionality + function( text ) { + return text == null ? + "" : + ( text + "" ).replace( rtrim, "" ); + }, + + // results is for internal usage only + makeArray: function( arr, results ) { + var ret = results || []; + + if ( arr != null ) { + if ( isArraylike( Object(arr) ) ) { + jQuery.merge( ret, + typeof arr === "string" ? + [ arr ] : arr + ); + } else { + push.call( ret, arr ); + } + } + + return ret; + }, + + inArray: function( elem, arr, i ) { + var len; + + if ( arr ) { + if ( indexOf ) { + return indexOf.call( arr, elem, i ); + } + + len = arr.length; + i = i ? i < 0 ? Math.max( 0, len + i ) : i : 0; + + for ( ; i < len; i++ ) { + // Skip accessing in sparse arrays + if ( i in arr && arr[ i ] === elem ) { + return i; + } + } + } + + return -1; + }, + + merge: function( first, second ) { + var len = +second.length, + j = 0, + i = first.length; + + while ( j < len ) { + first[ i++ ] = second[ j++ ]; + } + + // Support: IE<9 + // Workaround casting of .length to NaN on otherwise arraylike objects (e.g., NodeLists) + if ( len !== len ) { + while ( second[j] !== undefined ) { + first[ i++ ] = second[ j++ ]; + } + } + + first.length = i; + + return first; + }, + + grep: function( elems, callback, invert ) { + var callbackInverse, + matches = [], + i = 0, + length = elems.length, + callbackExpect = !invert; + + // Go through the array, only saving the items + // that pass the validator function + for ( ; i < length; i++ ) { + callbackInverse = !callback( elems[ i ], i ); + if ( callbackInverse !== callbackExpect ) { + matches.push( elems[ i ] ); + } + } + + return matches; + }, + + // arg is for internal usage only + map: function( elems, callback, arg ) { + var value, + i = 0, + length = elems.length, + isArray = isArraylike( elems ), + ret = []; + + // Go through the array, translating each of the items to their new values + if ( isArray ) { + for ( ; i < length; i++ ) { + value = 
callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + + // Go through every key on the object, + } else { + for ( i in elems ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + } + + // Flatten any nested arrays + return concat.apply( [], ret ); + }, + + // A global GUID counter for objects + guid: 1, + + // Bind a function to a context, optionally partially applying any + // arguments. + proxy: function( fn, context ) { + var args, proxy, tmp; + + if ( typeof context === "string" ) { + tmp = fn[ context ]; + context = fn; + fn = tmp; + } + + // Quick check to determine if target is callable, in the spec + // this throws a TypeError, but we will just return undefined. + if ( !jQuery.isFunction( fn ) ) { + return undefined; + } + + // Simulated bind + args = slice.call( arguments, 2 ); + proxy = function() { + return fn.apply( context || this, args.concat( slice.call( arguments ) ) ); + }; + + // Set the guid of unique handler to the same of original handler, so it can be removed + proxy.guid = fn.guid = fn.guid || jQuery.guid++; + + return proxy; + }, + + now: function() { + return +( new Date() ); + }, + + // jQuery.support is not used in Core but other projects attach their + // properties to it so it needs to exist. + support: support +}); + +// Populate the class2type map +jQuery.each("Boolean Number String Function Array Date RegExp Object Error".split(" "), function(i, name) { + class2type[ "[object " + name + "]" ] = name.toLowerCase(); +}); + +function isArraylike( obj ) { + var length = obj.length, + type = jQuery.type( obj ); + + if ( type === "function" || jQuery.isWindow( obj ) ) { + return false; + } + + if ( obj.nodeType === 1 && length ) { + return true; + } + + return type === "array" || length === 0 || + typeof length === "number" && length > 0 && ( length - 1 ) in obj; +} +var Sizzle = +/*! 
+ * Sizzle CSS Selector Engine v1.10.16 + * http://sizzlejs.com/ + * + * Copyright 2013 jQuery Foundation, Inc. and other contributors + * Released under the MIT license + * http://jquery.org/license + * + * Date: 2014-01-13 + */ +(function( window ) { + +var i, + support, + Expr, + getText, + isXML, + compile, + outermostContext, + sortInput, + hasDuplicate, + + // Local document vars + setDocument, + document, + docElem, + documentIsHTML, + rbuggyQSA, + rbuggyMatches, + matches, + contains, + + // Instance-specific data + expando = "sizzle" + -(new Date()), + preferredDoc = window.document, + dirruns = 0, + done = 0, + classCache = createCache(), + tokenCache = createCache(), + compilerCache = createCache(), + sortOrder = function( a, b ) { + if ( a === b ) { + hasDuplicate = true; + } + return 0; + }, + + // General-purpose constants + strundefined = typeof undefined, + MAX_NEGATIVE = 1 << 31, + + // Instance methods + hasOwn = ({}).hasOwnProperty, + arr = [], + pop = arr.pop, + push_native = arr.push, + push = arr.push, + slice = arr.slice, + // Use a stripped-down indexOf if we can't use a native one + indexOf = arr.indexOf || function( elem ) { + var i = 0, + len = this.length; + for ( ; i < len; i++ ) { + if ( this[i] === elem ) { + return i; + } + } + return -1; + }, + + booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped", + + // Regular expressions + + // Whitespace characters http://www.w3.org/TR/css3-selectors/#whitespace + whitespace = "[\\x20\\t\\r\\n\\f]", + // http://www.w3.org/TR/css3-syntax/#characters + characterEncoding = "(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+", + + // Loosely modeled on CSS identifier characters + // An unquoted value should be a CSS identifier http://www.w3.org/TR/css3-selectors/#attribute-selectors + // Proper syntax: http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier + identifier = characterEncoding.replace( "w", "w#" ), + + // Acceptable 
operators http://www.w3.org/TR/selectors/#attribute-selectors + attributes = "\\[" + whitespace + "*(" + characterEncoding + ")" + whitespace + + "*(?:([*^$|!~]?=)" + whitespace + "*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|(" + identifier + ")|)|)" + whitespace + "*\\]", + + // Prefer arguments quoted, + // then not containing pseudos/brackets, + // then attribute selectors/non-parenthetical expressions, + // then anything else + // These preferences are here to reduce the number of selectors + // needing tokenize in the PSEUDO preFilter + pseudos = ":(" + characterEncoding + ")(?:\\(((['\"])((?:\\\\.|[^\\\\])*?)\\3|((?:\\\\.|[^\\\\()[\\]]|" + attributes.replace( 3, 8 ) + ")*)|.*)\\)|)", + + // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter + rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + whitespace + "+$", "g" ), + + rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), + rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + "*" ), + + rattributeQuotes = new RegExp( "=" + whitespace + "*([^\\]'\"]*?)" + whitespace + "*\\]", "g" ), + + rpseudo = new RegExp( pseudos ), + ridentifier = new RegExp( "^" + identifier + "$" ), + + matchExpr = { + "ID": new RegExp( "^#(" + characterEncoding + ")" ), + "CLASS": new RegExp( "^\\.(" + characterEncoding + ")" ), + "TAG": new RegExp( "^(" + characterEncoding.replace( "w", "w*" ) + ")" ), + "ATTR": new RegExp( "^" + attributes ), + "PSEUDO": new RegExp( "^" + pseudos ), + "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + whitespace + + "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + whitespace + + "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), + "bool": new RegExp( "^(?:" + booleans + ")$", "i" ), + // For use in libraries implementing .is() + // We use this for POS matching in `select` + "needsContext": new RegExp( "^" + whitespace + 
"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + + whitespace + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) + }, + + rinputs = /^(?:input|select|textarea|button)$/i, + rheader = /^h\d$/i, + + rnative = /^[^{]+\{\s*\[native \w/, + + // Easily-parseable/retrievable ID or TAG or CLASS selectors + rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, + + rsibling = /[+~]/, + rescape = /'|\\/g, + + // CSS escapes http://www.w3.org/TR/CSS21/syndata.html#escaped-characters + runescape = new RegExp( "\\\\([\\da-f]{1,6}" + whitespace + "?|(" + whitespace + ")|.)", "ig" ), + funescape = function( _, escaped, escapedWhitespace ) { + var high = "0x" + escaped - 0x10000; + // NaN means non-codepoint + // Support: Firefox + // Workaround erroneous numeric interpretation of +"0x" + return high !== high || escapedWhitespace ? + escaped : + high < 0 ? + // BMP codepoint + String.fromCharCode( high + 0x10000 ) : + // Supplemental Plane codepoint (surrogate pair) + String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 ); + }; + +// Optimize for push.apply( _, NodeList ) +try { + push.apply( + (arr = slice.call( preferredDoc.childNodes )), + preferredDoc.childNodes + ); + // Support: Android<4.0 + // Detect silently failing push.apply + arr[ preferredDoc.childNodes.length ].nodeType; +} catch ( e ) { + push = { apply: arr.length ? + + // Leverage slice if possible + function( target, els ) { + push_native.apply( target, slice.call(els) ); + } : + + // Support: IE<9 + // Otherwise append directly + function( target, els ) { + var j = target.length, + i = 0; + // Can't trust NodeList.length + while ( (target[j++] = els[i++]) ) {} + target.length = j - 1; + } + }; +} + +function Sizzle( selector, context, results, seed ) { + var match, elem, m, nodeType, + // QSA vars + i, groups, old, nid, newContext, newSelector; + + if ( ( context ? 
context.ownerDocument || context : preferredDoc ) !== document ) { + setDocument( context ); + } + + context = context || document; + results = results || []; + + if ( !selector || typeof selector !== "string" ) { + return results; + } + + if ( (nodeType = context.nodeType) !== 1 && nodeType !== 9 ) { + return []; + } + + if ( documentIsHTML && !seed ) { + + // Shortcuts + if ( (match = rquickExpr.exec( selector )) ) { + // Speed-up: Sizzle("#ID") + if ( (m = match[1]) ) { + if ( nodeType === 9 ) { + elem = context.getElementById( m ); + // Check parentNode to catch when Blackberry 4.6 returns + // nodes that are no longer in the document (jQuery #6963) + if ( elem && elem.parentNode ) { + // Handle the case where IE, Opera, and Webkit return items + // by name instead of ID + if ( elem.id === m ) { + results.push( elem ); + return results; + } + } else { + return results; + } + } else { + // Context is not a document + if ( context.ownerDocument && (elem = context.ownerDocument.getElementById( m )) && + contains( context, elem ) && elem.id === m ) { + results.push( elem ); + return results; + } + } + + // Speed-up: Sizzle("TAG") + } else if ( match[2] ) { + push.apply( results, context.getElementsByTagName( selector ) ); + return results; + + // Speed-up: Sizzle(".CLASS") + } else if ( (m = match[3]) && support.getElementsByClassName && context.getElementsByClassName ) { + push.apply( results, context.getElementsByClassName( m ) ); + return results; + } + } + + // QSA path + if ( support.qsa && (!rbuggyQSA || !rbuggyQSA.test( selector )) ) { + nid = old = expando; + newContext = context; + newSelector = nodeType === 9 && selector; + + // qSA works strangely on Element-rooted queries + // We can work around this by specifying an extra ID on the root + // and working up from there (Thanks to Andrew Dupont for the technique) + // IE 8 doesn't work on object elements + if ( nodeType === 1 && context.nodeName.toLowerCase() !== "object" ) { + groups = tokenize( selector 
); + + if ( (old = context.getAttribute("id")) ) { + nid = old.replace( rescape, "\\$&" ); + } else { + context.setAttribute( "id", nid ); + } + nid = "[id='" + nid + "'] "; + + i = groups.length; + while ( i-- ) { + groups[i] = nid + toSelector( groups[i] ); + } + newContext = rsibling.test( selector ) && testContext( context.parentNode ) || context; + newSelector = groups.join(","); + } + + if ( newSelector ) { + try { + push.apply( results, + newContext.querySelectorAll( newSelector ) + ); + return results; + } catch(qsaError) { + } finally { + if ( !old ) { + context.removeAttribute("id"); + } + } + } + } + } + + // All others + return select( selector.replace( rtrim, "$1" ), context, results, seed ); +} + +/** + * Create key-value caches of limited size + * @returns {Function(string, Object)} Returns the Object data after storing it on itself with + * property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength) + * deleting the oldest entry + */ +function createCache() { + var keys = []; + + function cache( key, value ) { + // Use (key + " ") to avoid collision with native prototype properties (see Issue #157) + if ( keys.push( key + " " ) > Expr.cacheLength ) { + // Only keep the most recent entries + delete cache[ keys.shift() ]; + } + return (cache[ key + " " ] = value); + } + return cache; +} + +/** + * Mark a function for special use by Sizzle + * @param {Function} fn The function to mark + */ +function markFunction( fn ) { + fn[ expando ] = true; + return fn; +} + +/** + * Support testing using an element + * @param {Function} fn Passed the created div and expects a boolean result + */ +function assert( fn ) { + var div = document.createElement("div"); + + try { + return !!fn( div ); + } catch (e) { + return false; + } finally { + // Remove from its parent by default + if ( div.parentNode ) { + div.parentNode.removeChild( div ); + } + // release memory in IE + div = null; + } +} + +/** + * Adds the same handler for all of 
the specified attrs + * @param {String} attrs Pipe-separated list of attributes + * @param {Function} handler The method that will be applied + */ +function addHandle( attrs, handler ) { + var arr = attrs.split("|"), + i = attrs.length; + + while ( i-- ) { + Expr.attrHandle[ arr[i] ] = handler; + } +} + +/** + * Checks document order of two siblings + * @param {Element} a + * @param {Element} b + * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b + */ +function siblingCheck( a, b ) { + var cur = b && a, + diff = cur && a.nodeType === 1 && b.nodeType === 1 && + ( ~b.sourceIndex || MAX_NEGATIVE ) - + ( ~a.sourceIndex || MAX_NEGATIVE ); + + // Use IE sourceIndex if available on both nodes + if ( diff ) { + return diff; + } + + // Check if b follows a + if ( cur ) { + while ( (cur = cur.nextSibling) ) { + if ( cur === b ) { + return -1; + } + } + } + + return a ? 1 : -1; +} + +/** + * Returns a function to use in pseudos for input types + * @param {String} type + */ +function createInputPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for buttons + * @param {String} type + */ +function createButtonPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return (name === "input" || name === "button") && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for positionals + * @param {Function} fn + */ +function createPositionalPseudo( fn ) { + return markFunction(function( argument ) { + argument = +argument; + return markFunction(function( seed, matches ) { + var j, + matchIndexes = fn( [], seed.length, argument ), + i = matchIndexes.length; + + // Match elements found at the specified indexes + while ( i-- ) { + if ( seed[ (j = matchIndexes[i]) ] ) { + seed[j] = !(matches[j] = seed[j]); + } + } + }); + }); +} + +/** + * Checks a node for 
validity as a Sizzle context + * @param {Element|Object=} context + * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value + */ +function testContext( context ) { + return context && typeof context.getElementsByTagName !== strundefined && context; +} + +// Expose support vars for convenience +support = Sizzle.support = {}; + +/** + * Detects XML nodes + * @param {Element|Object} elem An element or a document + * @returns {Boolean} True iff elem is a non-HTML XML node + */ +isXML = Sizzle.isXML = function( elem ) { + // documentElement is verified for cases where it doesn't yet exist + // (such as loading iframes in IE - #4833) + var documentElement = elem && (elem.ownerDocument || elem).documentElement; + return documentElement ? documentElement.nodeName !== "HTML" : false; +}; + +/** + * Sets document-related variables once based on the current document + * @param {Element|Object} [doc] An element or document object to use to set the document + * @returns {Object} Returns the current document + */ +setDocument = Sizzle.setDocument = function( node ) { + var hasCompare, + doc = node ? 
node.ownerDocument || node : preferredDoc, + parent = doc.defaultView; + + // If no document and documentElement is available, return + if ( doc === document || doc.nodeType !== 9 || !doc.documentElement ) { + return document; + } + + // Set our document + document = doc; + docElem = doc.documentElement; + + // Support tests + documentIsHTML = !isXML( doc ); + + // Support: IE>8 + // If iframe document is assigned to "document" variable and if iframe has been reloaded, + // IE will throw "permission denied" error when accessing "document" variable, see jQuery #13936 + // IE6-8 do not support the defaultView property so parent will be undefined + if ( parent && parent !== parent.top ) { + // IE11 does not have attachEvent, so all must suffer + if ( parent.addEventListener ) { + parent.addEventListener( "unload", function() { + setDocument(); + }, false ); + } else if ( parent.attachEvent ) { + parent.attachEvent( "onunload", function() { + setDocument(); + }); + } + } + + /* Attributes + ---------------------------------------------------------------------- */ + + // Support: IE<8 + // Verify that getAttribute really returns attributes and not properties (excepting IE8 booleans) + support.attributes = assert(function( div ) { + div.className = "i"; + return !div.getAttribute("className"); + }); + + /* getElement(s)By* + ---------------------------------------------------------------------- */ + + // Check if getElementsByTagName("*") returns only elements + support.getElementsByTagName = assert(function( div ) { + div.appendChild( doc.createComment("") ); + return !div.getElementsByTagName("*").length; + }); + + // Check if getElementsByClassName can be trusted + support.getElementsByClassName = rnative.test( doc.getElementsByClassName ) && assert(function( div ) { + div.innerHTML = "
"; + + // Support: Safari<4 + // Catch class over-caching + div.firstChild.className = "i"; + // Support: Opera<10 + // Catch gEBCN failure to find non-leading classes + return div.getElementsByClassName("i").length === 2; + }); + + // Support: IE<10 + // Check if getElementById returns elements by name + // The broken getElementById methods don't pick up programatically-set names, + // so use a roundabout getElementsByName test + support.getById = assert(function( div ) { + docElem.appendChild( div ).id = expando; + return !doc.getElementsByName || !doc.getElementsByName( expando ).length; + }); + + // ID find and filter + if ( support.getById ) { + Expr.find["ID"] = function( id, context ) { + if ( typeof context.getElementById !== strundefined && documentIsHTML ) { + var m = context.getElementById( id ); + // Check parentNode to catch when Blackberry 4.6 returns + // nodes that are no longer in the document #6963 + return m && m.parentNode ? [m] : []; + } + }; + Expr.filter["ID"] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + return elem.getAttribute("id") === attrId; + }; + }; + } else { + // Support: IE6/7 + // getElementById is not reliable as a find shortcut + delete Expr.find["ID"]; + + Expr.filter["ID"] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + var node = typeof elem.getAttributeNode !== strundefined && elem.getAttributeNode("id"); + return node && node.value === attrId; + }; + }; + } + + // Tag + Expr.find["TAG"] = support.getElementsByTagName ? 
+ function( tag, context ) { + if ( typeof context.getElementsByTagName !== strundefined ) { + return context.getElementsByTagName( tag ); + } + } : + function( tag, context ) { + var elem, + tmp = [], + i = 0, + results = context.getElementsByTagName( tag ); + + // Filter out possible comments + if ( tag === "*" ) { + while ( (elem = results[i++]) ) { + if ( elem.nodeType === 1 ) { + tmp.push( elem ); + } + } + + return tmp; + } + return results; + }; + + // Class + Expr.find["CLASS"] = support.getElementsByClassName && function( className, context ) { + if ( typeof context.getElementsByClassName !== strundefined && documentIsHTML ) { + return context.getElementsByClassName( className ); + } + }; + + /* QSA/matchesSelector + ---------------------------------------------------------------------- */ + + // QSA and matchesSelector support + + // matchesSelector(:active) reports false when true (IE9/Opera 11.5) + rbuggyMatches = []; + + // qSa(:focus) reports false when true (Chrome 21) + // We allow this because of a bug in IE8/9 that throws an error + // whenever `document.activeElement` is accessed on an iframe + // So, we allow :focus to pass through QSA all the time to avoid the IE error + // See http://bugs.jquery.com/ticket/13378 + rbuggyQSA = []; + + if ( (support.qsa = rnative.test( doc.querySelectorAll )) ) { + // Build QSA regex + // Regex strategy adopted from Diego Perini + assert(function( div ) { + // Select is set to empty string on purpose + // This is to test IE's treatment of not explicitly + // setting a boolean content attribute, + // since its presence should be enough + // http://bugs.jquery.com/ticket/12359 + div.innerHTML = ""; + + // Support: IE8, Opera 10-12 + // Nothing should be selected when empty strings follow ^= or $= or *= + if ( div.querySelectorAll("[t^='']").length ) { + rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" ); + } + + // Support: IE8 + // Boolean attributes and "value" are not treated correctly + if ( 
!div.querySelectorAll("[selected]").length ) { + rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" ); + } + + // Webkit/Opera - :checked should return selected option elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + // IE8 throws error here and will not see later tests + if ( !div.querySelectorAll(":checked").length ) { + rbuggyQSA.push(":checked"); + } + }); + + assert(function( div ) { + // Support: Windows 8 Native Apps + // The type and name attributes are restricted during .innerHTML assignment + var input = doc.createElement("input"); + input.setAttribute( "type", "hidden" ); + div.appendChild( input ).setAttribute( "name", "D" ); + + // Support: IE8 + // Enforce case-sensitivity of name attribute + if ( div.querySelectorAll("[name=d]").length ) { + rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" ); + } + + // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled) + // IE8 throws error here and will not see later tests + if ( !div.querySelectorAll(":enabled").length ) { + rbuggyQSA.push( ":enabled", ":disabled" ); + } + + // Opera 10-11 does not throw on post-comma invalid pseudos + div.querySelectorAll("*,:x"); + rbuggyQSA.push(",.*:"); + }); + } + + if ( (support.matchesSelector = rnative.test( (matches = docElem.webkitMatchesSelector || + docElem.mozMatchesSelector || + docElem.oMatchesSelector || + docElem.msMatchesSelector) )) ) { + + assert(function( div ) { + // Check to see if it's possible to do matchesSelector + // on a disconnected node (IE 9) + support.disconnectedMatch = matches.call( div, "div" ); + + // This should fail with an exception + // Gecko does not error, returns false instead + matches.call( div, "[s!='']:x" ); + rbuggyMatches.push( "!=", pseudos ); + }); + } + + rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join("|") ); + rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join("|") ); + + /* Contains + 
---------------------------------------------------------------------- */ + hasCompare = rnative.test( docElem.compareDocumentPosition ); + + // Element contains another + // Purposefully does not implement inclusive descendent + // As in, an element does not contain itself + contains = hasCompare || rnative.test( docElem.contains ) ? + function( a, b ) { + var adown = a.nodeType === 9 ? a.documentElement : a, + bup = b && b.parentNode; + return a === bup || !!( bup && bup.nodeType === 1 && ( + adown.contains ? + adown.contains( bup ) : + a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16 + )); + } : + function( a, b ) { + if ( b ) { + while ( (b = b.parentNode) ) { + if ( b === a ) { + return true; + } + } + } + return false; + }; + + /* Sorting + ---------------------------------------------------------------------- */ + + // Document order sorting + sortOrder = hasCompare ? + function( a, b ) { + + // Flag for duplicate removal + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + // Sort on method existence if only one input has compareDocumentPosition + var compare = !a.compareDocumentPosition - !b.compareDocumentPosition; + if ( compare ) { + return compare; + } + + // Calculate position if both inputs belong to the same document + compare = ( a.ownerDocument || a ) === ( b.ownerDocument || b ) ? + a.compareDocumentPosition( b ) : + + // Otherwise we know they are disconnected + 1; + + // Disconnected nodes + if ( compare & 1 || + (!support.sortDetached && b.compareDocumentPosition( a ) === compare) ) { + + // Choose the first element that is related to our preferred document + if ( a === doc || a.ownerDocument === preferredDoc && contains(preferredDoc, a) ) { + return -1; + } + if ( b === doc || b.ownerDocument === preferredDoc && contains(preferredDoc, b) ) { + return 1; + } + + // Maintain original order + return sortInput ? + ( indexOf.call( sortInput, a ) - indexOf.call( sortInput, b ) ) : + 0; + } + + return compare & 4 ? 
-1 : 1; + } : + function( a, b ) { + // Exit early if the nodes are identical + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + var cur, + i = 0, + aup = a.parentNode, + bup = b.parentNode, + ap = [ a ], + bp = [ b ]; + + // Parentless nodes are either documents or disconnected + if ( !aup || !bup ) { + return a === doc ? -1 : + b === doc ? 1 : + aup ? -1 : + bup ? 1 : + sortInput ? + ( indexOf.call( sortInput, a ) - indexOf.call( sortInput, b ) ) : + 0; + + // If the nodes are siblings, we can do a quick check + } else if ( aup === bup ) { + return siblingCheck( a, b ); + } + + // Otherwise we need full lists of their ancestors for comparison + cur = a; + while ( (cur = cur.parentNode) ) { + ap.unshift( cur ); + } + cur = b; + while ( (cur = cur.parentNode) ) { + bp.unshift( cur ); + } + + // Walk down the tree looking for a discrepancy + while ( ap[i] === bp[i] ) { + i++; + } + + return i ? + // Do a sibling check if the nodes have a common ancestor + siblingCheck( ap[i], bp[i] ) : + + // Otherwise nodes in our document sort first + ap[i] === preferredDoc ? -1 : + bp[i] === preferredDoc ? 
1 : + 0; + }; + + return doc; +}; + +Sizzle.matches = function( expr, elements ) { + return Sizzle( expr, null, null, elements ); +}; + +Sizzle.matchesSelector = function( elem, expr ) { + // Set document vars if needed + if ( ( elem.ownerDocument || elem ) !== document ) { + setDocument( elem ); + } + + // Make sure that attribute selectors are quoted + expr = expr.replace( rattributeQuotes, "='$1']" ); + + if ( support.matchesSelector && documentIsHTML && + ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) && + ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) { + + try { + var ret = matches.call( elem, expr ); + + // IE 9's matchesSelector returns false on disconnected nodes + if ( ret || support.disconnectedMatch || + // As well, disconnected nodes are said to be in a document + // fragment in IE 9 + elem.document && elem.document.nodeType !== 11 ) { + return ret; + } + } catch(e) {} + } + + return Sizzle( expr, document, null, [elem] ).length > 0; +}; + +Sizzle.contains = function( context, elem ) { + // Set document vars if needed + if ( ( context.ownerDocument || context ) !== document ) { + setDocument( context ); + } + return contains( context, elem ); +}; + +Sizzle.attr = function( elem, name ) { + // Set document vars if needed + if ( ( elem.ownerDocument || elem ) !== document ) { + setDocument( elem ); + } + + var fn = Expr.attrHandle[ name.toLowerCase() ], + // Don't get fooled by Object.prototype properties (jQuery #13807) + val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ? + fn( elem, name, !documentIsHTML ) : + undefined; + + return val !== undefined ? + val : + support.attributes || !documentIsHTML ? + elem.getAttribute( name ) : + (val = elem.getAttributeNode(name)) && val.specified ? 
+ val.value : + null; +}; + +Sizzle.error = function( msg ) { + throw new Error( "Syntax error, unrecognized expression: " + msg ); +}; + +/** + * Document sorting and removing duplicates + * @param {ArrayLike} results + */ +Sizzle.uniqueSort = function( results ) { + var elem, + duplicates = [], + j = 0, + i = 0; + + // Unless we *know* we can detect duplicates, assume their presence + hasDuplicate = !support.detectDuplicates; + sortInput = !support.sortStable && results.slice( 0 ); + results.sort( sortOrder ); + + if ( hasDuplicate ) { + while ( (elem = results[i++]) ) { + if ( elem === results[ i ] ) { + j = duplicates.push( i ); + } + } + while ( j-- ) { + results.splice( duplicates[ j ], 1 ); + } + } + + // Clear input after sorting to release objects + // See https://github.com/jquery/sizzle/pull/225 + sortInput = null; + + return results; +}; + +/** + * Utility function for retrieving the text value of an array of DOM nodes + * @param {Array|Element} elem + */ +getText = Sizzle.getText = function( elem ) { + var node, + ret = "", + i = 0, + nodeType = elem.nodeType; + + if ( !nodeType ) { + // If no nodeType, this is expected to be an array + while ( (node = elem[i++]) ) { + // Do not traverse comment nodes + ret += getText( node ); + } + } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { + // Use textContent for elements + // innerText usage removed for consistency of new lines (jQuery #11153) + if ( typeof elem.textContent === "string" ) { + return elem.textContent; + } else { + // Traverse its children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + ret += getText( elem ); + } + } + } else if ( nodeType === 3 || nodeType === 4 ) { + return elem.nodeValue; + } + // Do not include comment or processing instruction nodes + + return ret; +}; + +Expr = Sizzle.selectors = { + + // Can be adjusted by the user + cacheLength: 50, + + createPseudo: markFunction, + + match: matchExpr, + + attrHandle: {}, + + find: {}, + + 
relative: { + ">": { dir: "parentNode", first: true }, + " ": { dir: "parentNode" }, + "+": { dir: "previousSibling", first: true }, + "~": { dir: "previousSibling" } + }, + + preFilter: { + "ATTR": function( match ) { + match[1] = match[1].replace( runescape, funescape ); + + // Move the given value to match[3] whether quoted or unquoted + match[3] = ( match[4] || match[5] || "" ).replace( runescape, funescape ); + + if ( match[2] === "~=" ) { + match[3] = " " + match[3] + " "; + } + + return match.slice( 0, 4 ); + }, + + "CHILD": function( match ) { + /* matches from matchExpr["CHILD"] + 1 type (only|nth|...) + 2 what (child|of-type) + 3 argument (even|odd|\d*|\d*n([+-]\d+)?|...) + 4 xn-component of xn+y argument ([+-]?\d*n|) + 5 sign of xn-component + 6 x of xn-component + 7 sign of y-component + 8 y of y-component + */ + match[1] = match[1].toLowerCase(); + + if ( match[1].slice( 0, 3 ) === "nth" ) { + // nth-* requires argument + if ( !match[3] ) { + Sizzle.error( match[0] ); + } + + // numeric x and y parameters for Expr.filter.CHILD + // remember that false/true cast respectively to 0/1 + match[4] = +( match[4] ? 
match[5] + (match[6] || 1) : 2 * ( match[3] === "even" || match[3] === "odd" ) ); + match[5] = +( ( match[7] + match[8] ) || match[3] === "odd" ); + + // other types prohibit arguments + } else if ( match[3] ) { + Sizzle.error( match[0] ); + } + + return match; + }, + + "PSEUDO": function( match ) { + var excess, + unquoted = !match[5] && match[2]; + + if ( matchExpr["CHILD"].test( match[0] ) ) { + return null; + } + + // Accept quoted arguments as-is + if ( match[3] && match[4] !== undefined ) { + match[2] = match[4]; + + // Strip excess characters from unquoted arguments + } else if ( unquoted && rpseudo.test( unquoted ) && + // Get excess from tokenize (recursively) + (excess = tokenize( unquoted, true )) && + // advance to the next closing parenthesis + (excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length) ) { + + // excess is a negative index + match[0] = match[0].slice( 0, excess ); + match[2] = unquoted.slice( 0, excess ); + } + + // Return only captures needed by the pseudo filter method (type and argument) + return match.slice( 0, 3 ); + } + }, + + filter: { + + "TAG": function( nodeNameSelector ) { + var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase(); + return nodeNameSelector === "*" ? 
+ function() { return true; } : + function( elem ) { + return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; + }; + }, + + "CLASS": function( className ) { + var pattern = classCache[ className + " " ]; + + return pattern || + (pattern = new RegExp( "(^|" + whitespace + ")" + className + "(" + whitespace + "|$)" )) && + classCache( className, function( elem ) { + return pattern.test( typeof elem.className === "string" && elem.className || typeof elem.getAttribute !== strundefined && elem.getAttribute("class") || "" ); + }); + }, + + "ATTR": function( name, operator, check ) { + return function( elem ) { + var result = Sizzle.attr( elem, name ); + + if ( result == null ) { + return operator === "!="; + } + if ( !operator ) { + return true; + } + + result += ""; + + return operator === "=" ? result === check : + operator === "!=" ? result !== check : + operator === "^=" ? check && result.indexOf( check ) === 0 : + operator === "*=" ? check && result.indexOf( check ) > -1 : + operator === "$=" ? check && result.slice( -check.length ) === check : + operator === "~=" ? ( " " + result + " " ).indexOf( check ) > -1 : + operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" : + false; + }; + }, + + "CHILD": function( type, what, argument, first, last ) { + var simple = type.slice( 0, 3 ) !== "nth", + forward = type.slice( -4 ) !== "last", + ofType = what === "of-type"; + + return first === 1 && last === 0 ? + + // Shortcut for :nth-*(n) + function( elem ) { + return !!elem.parentNode; + } : + + function( elem, context, xml ) { + var cache, outerCache, node, diff, nodeIndex, start, + dir = simple !== forward ? "nextSibling" : "previousSibling", + parent = elem.parentNode, + name = ofType && elem.nodeName.toLowerCase(), + useCache = !xml && !ofType; + + if ( parent ) { + + // :(first|last|only)-(child|of-type) + if ( simple ) { + while ( dir ) { + node = elem; + while ( (node = node[ dir ]) ) { + if ( ofType ? 
node.nodeName.toLowerCase() === name : node.nodeType === 1 ) { + return false; + } + } + // Reverse direction for :only-* (if we haven't yet done so) + start = dir = type === "only" && !start && "nextSibling"; + } + return true; + } + + start = [ forward ? parent.firstChild : parent.lastChild ]; + + // non-xml :nth-child(...) stores cache data on `parent` + if ( forward && useCache ) { + // Seek `elem` from a previously-cached index + outerCache = parent[ expando ] || (parent[ expando ] = {}); + cache = outerCache[ type ] || []; + nodeIndex = cache[0] === dirruns && cache[1]; + diff = cache[0] === dirruns && cache[2]; + node = nodeIndex && parent.childNodes[ nodeIndex ]; + + while ( (node = ++nodeIndex && node && node[ dir ] || + + // Fallback to seeking `elem` from the start + (diff = nodeIndex = 0) || start.pop()) ) { + + // When found, cache indexes on `parent` and break + if ( node.nodeType === 1 && ++diff && node === elem ) { + outerCache[ type ] = [ dirruns, nodeIndex, diff ]; + break; + } + } + + // Use previously-cached element index if available + } else if ( useCache && (cache = (elem[ expando ] || (elem[ expando ] = {}))[ type ]) && cache[0] === dirruns ) { + diff = cache[1]; + + // xml :nth-child(...) or :nth-last-child(...) or :nth(-last)?-of-type(...) + } else { + // Use the same loop as above to seek `elem` from the start + while ( (node = ++nodeIndex && node && node[ dir ] || + (diff = nodeIndex = 0) || start.pop()) ) { + + if ( ( ofType ? 
node.nodeName.toLowerCase() === name : node.nodeType === 1 ) && ++diff ) { + // Cache the index of each encountered element + if ( useCache ) { + (node[ expando ] || (node[ expando ] = {}))[ type ] = [ dirruns, diff ]; + } + + if ( node === elem ) { + break; + } + } + } + } + + // Incorporate the offset, then check against cycle size + diff -= last; + return diff === first || ( diff % first === 0 && diff / first >= 0 ); + } + }; + }, + + "PSEUDO": function( pseudo, argument ) { + // pseudo-class names are case-insensitive + // http://www.w3.org/TR/selectors/#pseudo-classes + // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters + // Remember that setFilters inherits from pseudos + var args, + fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || + Sizzle.error( "unsupported pseudo: " + pseudo ); + + // The user may use createPseudo to indicate that + // arguments are needed to create the filter function + // just as Sizzle does + if ( fn[ expando ] ) { + return fn( argument ); + } + + // But maintain support for old signatures + if ( fn.length > 1 ) { + args = [ pseudo, pseudo, "", argument ]; + return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? + markFunction(function( seed, matches ) { + var idx, + matched = fn( seed, argument ), + i = matched.length; + while ( i-- ) { + idx = indexOf.call( seed, matched[i] ); + seed[ idx ] = !( matches[ idx ] = matched[i] ); + } + }) : + function( elem ) { + return fn( elem, 0, args ); + }; + } + + return fn; + } + }, + + pseudos: { + // Potentially complex pseudos + "not": markFunction(function( selector ) { + // Trim the selector passed to compile + // to avoid treating leading and trailing + // spaces as combinators + var input = [], + results = [], + matcher = compile( selector.replace( rtrim, "$1" ) ); + + return matcher[ expando ] ? 
+ markFunction(function( seed, matches, context, xml ) { + var elem, + unmatched = matcher( seed, null, xml, [] ), + i = seed.length; + + // Match elements unmatched by `matcher` + while ( i-- ) { + if ( (elem = unmatched[i]) ) { + seed[i] = !(matches[i] = elem); + } + } + }) : + function( elem, context, xml ) { + input[0] = elem; + matcher( input, null, xml, results ); + return !results.pop(); + }; + }), + + "has": markFunction(function( selector ) { + return function( elem ) { + return Sizzle( selector, elem ).length > 0; + }; + }), + + "contains": markFunction(function( text ) { + return function( elem ) { + return ( elem.textContent || elem.innerText || getText( elem ) ).indexOf( text ) > -1; + }; + }), + + // "Whether an element is represented by a :lang() selector + // is based solely on the element's language value + // being equal to the identifier C, + // or beginning with the identifier C immediately followed by "-". + // The matching of C against the element's language value is performed case-insensitively. + // The identifier C does not have to be a valid language name." + // http://www.w3.org/TR/selectors/#lang-pseudo + "lang": markFunction( function( lang ) { + // lang value must be a valid identifier + if ( !ridentifier.test(lang || "") ) { + Sizzle.error( "unsupported lang: " + lang ); + } + lang = lang.replace( runescape, funescape ).toLowerCase(); + return function( elem ) { + var elemLang; + do { + if ( (elemLang = documentIsHTML ? 
+ elem.lang : + elem.getAttribute("xml:lang") || elem.getAttribute("lang")) ) { + + elemLang = elemLang.toLowerCase(); + return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0; + } + } while ( (elem = elem.parentNode) && elem.nodeType === 1 ); + return false; + }; + }), + + // Miscellaneous + "target": function( elem ) { + var hash = window.location && window.location.hash; + return hash && hash.slice( 1 ) === elem.id; + }, + + "root": function( elem ) { + return elem === docElem; + }, + + "focus": function( elem ) { + return elem === document.activeElement && (!document.hasFocus || document.hasFocus()) && !!(elem.type || elem.href || ~elem.tabIndex); + }, + + // Boolean properties + "enabled": function( elem ) { + return elem.disabled === false; + }, + + "disabled": function( elem ) { + return elem.disabled === true; + }, + + "checked": function( elem ) { + // In CSS3, :checked should return both checked and selected elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + var nodeName = elem.nodeName.toLowerCase(); + return (nodeName === "input" && !!elem.checked) || (nodeName === "option" && !!elem.selected); + }, + + "selected": function( elem ) { + // Accessing this property makes selected-by-default + // options in Safari work properly + if ( elem.parentNode ) { + elem.parentNode.selectedIndex; + } + + return elem.selected === true; + }, + + // Contents + "empty": function( elem ) { + // http://www.w3.org/TR/selectors/#empty-pseudo + // :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5), + // but not by others (comment: 8; processing instruction: 7; etc.) 
+ // nodeType < 6 works because attributes (2) do not appear as children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + if ( elem.nodeType < 6 ) { + return false; + } + } + return true; + }, + + "parent": function( elem ) { + return !Expr.pseudos["empty"]( elem ); + }, + + // Element/input types + "header": function( elem ) { + return rheader.test( elem.nodeName ); + }, + + "input": function( elem ) { + return rinputs.test( elem.nodeName ); + }, + + "button": function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === "button" || name === "button"; + }, + + "text": function( elem ) { + var attr; + return elem.nodeName.toLowerCase() === "input" && + elem.type === "text" && + + // Support: IE<8 + // New HTML5 attribute values (e.g., "search") appear with elem.type === "text" + ( (attr = elem.getAttribute("type")) == null || attr.toLowerCase() === "text" ); + }, + + // Position-in-collection + "first": createPositionalPseudo(function() { + return [ 0 ]; + }), + + "last": createPositionalPseudo(function( matchIndexes, length ) { + return [ length - 1 ]; + }), + + "eq": createPositionalPseudo(function( matchIndexes, length, argument ) { + return [ argument < 0 ? argument + length : argument ]; + }), + + "even": createPositionalPseudo(function( matchIndexes, length ) { + var i = 0; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + }), + + "odd": createPositionalPseudo(function( matchIndexes, length ) { + var i = 1; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + }), + + "lt": createPositionalPseudo(function( matchIndexes, length, argument ) { + var i = argument < 0 ? argument + length : argument; + for ( ; --i >= 0; ) { + matchIndexes.push( i ); + } + return matchIndexes; + }), + + "gt": createPositionalPseudo(function( matchIndexes, length, argument ) { + var i = argument < 0 ? 
argument + length : argument; + for ( ; ++i < length; ) { + matchIndexes.push( i ); + } + return matchIndexes; + }) + } +}; + +Expr.pseudos["nth"] = Expr.pseudos["eq"]; + +// Add button/input type pseudos +for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) { + Expr.pseudos[ i ] = createInputPseudo( i ); +} +for ( i in { submit: true, reset: true } ) { + Expr.pseudos[ i ] = createButtonPseudo( i ); +} + +// Easy API for creating new setFilters +function setFilters() {} +setFilters.prototype = Expr.filters = Expr.pseudos; +Expr.setFilters = new setFilters(); + +function tokenize( selector, parseOnly ) { + var matched, match, tokens, type, + soFar, groups, preFilters, + cached = tokenCache[ selector + " " ]; + + if ( cached ) { + return parseOnly ? 0 : cached.slice( 0 ); + } + + soFar = selector; + groups = []; + preFilters = Expr.preFilter; + + while ( soFar ) { + + // Comma and first run + if ( !matched || (match = rcomma.exec( soFar )) ) { + if ( match ) { + // Don't consume trailing commas as valid + soFar = soFar.slice( match[0].length ) || soFar; + } + groups.push( (tokens = []) ); + } + + matched = false; + + // Combinators + if ( (match = rcombinators.exec( soFar )) ) { + matched = match.shift(); + tokens.push({ + value: matched, + // Cast descendant combinators to space + type: match[0].replace( rtrim, " " ) + }); + soFar = soFar.slice( matched.length ); + } + + // Filters + for ( type in Expr.filter ) { + if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] || + (match = preFilters[ type ]( match ))) ) { + matched = match.shift(); + tokens.push({ + value: matched, + type: type, + matches: match + }); + soFar = soFar.slice( matched.length ); + } + } + + if ( !matched ) { + break; + } + } + + // Return the length of the invalid excess + // if we're just parsing + // Otherwise, throw an error or return tokens + return parseOnly ? + soFar.length : + soFar ? 
+ Sizzle.error( selector ) : + // Cache the tokens + tokenCache( selector, groups ).slice( 0 ); +} + +function toSelector( tokens ) { + var i = 0, + len = tokens.length, + selector = ""; + for ( ; i < len; i++ ) { + selector += tokens[i].value; + } + return selector; +} + +function addCombinator( matcher, combinator, base ) { + var dir = combinator.dir, + checkNonElements = base && dir === "parentNode", + doneName = done++; + + return combinator.first ? + // Check against closest ancestor/preceding element + function( elem, context, xml ) { + while ( (elem = elem[ dir ]) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + return matcher( elem, context, xml ); + } + } + } : + + // Check against all ancestor/preceding elements + function( elem, context, xml ) { + var oldCache, outerCache, + newCache = [ dirruns, doneName ]; + + // We can't set arbitrary data on XML nodes, so they don't benefit from dir caching + if ( xml ) { + while ( (elem = elem[ dir ]) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + if ( matcher( elem, context, xml ) ) { + return true; + } + } + } + } else { + while ( (elem = elem[ dir ]) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + outerCache = elem[ expando ] || (elem[ expando ] = {}); + if ( (oldCache = outerCache[ dir ]) && + oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) { + + // Assign to newCache so results back-propagate to previous elements + return (newCache[ 2 ] = oldCache[ 2 ]); + } else { + // Reuse newcache so results back-propagate to previous elements + outerCache[ dir ] = newCache; + + // A match means we're done; a fail means we have to keep checking + if ( (newCache[ 2 ] = matcher( elem, context, xml )) ) { + return true; + } + } + } + } + } + }; +} + +function elementMatcher( matchers ) { + return matchers.length > 1 ? 
+ function( elem, context, xml ) { + var i = matchers.length; + while ( i-- ) { + if ( !matchers[i]( elem, context, xml ) ) { + return false; + } + } + return true; + } : + matchers[0]; +} + +function condense( unmatched, map, filter, context, xml ) { + var elem, + newUnmatched = [], + i = 0, + len = unmatched.length, + mapped = map != null; + + for ( ; i < len; i++ ) { + if ( (elem = unmatched[i]) ) { + if ( !filter || filter( elem, context, xml ) ) { + newUnmatched.push( elem ); + if ( mapped ) { + map.push( i ); + } + } + } + } + + return newUnmatched; +} + +function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { + if ( postFilter && !postFilter[ expando ] ) { + postFilter = setMatcher( postFilter ); + } + if ( postFinder && !postFinder[ expando ] ) { + postFinder = setMatcher( postFinder, postSelector ); + } + return markFunction(function( seed, results, context, xml ) { + var temp, i, elem, + preMap = [], + postMap = [], + preexisting = results.length, + + // Get initial elements from seed or context + elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ), + + // Prefilter to get matcher input, preserving a map for seed-results synchronization + matcherIn = preFilter && ( seed || !selector ) ? + condense( elems, preMap, preFilter, context, xml ) : + elems, + + matcherOut = matcher ? + // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, + postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
+ + // ...intermediate processing is necessary + [] : + + // ...otherwise use results directly + results : + matcherIn; + + // Find primary matches + if ( matcher ) { + matcher( matcherIn, matcherOut, context, xml ); + } + + // Apply postFilter + if ( postFilter ) { + temp = condense( matcherOut, postMap ); + postFilter( temp, [], context, xml ); + + // Un-match failing elements by moving them back to matcherIn + i = temp.length; + while ( i-- ) { + if ( (elem = temp[i]) ) { + matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem); + } + } + } + + if ( seed ) { + if ( postFinder || preFilter ) { + if ( postFinder ) { + // Get the final matcherOut by condensing this intermediate into postFinder contexts + temp = []; + i = matcherOut.length; + while ( i-- ) { + if ( (elem = matcherOut[i]) ) { + // Restore matcherIn since elem is not yet a final match + temp.push( (matcherIn[i] = elem) ); + } + } + postFinder( null, (matcherOut = []), temp, xml ); + } + + // Move matched elements from seed to results to keep them synchronized + i = matcherOut.length; + while ( i-- ) { + if ( (elem = matcherOut[i]) && + (temp = postFinder ? indexOf.call( seed, elem ) : preMap[i]) > -1 ) { + + seed[temp] = !(results[temp] = elem); + } + } + } + + // Add elements to results, through postFinder if defined + } else { + matcherOut = condense( + matcherOut === results ? + matcherOut.splice( preexisting, matcherOut.length ) : + matcherOut + ); + if ( postFinder ) { + postFinder( null, results, matcherOut, xml ); + } else { + push.apply( results, matcherOut ); + } + } + }); +} + +function matcherFromTokens( tokens ) { + var checkContext, matcher, j, + len = tokens.length, + leadingRelative = Expr.relative[ tokens[0].type ], + implicitRelative = leadingRelative || Expr.relative[" "], + i = leadingRelative ? 
1 : 0, + + // The foundational matcher ensures that elements are reachable from top-level context(s) + matchContext = addCombinator( function( elem ) { + return elem === checkContext; + }, implicitRelative, true ), + matchAnyContext = addCombinator( function( elem ) { + return indexOf.call( checkContext, elem ) > -1; + }, implicitRelative, true ), + matchers = [ function( elem, context, xml ) { + return ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( + (checkContext = context).nodeType ? + matchContext( elem, context, xml ) : + matchAnyContext( elem, context, xml ) ); + } ]; + + for ( ; i < len; i++ ) { + if ( (matcher = Expr.relative[ tokens[i].type ]) ) { + matchers = [ addCombinator(elementMatcher( matchers ), matcher) ]; + } else { + matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches ); + + // Return special upon seeing a positional matcher + if ( matcher[ expando ] ) { + // Find the next relative operator (if any) for proper handling + j = ++i; + for ( ; j < len; j++ ) { + if ( Expr.relative[ tokens[j].type ] ) { + break; + } + } + return setMatcher( + i > 1 && elementMatcher( matchers ), + i > 1 && toSelector( + // If the preceding token was a descendant combinator, insert an implicit any-element `*` + tokens.slice( 0, i - 1 ).concat({ value: tokens[ i - 2 ].type === " " ? 
"*" : "" }) + ).replace( rtrim, "$1" ), + matcher, + i < j && matcherFromTokens( tokens.slice( i, j ) ), + j < len && matcherFromTokens( (tokens = tokens.slice( j )) ), + j < len && toSelector( tokens ) + ); + } + matchers.push( matcher ); + } + } + + return elementMatcher( matchers ); +} + +function matcherFromGroupMatchers( elementMatchers, setMatchers ) { + var bySet = setMatchers.length > 0, + byElement = elementMatchers.length > 0, + superMatcher = function( seed, context, xml, results, outermost ) { + var elem, j, matcher, + matchedCount = 0, + i = "0", + unmatched = seed && [], + setMatched = [], + contextBackup = outermostContext, + // We must always have either seed elements or outermost context + elems = seed || byElement && Expr.find["TAG"]( "*", outermost ), + // Use integer dirruns iff this is the outermost matcher + dirrunsUnique = (dirruns += contextBackup == null ? 1 : Math.random() || 0.1), + len = elems.length; + + if ( outermost ) { + outermostContext = context !== document && context; + } + + // Add elements passing elementMatchers directly to results + // Keep `i` a string if there are no elements so `matchedCount` will be "00" below + // Support: IE<9, Safari + // Tolerate NodeList properties (IE: "length"; Safari: ) matching elements by id + for ( ; i !== len && (elem = elems[i]) != null; i++ ) { + if ( byElement && elem ) { + j = 0; + while ( (matcher = elementMatchers[j++]) ) { + if ( matcher( elem, context, xml ) ) { + results.push( elem ); + break; + } + } + if ( outermost ) { + dirruns = dirrunsUnique; + } + } + + // Track unmatched elements for set filters + if ( bySet ) { + // They will have gone through all possible matchers + if ( (elem = !matcher && elem) ) { + matchedCount--; + } + + // Lengthen the array for every element, matched or not + if ( seed ) { + unmatched.push( elem ); + } + } + } + + // Apply set filters to unmatched elements + matchedCount += i; + if ( bySet && i !== matchedCount ) { + j = 0; + while ( (matcher = 
setMatchers[j++]) ) { + matcher( unmatched, setMatched, context, xml ); + } + + if ( seed ) { + // Reintegrate element matches to eliminate the need for sorting + if ( matchedCount > 0 ) { + while ( i-- ) { + if ( !(unmatched[i] || setMatched[i]) ) { + setMatched[i] = pop.call( results ); + } + } + } + + // Discard index placeholder values to get only actual matches + setMatched = condense( setMatched ); + } + + // Add matches to results + push.apply( results, setMatched ); + + // Seedless set matches succeeding multiple successful matchers stipulate sorting + if ( outermost && !seed && setMatched.length > 0 && + ( matchedCount + setMatchers.length ) > 1 ) { + + Sizzle.uniqueSort( results ); + } + } + + // Override manipulation of globals by nested matchers + if ( outermost ) { + dirruns = dirrunsUnique; + outermostContext = contextBackup; + } + + return unmatched; + }; + + return bySet ? + markFunction( superMatcher ) : + superMatcher; +} + +compile = Sizzle.compile = function( selector, group /* Internal Use Only */ ) { + var i, + setMatchers = [], + elementMatchers = [], + cached = compilerCache[ selector + " " ]; + + if ( !cached ) { + // Generate a function of recursive functions that can be used to check each element + if ( !group ) { + group = tokenize( selector ); + } + i = group.length; + while ( i-- ) { + cached = matcherFromTokens( group[i] ); + if ( cached[ expando ] ) { + setMatchers.push( cached ); + } else { + elementMatchers.push( cached ); + } + } + + // Cache the compiled function + cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) ); + } + return cached; +}; + +function multipleContexts( selector, contexts, results ) { + var i = 0, + len = contexts.length; + for ( ; i < len; i++ ) { + Sizzle( selector, contexts[i], results ); + } + return results; +} + +function select( selector, context, results, seed ) { + var i, tokens, token, type, find, + match = tokenize( selector ); + + if ( !seed ) { + // Try to 
minimize operations if there is only one group + if ( match.length === 1 ) { + + // Take a shortcut and set the context if the root selector is an ID + tokens = match[0] = match[0].slice( 0 ); + if ( tokens.length > 2 && (token = tokens[0]).type === "ID" && + support.getById && context.nodeType === 9 && documentIsHTML && + Expr.relative[ tokens[1].type ] ) { + + context = ( Expr.find["ID"]( token.matches[0].replace(runescape, funescape), context ) || [] )[0]; + if ( !context ) { + return results; + } + selector = selector.slice( tokens.shift().value.length ); + } + + // Fetch a seed set for right-to-left matching + i = matchExpr["needsContext"].test( selector ) ? 0 : tokens.length; + while ( i-- ) { + token = tokens[i]; + + // Abort if we hit a combinator + if ( Expr.relative[ (type = token.type) ] ) { + break; + } + if ( (find = Expr.find[ type ]) ) { + // Search, expanding context for leading sibling combinators + if ( (seed = find( + token.matches[0].replace( runescape, funescape ), + rsibling.test( tokens[0].type ) && testContext( context.parentNode ) || context + )) ) { + + // If seed is empty or no tokens remain, we can return early + tokens.splice( i, 1 ); + selector = seed.length && toSelector( tokens ); + if ( !selector ) { + push.apply( results, seed ); + return results; + } + + break; + } + } + } + } + } + + // Compile and execute a filtering function + // Provide `match` to avoid retokenization if we modified the selector above + compile( selector, match )( + seed, + context, + !documentIsHTML, + results, + rsibling.test( selector ) && testContext( context.parentNode ) || context + ); + return results; +} + +// One-time assignments + +// Sort stability +support.sortStable = expando.split("").sort( sortOrder ).join("") === expando; + +// Support: Chrome<14 +// Always assume duplicates if they aren't passed to the comparison function +support.detectDuplicates = !!hasDuplicate; + +// Initialize against the default document +setDocument(); + +// Support: 
Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27) +// Detached nodes confoundingly follow *each other* +support.sortDetached = assert(function( div1 ) { + // Should return 1, but returns 4 (following) + return div1.compareDocumentPosition( document.createElement("div") ) & 1; +}); + +// Support: IE<8 +// Prevent attribute/property "interpolation" +// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx +if ( !assert(function( div ) { + div.innerHTML = ""; + return div.firstChild.getAttribute("href") === "#" ; +}) ) { + addHandle( "type|href|height|width", function( elem, name, isXML ) { + if ( !isXML ) { + return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 ); + } + }); +} + +// Support: IE<9 +// Use defaultValue in place of getAttribute("value") +if ( !support.attributes || !assert(function( div ) { + div.innerHTML = ""; + div.firstChild.setAttribute( "value", "" ); + return div.firstChild.getAttribute( "value" ) === ""; +}) ) { + addHandle( "value", function( elem, name, isXML ) { + if ( !isXML && elem.nodeName.toLowerCase() === "input" ) { + return elem.defaultValue; + } + }); +} + +// Support: IE<9 +// Use getAttributeNode to fetch booleans when getAttribute lies +if ( !assert(function( div ) { + return div.getAttribute("disabled") == null; +}) ) { + addHandle( booleans, function( elem, name, isXML ) { + var val; + if ( !isXML ) { + return elem[ name ] === true ? name.toLowerCase() : + (val = elem.getAttributeNode( name )) && val.specified ? 
+ val.value : + null; + } + }); +} + +return Sizzle; + +})( window ); + + + +jQuery.find = Sizzle; +jQuery.expr = Sizzle.selectors; +jQuery.expr[":"] = jQuery.expr.pseudos; +jQuery.unique = Sizzle.uniqueSort; +jQuery.text = Sizzle.getText; +jQuery.isXMLDoc = Sizzle.isXML; +jQuery.contains = Sizzle.contains; + + + +var rneedsContext = jQuery.expr.match.needsContext; + +var rsingleTag = (/^<(\w+)\s*\/?>(?:<\/\1>|)$/); + + + +var risSimple = /^.[^:#\[\.,]*$/; + +// Implement the identical functionality for filter and not +function winnow( elements, qualifier, not ) { + if ( jQuery.isFunction( qualifier ) ) { + return jQuery.grep( elements, function( elem, i ) { + /* jshint -W018 */ + return !!qualifier.call( elem, i, elem ) !== not; + }); + + } + + if ( qualifier.nodeType ) { + return jQuery.grep( elements, function( elem ) { + return ( elem === qualifier ) !== not; + }); + + } + + if ( typeof qualifier === "string" ) { + if ( risSimple.test( qualifier ) ) { + return jQuery.filter( qualifier, elements, not ); + } + + qualifier = jQuery.filter( qualifier, elements ); + } + + return jQuery.grep( elements, function( elem ) { + return ( jQuery.inArray( elem, qualifier ) >= 0 ) !== not; + }); +} + +jQuery.filter = function( expr, elems, not ) { + var elem = elems[ 0 ]; + + if ( not ) { + expr = ":not(" + expr + ")"; + } + + return elems.length === 1 && elem.nodeType === 1 ? + jQuery.find.matchesSelector( elem, expr ) ? 
[ elem ] : [] : + jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { + return elem.nodeType === 1; + })); +}; + +jQuery.fn.extend({ + find: function( selector ) { + var i, + ret = [], + self = this, + len = self.length; + + if ( typeof selector !== "string" ) { + return this.pushStack( jQuery( selector ).filter(function() { + for ( i = 0; i < len; i++ ) { + if ( jQuery.contains( self[ i ], this ) ) { + return true; + } + } + }) ); + } + + for ( i = 0; i < len; i++ ) { + jQuery.find( selector, self[ i ], ret ); + } + + // Needed because $( selector, context ) becomes $( context ).find( selector ) + ret = this.pushStack( len > 1 ? jQuery.unique( ret ) : ret ); + ret.selector = this.selector ? this.selector + " " + selector : selector; + return ret; + }, + filter: function( selector ) { + return this.pushStack( winnow(this, selector || [], false) ); + }, + not: function( selector ) { + return this.pushStack( winnow(this, selector || [], true) ); + }, + is: function( selector ) { + return !!winnow( + this, + + // If this is a positional/relative selector, check membership in the returned set + // so $("p:first").is("p:last") won't return true for a doc with two "p". + typeof selector === "string" && rneedsContext.test( selector ) ? 
+ jQuery( selector ) : + selector || [], + false + ).length; + } +}); + + +// Initialize a jQuery object + + +// A central reference to the root jQuery(document) +var rootjQuery, + + // Use the correct document accordingly with window argument (sandbox) + document = window.document, + + // A simple way to check for HTML strings + // Prioritize #id over to avoid XSS via location.hash (#9521) + // Strict HTML recognition (#11290: must start with <) + rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/, + + init = jQuery.fn.init = function( selector, context ) { + var match, elem; + + // HANDLE: $(""), $(null), $(undefined), $(false) + if ( !selector ) { + return this; + } + + // Handle HTML strings + if ( typeof selector === "string" ) { + if ( selector.charAt(0) === "<" && selector.charAt( selector.length - 1 ) === ">" && selector.length >= 3 ) { + // Assume that strings that start and end with <> are HTML and skip the regex check + match = [ null, selector, null ]; + + } else { + match = rquickExpr.exec( selector ); + } + + // Match html or make sure no context is specified for #id + if ( match && (match[1] || !context) ) { + + // HANDLE: $(html) -> $(array) + if ( match[1] ) { + context = context instanceof jQuery ? context[0] : context; + + // scripts is true for back-compat + // Intentionally let the error be thrown if parseHTML is not present + jQuery.merge( this, jQuery.parseHTML( + match[1], + context && context.nodeType ? 
context.ownerDocument || context : document, + true + ) ); + + // HANDLE: $(html, props) + if ( rsingleTag.test( match[1] ) && jQuery.isPlainObject( context ) ) { + for ( match in context ) { + // Properties of context are called as methods if possible + if ( jQuery.isFunction( this[ match ] ) ) { + this[ match ]( context[ match ] ); + + // ...and otherwise set as attributes + } else { + this.attr( match, context[ match ] ); + } + } + } + + return this; + + // HANDLE: $(#id) + } else { + elem = document.getElementById( match[2] ); + + // Check parentNode to catch when Blackberry 4.6 returns + // nodes that are no longer in the document #6963 + if ( elem && elem.parentNode ) { + // Handle the case where IE and Opera return items + // by name instead of ID + if ( elem.id !== match[2] ) { + return rootjQuery.find( selector ); + } + + // Otherwise, we inject the element directly into the jQuery object + this.length = 1; + this[0] = elem; + } + + this.context = document; + this.selector = selector; + return this; + } + + // HANDLE: $(expr, $(...)) + } else if ( !context || context.jquery ) { + return ( context || rootjQuery ).find( selector ); + + // HANDLE: $(expr, context) + // (which is just equivalent to: $(context).find(expr) + } else { + return this.constructor( context ).find( selector ); + } + + // HANDLE: $(DOMElement) + } else if ( selector.nodeType ) { + this.context = this[0] = selector; + this.length = 1; + return this; + + // HANDLE: $(function) + // Shortcut for document ready + } else if ( jQuery.isFunction( selector ) ) { + return typeof rootjQuery.ready !== "undefined" ? 
+ rootjQuery.ready( selector ) : + // Execute immediately if ready is not present + selector( jQuery ); + } + + if ( selector.selector !== undefined ) { + this.selector = selector.selector; + this.context = selector.context; + } + + return jQuery.makeArray( selector, this ); + }; + +// Give the init function the jQuery prototype for later instantiation +init.prototype = jQuery.fn; + +// Initialize central reference +rootjQuery = jQuery( document ); + + +var rparentsprev = /^(?:parents|prev(?:Until|All))/, + // methods guaranteed to produce a unique set when starting from a unique set + guaranteedUnique = { + children: true, + contents: true, + next: true, + prev: true + }; + +jQuery.extend({ + dir: function( elem, dir, until ) { + var matched = [], + cur = elem[ dir ]; + + while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) { + if ( cur.nodeType === 1 ) { + matched.push( cur ); + } + cur = cur[dir]; + } + return matched; + }, + + sibling: function( n, elem ) { + var r = []; + + for ( ; n; n = n.nextSibling ) { + if ( n.nodeType === 1 && n !== elem ) { + r.push( n ); + } + } + + return r; + } +}); + +jQuery.fn.extend({ + has: function( target ) { + var i, + targets = jQuery( target, this ), + len = targets.length; + + return this.filter(function() { + for ( i = 0; i < len; i++ ) { + if ( jQuery.contains( this, targets[i] ) ) { + return true; + } + } + }); + }, + + closest: function( selectors, context ) { + var cur, + i = 0, + l = this.length, + matched = [], + pos = rneedsContext.test( selectors ) || typeof selectors !== "string" ? + jQuery( selectors, context || this.context ) : + 0; + + for ( ; i < l; i++ ) { + for ( cur = this[i]; cur && cur !== context; cur = cur.parentNode ) { + // Always skip document fragments + if ( cur.nodeType < 11 && (pos ? 
+ pos.index(cur) > -1 : + + // Don't pass non-elements to Sizzle + cur.nodeType === 1 && + jQuery.find.matchesSelector(cur, selectors)) ) { + + matched.push( cur ); + break; + } + } + } + + return this.pushStack( matched.length > 1 ? jQuery.unique( matched ) : matched ); + }, + + // Determine the position of an element within + // the matched set of elements + index: function( elem ) { + + // No argument, return index in parent + if ( !elem ) { + return ( this[0] && this[0].parentNode ) ? this.first().prevAll().length : -1; + } + + // index in selector + if ( typeof elem === "string" ) { + return jQuery.inArray( this[0], jQuery( elem ) ); + } + + // Locate the position of the desired element + return jQuery.inArray( + // If it receives a jQuery object, the first element is used + elem.jquery ? elem[0] : elem, this ); + }, + + add: function( selector, context ) { + return this.pushStack( + jQuery.unique( + jQuery.merge( this.get(), jQuery( selector, context ) ) + ) + ); + }, + + addBack: function( selector ) { + return this.add( selector == null ? + this.prevObject : this.prevObject.filter(selector) + ); + } +}); + +function sibling( cur, dir ) { + do { + cur = cur[ dir ]; + } while ( cur && cur.nodeType !== 1 ); + + return cur; +} + +jQuery.each({ + parent: function( elem ) { + var parent = elem.parentNode; + return parent && parent.nodeType !== 11 ? 
parent : null; + }, + parents: function( elem ) { + return jQuery.dir( elem, "parentNode" ); + }, + parentsUntil: function( elem, i, until ) { + return jQuery.dir( elem, "parentNode", until ); + }, + next: function( elem ) { + return sibling( elem, "nextSibling" ); + }, + prev: function( elem ) { + return sibling( elem, "previousSibling" ); + }, + nextAll: function( elem ) { + return jQuery.dir( elem, "nextSibling" ); + }, + prevAll: function( elem ) { + return jQuery.dir( elem, "previousSibling" ); + }, + nextUntil: function( elem, i, until ) { + return jQuery.dir( elem, "nextSibling", until ); + }, + prevUntil: function( elem, i, until ) { + return jQuery.dir( elem, "previousSibling", until ); + }, + siblings: function( elem ) { + return jQuery.sibling( ( elem.parentNode || {} ).firstChild, elem ); + }, + children: function( elem ) { + return jQuery.sibling( elem.firstChild ); + }, + contents: function( elem ) { + return jQuery.nodeName( elem, "iframe" ) ? + elem.contentDocument || elem.contentWindow.document : + jQuery.merge( [], elem.childNodes ); + } +}, function( name, fn ) { + jQuery.fn[ name ] = function( until, selector ) { + var ret = jQuery.map( this, fn, until ); + + if ( name.slice( -5 ) !== "Until" ) { + selector = until; + } + + if ( selector && typeof selector === "string" ) { + ret = jQuery.filter( selector, ret ); + } + + if ( this.length > 1 ) { + // Remove duplicates + if ( !guaranteedUnique[ name ] ) { + ret = jQuery.unique( ret ); + } + + // Reverse order for parents* and prev-derivatives + if ( rparentsprev.test( name ) ) { + ret = ret.reverse(); + } + } + + return this.pushStack( ret ); + }; +}); +var rnotwhite = (/\S+/g); + + + +// String to Object options format cache +var optionsCache = {}; + +// Convert String-formatted options into Object-formatted ones and store in cache +function createOptions( options ) { + var object = optionsCache[ options ] = {}; + jQuery.each( options.match( rnotwhite ) || [], function( _, flag ) { + object[ flag 
] = true; + }); + return object; +} + +/* + * Create a callback list using the following parameters: + * + * options: an optional list of space-separated options that will change how + * the callback list behaves or a more traditional option object + * + * By default a callback list will act like an event callback list and can be + * "fired" multiple times. + * + * Possible options: + * + * once: will ensure the callback list can only be fired once (like a Deferred) + * + * memory: will keep track of previous values and will call any callback added + * after the list has been fired right away with the latest "memorized" + * values (like a Deferred) + * + * unique: will ensure a callback can only be added once (no duplicate in the list) + * + * stopOnFalse: interrupt callings when a callback returns false + * + */ +jQuery.Callbacks = function( options ) { + + // Convert options from String-formatted to Object-formatted if needed + // (we check in cache first) + options = typeof options === "string" ? 
+ ( optionsCache[ options ] || createOptions( options ) ) : + jQuery.extend( {}, options ); + + var // Flag to know if list is currently firing + firing, + // Last fire value (for non-forgettable lists) + memory, + // Flag to know if list was already fired + fired, + // End of the loop when firing + firingLength, + // Index of currently firing callback (modified by remove if needed) + firingIndex, + // First callback to fire (used internally by add and fireWith) + firingStart, + // Actual callback list + list = [], + // Stack of fire calls for repeatable lists + stack = !options.once && [], + // Fire callbacks + fire = function( data ) { + memory = options.memory && data; + fired = true; + firingIndex = firingStart || 0; + firingStart = 0; + firingLength = list.length; + firing = true; + for ( ; list && firingIndex < firingLength; firingIndex++ ) { + if ( list[ firingIndex ].apply( data[ 0 ], data[ 1 ] ) === false && options.stopOnFalse ) { + memory = false; // To prevent further calls using add + break; + } + } + firing = false; + if ( list ) { + if ( stack ) { + if ( stack.length ) { + fire( stack.shift() ); + } + } else if ( memory ) { + list = []; + } else { + self.disable(); + } + } + }, + // Actual Callbacks object + self = { + // Add a callback or a collection of callbacks to the list + add: function() { + if ( list ) { + // First, we save the current length + var start = list.length; + (function add( args ) { + jQuery.each( args, function( _, arg ) { + var type = jQuery.type( arg ); + if ( type === "function" ) { + if ( !options.unique || !self.has( arg ) ) { + list.push( arg ); + } + } else if ( arg && arg.length && type !== "string" ) { + // Inspect recursively + add( arg ); + } + }); + })( arguments ); + // Do we need to add the callbacks to the + // current firing batch? 
+ if ( firing ) { + firingLength = list.length; + // With memory, if we're not firing then + // we should call right away + } else if ( memory ) { + firingStart = start; + fire( memory ); + } + } + return this; + }, + // Remove a callback from the list + remove: function() { + if ( list ) { + jQuery.each( arguments, function( _, arg ) { + var index; + while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { + list.splice( index, 1 ); + // Handle firing indexes + if ( firing ) { + if ( index <= firingLength ) { + firingLength--; + } + if ( index <= firingIndex ) { + firingIndex--; + } + } + } + }); + } + return this; + }, + // Check if a given callback is in the list. + // If no argument is given, return whether or not list has callbacks attached. + has: function( fn ) { + return fn ? jQuery.inArray( fn, list ) > -1 : !!( list && list.length ); + }, + // Remove all callbacks from the list + empty: function() { + list = []; + firingLength = 0; + return this; + }, + // Have the list do nothing anymore + disable: function() { + list = stack = memory = undefined; + return this; + }, + // Is it disabled? + disabled: function() { + return !list; + }, + // Lock the list in its current state + lock: function() { + stack = undefined; + if ( !memory ) { + self.disable(); + } + return this; + }, + // Is it locked? + locked: function() { + return !stack; + }, + // Call all callbacks with the given context and arguments + fireWith: function( context, args ) { + if ( list && ( !fired || stack ) ) { + args = args || []; + args = [ context, args.slice ? 
args.slice() : args ]; + if ( firing ) { + stack.push( args ); + } else { + fire( args ); + } + } + return this; + }, + // Call all the callbacks with the given arguments + fire: function() { + self.fireWith( this, arguments ); + return this; + }, + // To know if the callbacks have already been called at least once + fired: function() { + return !!fired; + } + }; + + return self; +}; + + +jQuery.extend({ + + Deferred: function( func ) { + var tuples = [ + // action, add listener, listener list, final state + [ "resolve", "done", jQuery.Callbacks("once memory"), "resolved" ], + [ "reject", "fail", jQuery.Callbacks("once memory"), "rejected" ], + [ "notify", "progress", jQuery.Callbacks("memory") ] + ], + state = "pending", + promise = { + state: function() { + return state; + }, + always: function() { + deferred.done( arguments ).fail( arguments ); + return this; + }, + then: function( /* fnDone, fnFail, fnProgress */ ) { + var fns = arguments; + return jQuery.Deferred(function( newDefer ) { + jQuery.each( tuples, function( i, tuple ) { + var fn = jQuery.isFunction( fns[ i ] ) && fns[ i ]; + // deferred[ done | fail | progress ] for forwarding actions to newDefer + deferred[ tuple[1] ](function() { + var returned = fn && fn.apply( this, arguments ); + if ( returned && jQuery.isFunction( returned.promise ) ) { + returned.promise() + .done( newDefer.resolve ) + .fail( newDefer.reject ) + .progress( newDefer.notify ); + } else { + newDefer[ tuple[ 0 ] + "With" ]( this === promise ? newDefer.promise() : this, fn ? [ returned ] : arguments ); + } + }); + }); + fns = null; + }).promise(); + }, + // Get a promise for this deferred + // If obj is provided, the promise aspect is added to the object + promise: function( obj ) { + return obj != null ? 
jQuery.extend( obj, promise ) : promise; + } + }, + deferred = {}; + + // Keep pipe for back-compat + promise.pipe = promise.then; + + // Add list-specific methods + jQuery.each( tuples, function( i, tuple ) { + var list = tuple[ 2 ], + stateString = tuple[ 3 ]; + + // promise[ done | fail | progress ] = list.add + promise[ tuple[1] ] = list.add; + + // Handle state + if ( stateString ) { + list.add(function() { + // state = [ resolved | rejected ] + state = stateString; + + // [ reject_list | resolve_list ].disable; progress_list.lock + }, tuples[ i ^ 1 ][ 2 ].disable, tuples[ 2 ][ 2 ].lock ); + } + + // deferred[ resolve | reject | notify ] + deferred[ tuple[0] ] = function() { + deferred[ tuple[0] + "With" ]( this === deferred ? promise : this, arguments ); + return this; + }; + deferred[ tuple[0] + "With" ] = list.fireWith; + }); + + // Make the deferred a promise + promise.promise( deferred ); + + // Call given func if any + if ( func ) { + func.call( deferred, deferred ); + } + + // All done! + return deferred; + }, + + // Deferred helper + when: function( subordinate /* , ..., subordinateN */ ) { + var i = 0, + resolveValues = slice.call( arguments ), + length = resolveValues.length, + + // the count of uncompleted subordinates + remaining = length !== 1 || ( subordinate && jQuery.isFunction( subordinate.promise ) ) ? length : 0, + + // the master Deferred. If resolveValues consist of only a single Deferred, just use that. + deferred = remaining === 1 ? subordinate : jQuery.Deferred(), + + // Update function for both resolve and progress values + updateFunc = function( i, contexts, values ) { + return function( value ) { + contexts[ i ] = this; + values[ i ] = arguments.length > 1 ? 
slice.call( arguments ) : value; + if ( values === progressValues ) { + deferred.notifyWith( contexts, values ); + + } else if ( !(--remaining) ) { + deferred.resolveWith( contexts, values ); + } + }; + }, + + progressValues, progressContexts, resolveContexts; + + // add listeners to Deferred subordinates; treat others as resolved + if ( length > 1 ) { + progressValues = new Array( length ); + progressContexts = new Array( length ); + resolveContexts = new Array( length ); + for ( ; i < length; i++ ) { + if ( resolveValues[ i ] && jQuery.isFunction( resolveValues[ i ].promise ) ) { + resolveValues[ i ].promise() + .done( updateFunc( i, resolveContexts, resolveValues ) ) + .fail( deferred.reject ) + .progress( updateFunc( i, progressContexts, progressValues ) ); + } else { + --remaining; + } + } + } + + // if we're not waiting on anything, resolve the master + if ( !remaining ) { + deferred.resolveWith( resolveContexts, resolveValues ); + } + + return deferred.promise(); + } +}); + + +// The deferred used on DOM ready +var readyList; + +jQuery.fn.ready = function( fn ) { + // Add the callback + jQuery.ready.promise().done( fn ); + + return this; +}; + +jQuery.extend({ + // Is the DOM ready to be used? Set to true once it occurs. + isReady: false, + + // A counter to track how many items to wait for before + // the ready event fires. See #6781 + readyWait: 1, + + // Hold (or release) the ready event + holdReady: function( hold ) { + if ( hold ) { + jQuery.readyWait++; + } else { + jQuery.ready( true ); + } + }, + + // Handle when the DOM is ready + ready: function( wait ) { + + // Abort if there are pending holds or we're already ready + if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) { + return; + } + + // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443). 
+ if ( !document.body ) { + return setTimeout( jQuery.ready ); + } + + // Remember that the DOM is ready + jQuery.isReady = true; + + // If a normal DOM Ready event fired, decrement, and wait if need be + if ( wait !== true && --jQuery.readyWait > 0 ) { + return; + } + + // If there are functions bound, to execute + readyList.resolveWith( document, [ jQuery ] ); + + // Trigger any bound ready events + if ( jQuery.fn.trigger ) { + jQuery( document ).trigger("ready").off("ready"); + } + } +}); + +/** + * Clean-up method for dom ready events + */ +function detach() { + if ( document.addEventListener ) { + document.removeEventListener( "DOMContentLoaded", completed, false ); + window.removeEventListener( "load", completed, false ); + + } else { + document.detachEvent( "onreadystatechange", completed ); + window.detachEvent( "onload", completed ); + } +} + +/** + * The ready event handler and self cleanup method + */ +function completed() { + // readyState === "complete" is good enough for us to call the dom ready in oldIE + if ( document.addEventListener || event.type === "load" || document.readyState === "complete" ) { + detach(); + jQuery.ready(); + } +} + +jQuery.ready.promise = function( obj ) { + if ( !readyList ) { + + readyList = jQuery.Deferred(); + + // Catch cases where $(document).ready() is called after the browser event has already occurred. 
+ // we once tried to use readyState "interactive" here, but it caused issues like the one + // discovered by ChrisS here: http://bugs.jquery.com/ticket/12282#comment:15 + if ( document.readyState === "complete" ) { + // Handle it asynchronously to allow scripts the opportunity to delay ready + setTimeout( jQuery.ready ); + + // Standards-based browsers support DOMContentLoaded + } else if ( document.addEventListener ) { + // Use the handy event callback + document.addEventListener( "DOMContentLoaded", completed, false ); + + // A fallback to window.onload, that will always work + window.addEventListener( "load", completed, false ); + + // If IE event model is used + } else { + // Ensure firing before onload, maybe late but safe also for iframes + document.attachEvent( "onreadystatechange", completed ); + + // A fallback to window.onload, that will always work + window.attachEvent( "onload", completed ); + + // If IE and not a frame + // continually check to see if the document is ready + var top = false; + + try { + top = window.frameElement == null && document.documentElement; + } catch(e) {} + + if ( top && top.doScroll ) { + (function doScrollCheck() { + if ( !jQuery.isReady ) { + + try { + // Use the trick by Diego Perini + // http://javascript.nwbox.com/IEContentLoaded/ + top.doScroll("left"); + } catch(e) { + return setTimeout( doScrollCheck, 50 ); + } + + // detach all dom ready events + detach(); + + // and execute any waiting functions + jQuery.ready(); + } + })(); + } + } + } + return readyList.promise( obj ); +}; + + +var strundefined = typeof undefined; + + + +// Support: IE<9 +// Iteration over object's inherited properties before its own +var i; +for ( i in jQuery( support ) ) { + break; +} +support.ownLast = i !== "0"; + +// Note: most support tests are defined in their respective modules. 
+// false until the test is run +support.inlineBlockNeedsLayout = false; + +jQuery(function() { + // We need to execute this one support test ASAP because we need to know + // if body.style.zoom needs to be set. + + var container, div, + body = document.getElementsByTagName("body")[0]; + + if ( !body ) { + // Return for frameset docs that don't have a body + return; + } + + // Setup + container = document.createElement( "div" ); + container.style.cssText = "border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px"; + + div = document.createElement( "div" ); + body.appendChild( container ).appendChild( div ); + + if ( typeof div.style.zoom !== strundefined ) { + // Support: IE<8 + // Check if natively block-level elements act like inline-block + // elements when setting their display to 'inline' and giving + // them layout + div.style.cssText = "border:0;margin:0;width:1px;padding:1px;display:inline;zoom:1"; + + if ( (support.inlineBlockNeedsLayout = ( div.offsetWidth === 3 )) ) { + // Prevent IE 6 from affecting layout for positioned elements #11048 + // Prevent IE from shrinking the body in IE 7 mode #12869 + // Support: IE<8 + body.style.zoom = 1; + } + } + + body.removeChild( container ); + + // Null elements to avoid leaks in IE + container = div = null; +}); + + + + +(function() { + var div = document.createElement( "div" ); + + // Execute the test only if not already executed in another module. + if (support.deleteExpando == null) { + // Support: IE<9 + support.deleteExpando = true; + try { + delete div.test; + } catch( e ) { + support.deleteExpando = false; + } + } + + // Null elements to avoid leaks in IE. + div = null; +})(); + + +/** + * Determines whether an object can have data + */ +jQuery.acceptData = function( elem ) { + var noData = jQuery.noData[ (elem.nodeName + " ").toLowerCase() ], + nodeType = +elem.nodeType || 1; + + // Do not set data on non-element DOM nodes because it will not be cleared (#8335). 
+ return nodeType !== 1 && nodeType !== 9 ? + false : + + // Nodes accept data unless otherwise specified; rejection can be conditional + !noData || noData !== true && elem.getAttribute("classid") === noData; +}; + + +var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/, + rmultiDash = /([A-Z])/g; + +function dataAttr( elem, key, data ) { + // If nothing was found internally, try to fetch any + // data from the HTML5 data-* attribute + if ( data === undefined && elem.nodeType === 1 ) { + + var name = "data-" + key.replace( rmultiDash, "-$1" ).toLowerCase(); + + data = elem.getAttribute( name ); + + if ( typeof data === "string" ) { + try { + data = data === "true" ? true : + data === "false" ? false : + data === "null" ? null : + // Only convert to a number if it doesn't change the string + +data + "" === data ? +data : + rbrace.test( data ) ? jQuery.parseJSON( data ) : + data; + } catch( e ) {} + + // Make sure we set the data so it isn't changed later + jQuery.data( elem, key, data ); + + } else { + data = undefined; + } + } + + return data; +} + +// checks a cache object for emptiness +function isEmptyDataObject( obj ) { + var name; + for ( name in obj ) { + + // if the public data object is empty, the private is still empty + if ( name === "data" && jQuery.isEmptyObject( obj[name] ) ) { + continue; + } + if ( name !== "toJSON" ) { + return false; + } + } + + return true; +} + +function internalData( elem, name, data, pvt /* Internal Use Only */ ) { + if ( !jQuery.acceptData( elem ) ) { + return; + } + + var ret, thisCache, + internalKey = jQuery.expando, + + // We have to handle DOM nodes and JS objects differently because IE6-7 + // can't GC object references properly across the DOM-JS boundary + isNode = elem.nodeType, + + // Only DOM nodes need the global jQuery cache; JS object data is + // attached directly to the object so GC can occur automatically + cache = isNode ? 
jQuery.cache : elem, + + // Only defining an ID for JS objects if its cache already exists allows + // the code to shortcut on the same path as a DOM node with no cache + id = isNode ? elem[ internalKey ] : elem[ internalKey ] && internalKey; + + // Avoid doing any more work than we need to when trying to get data on an + // object that has no data at all + if ( (!id || !cache[id] || (!pvt && !cache[id].data)) && data === undefined && typeof name === "string" ) { + return; + } + + if ( !id ) { + // Only DOM nodes need a new unique ID for each element since their data + // ends up in the global cache + if ( isNode ) { + id = elem[ internalKey ] = deletedIds.pop() || jQuery.guid++; + } else { + id = internalKey; + } + } + + if ( !cache[ id ] ) { + // Avoid exposing jQuery metadata on plain JS objects when the object + // is serialized using JSON.stringify + cache[ id ] = isNode ? {} : { toJSON: jQuery.noop }; + } + + // An object can be passed to jQuery.data instead of a key/value pair; this gets + // shallow copied over onto the existing cache + if ( typeof name === "object" || typeof name === "function" ) { + if ( pvt ) { + cache[ id ] = jQuery.extend( cache[ id ], name ); + } else { + cache[ id ].data = jQuery.extend( cache[ id ].data, name ); + } + } + + thisCache = cache[ id ]; + + // jQuery data() is stored in a separate object inside the object's internal data + // cache in order to avoid key collisions between internal data and user-defined + // data. 
+ if ( !pvt ) { + if ( !thisCache.data ) { + thisCache.data = {}; + } + + thisCache = thisCache.data; + } + + if ( data !== undefined ) { + thisCache[ jQuery.camelCase( name ) ] = data; + } + + // Check for both converted-to-camel and non-converted data property names + // If a data property was specified + if ( typeof name === "string" ) { + + // First Try to find as-is property data + ret = thisCache[ name ]; + + // Test for null|undefined property data + if ( ret == null ) { + + // Try to find the camelCased property + ret = thisCache[ jQuery.camelCase( name ) ]; + } + } else { + ret = thisCache; + } + + return ret; +} + +function internalRemoveData( elem, name, pvt ) { + if ( !jQuery.acceptData( elem ) ) { + return; + } + + var thisCache, i, + isNode = elem.nodeType, + + // See jQuery.data for more information + cache = isNode ? jQuery.cache : elem, + id = isNode ? elem[ jQuery.expando ] : jQuery.expando; + + // If there is already no cache entry for this object, there is no + // purpose in continuing + if ( !cache[ id ] ) { + return; + } + + if ( name ) { + + thisCache = pvt ? cache[ id ] : cache[ id ].data; + + if ( thisCache ) { + + // Support array or space separated string names for data keys + if ( !jQuery.isArray( name ) ) { + + // try the string as a key before any manipulation + if ( name in thisCache ) { + name = [ name ]; + } else { + + // split the camel cased version by spaces unless a key with the spaces exists + name = jQuery.camelCase( name ); + if ( name in thisCache ) { + name = [ name ]; + } else { + name = name.split(" "); + } + } + } else { + // If "name" is an array of keys... + // When data is initially created, via ("key", "val") signature, + // keys will be converted to camelCase. + // Since there is no way to tell _how_ a key was added, remove + // both plain key and camelCase key. #12786 + // This will only penalize the array argument path. 
+ name = name.concat( jQuery.map( name, jQuery.camelCase ) ); + } + + i = name.length; + while ( i-- ) { + delete thisCache[ name[i] ]; + } + + // If there is no data left in the cache, we want to continue + // and let the cache object itself get destroyed + if ( pvt ? !isEmptyDataObject(thisCache) : !jQuery.isEmptyObject(thisCache) ) { + return; + } + } + } + + // See jQuery.data for more information + if ( !pvt ) { + delete cache[ id ].data; + + // Don't destroy the parent cache unless the internal data object + // had been the only thing left in it + if ( !isEmptyDataObject( cache[ id ] ) ) { + return; + } + } + + // Destroy the cache + if ( isNode ) { + jQuery.cleanData( [ elem ], true ); + + // Use delete when supported for expandos or `cache` is not a window per isWindow (#10080) + /* jshint eqeqeq: false */ + } else if ( support.deleteExpando || cache != cache.window ) { + /* jshint eqeqeq: true */ + delete cache[ id ]; + + // When all else fails, null + } else { + cache[ id ] = null; + } +} + +jQuery.extend({ + cache: {}, + + // The following elements (space-suffixed to avoid Object.prototype collisions) + // throw uncatchable exceptions if you attempt to set expando properties + noData: { + "applet ": true, + "embed ": true, + // ...but Flash objects (which have this classid) *can* handle expandos + "object ": "clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" + }, + + hasData: function( elem ) { + elem = elem.nodeType ? jQuery.cache[ elem[jQuery.expando] ] : elem[ jQuery.expando ]; + return !!elem && !isEmptyDataObject( elem ); + }, + + data: function( elem, name, data ) { + return internalData( elem, name, data ); + }, + + removeData: function( elem, name ) { + return internalRemoveData( elem, name ); + }, + + // For internal use only. 
+ _data: function( elem, name, data ) { + return internalData( elem, name, data, true ); + }, + + _removeData: function( elem, name ) { + return internalRemoveData( elem, name, true ); + } +}); + +jQuery.fn.extend({ + data: function( key, value ) { + var i, name, data, + elem = this[0], + attrs = elem && elem.attributes; + + // Special expections of .data basically thwart jQuery.access, + // so implement the relevant behavior ourselves + + // Gets all values + if ( key === undefined ) { + if ( this.length ) { + data = jQuery.data( elem ); + + if ( elem.nodeType === 1 && !jQuery._data( elem, "parsedAttrs" ) ) { + i = attrs.length; + while ( i-- ) { + name = attrs[i].name; + + if ( name.indexOf("data-") === 0 ) { + name = jQuery.camelCase( name.slice(5) ); + + dataAttr( elem, name, data[ name ] ); + } + } + jQuery._data( elem, "parsedAttrs", true ); + } + } + + return data; + } + + // Sets multiple values + if ( typeof key === "object" ) { + return this.each(function() { + jQuery.data( this, key ); + }); + } + + return arguments.length > 1 ? + + // Sets one value + this.each(function() { + jQuery.data( this, key, value ); + }) : + + // Gets one value + // Try to fetch any internally stored data first + elem ? 
dataAttr( elem, key, jQuery.data( elem, key ) ) : undefined; + }, + + removeData: function( key ) { + return this.each(function() { + jQuery.removeData( this, key ); + }); + } +}); + + +jQuery.extend({ + queue: function( elem, type, data ) { + var queue; + + if ( elem ) { + type = ( type || "fx" ) + "queue"; + queue = jQuery._data( elem, type ); + + // Speed up dequeue by getting out quickly if this is just a lookup + if ( data ) { + if ( !queue || jQuery.isArray(data) ) { + queue = jQuery._data( elem, type, jQuery.makeArray(data) ); + } else { + queue.push( data ); + } + } + return queue || []; + } + }, + + dequeue: function( elem, type ) { + type = type || "fx"; + + var queue = jQuery.queue( elem, type ), + startLength = queue.length, + fn = queue.shift(), + hooks = jQuery._queueHooks( elem, type ), + next = function() { + jQuery.dequeue( elem, type ); + }; + + // If the fx queue is dequeued, always remove the progress sentinel + if ( fn === "inprogress" ) { + fn = queue.shift(); + startLength--; + } + + if ( fn ) { + + // Add a progress sentinel to prevent the fx queue from being + // automatically dequeued + if ( type === "fx" ) { + queue.unshift( "inprogress" ); + } + + // clear up the last queue stop function + delete hooks.stop; + fn.call( elem, next, hooks ); + } + + if ( !startLength && hooks ) { + hooks.empty.fire(); + } + }, + + // not intended for public consumption - generates a queueHooks object, or returns the current one + _queueHooks: function( elem, type ) { + var key = type + "queueHooks"; + return jQuery._data( elem, key ) || jQuery._data( elem, key, { + empty: jQuery.Callbacks("once memory").add(function() { + jQuery._removeData( elem, type + "queue" ); + jQuery._removeData( elem, key ); + }) + }); + } +}); + +jQuery.fn.extend({ + queue: function( type, data ) { + var setter = 2; + + if ( typeof type !== "string" ) { + data = type; + type = "fx"; + setter--; + } + + if ( arguments.length < setter ) { + return jQuery.queue( this[0], type ); + } 
+ + return data === undefined ? + this : + this.each(function() { + var queue = jQuery.queue( this, type, data ); + + // ensure a hooks for this queue + jQuery._queueHooks( this, type ); + + if ( type === "fx" && queue[0] !== "inprogress" ) { + jQuery.dequeue( this, type ); + } + }); + }, + dequeue: function( type ) { + return this.each(function() { + jQuery.dequeue( this, type ); + }); + }, + clearQueue: function( type ) { + return this.queue( type || "fx", [] ); + }, + // Get a promise resolved when queues of a certain type + // are emptied (fx is the type by default) + promise: function( type, obj ) { + var tmp, + count = 1, + defer = jQuery.Deferred(), + elements = this, + i = this.length, + resolve = function() { + if ( !( --count ) ) { + defer.resolveWith( elements, [ elements ] ); + } + }; + + if ( typeof type !== "string" ) { + obj = type; + type = undefined; + } + type = type || "fx"; + + while ( i-- ) { + tmp = jQuery._data( elements[ i ], type + "queueHooks" ); + if ( tmp && tmp.empty ) { + count++; + tmp.empty.add( resolve ); + } + } + resolve(); + return defer.promise( obj ); + } +}); +var pnum = (/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/).source; + +var cssExpand = [ "Top", "Right", "Bottom", "Left" ]; + +var isHidden = function( elem, el ) { + // isHidden might be called from jQuery#filter function; + // in that case, element will be second argument + elem = el || elem; + return jQuery.css( elem, "display" ) === "none" || !jQuery.contains( elem.ownerDocument, elem ); + }; + + + +// Multifunctional method to get and set values of a collection +// The value/s can optionally be executed if it's a function +var access = jQuery.access = function( elems, fn, key, value, chainable, emptyGet, raw ) { + var i = 0, + length = elems.length, + bulk = key == null; + + // Sets many values + if ( jQuery.type( key ) === "object" ) { + chainable = true; + for ( i in key ) { + jQuery.access( elems, fn, i, key[i], true, emptyGet, raw ); + } + + // Sets one value + } else 
if ( value !== undefined ) { + chainable = true; + + if ( !jQuery.isFunction( value ) ) { + raw = true; + } + + if ( bulk ) { + // Bulk operations run against the entire set + if ( raw ) { + fn.call( elems, value ); + fn = null; + + // ...except when executing function values + } else { + bulk = fn; + fn = function( elem, key, value ) { + return bulk.call( jQuery( elem ), value ); + }; + } + } + + if ( fn ) { + for ( ; i < length; i++ ) { + fn( elems[i], key, raw ? value : value.call( elems[i], i, fn( elems[i], key ) ) ); + } + } + } + + return chainable ? + elems : + + // Gets + bulk ? + fn.call( elems ) : + length ? fn( elems[0], key ) : emptyGet; +}; +var rcheckableType = (/^(?:checkbox|radio)$/i); + + + +(function() { + var fragment = document.createDocumentFragment(), + div = document.createElement("div"), + input = document.createElement("input"); + + // Setup + div.setAttribute( "className", "t" ); + div.innerHTML = "
a"; + + // IE strips leading whitespace when .innerHTML is used + support.leadingWhitespace = div.firstChild.nodeType === 3; + + // Make sure that tbody elements aren't automatically inserted + // IE will insert them into empty tables + support.tbody = !div.getElementsByTagName( "tbody" ).length; + + // Make sure that link elements get serialized correctly by innerHTML + // This requires a wrapper element in IE + support.htmlSerialize = !!div.getElementsByTagName( "link" ).length; + + // Makes sure cloning an html5 element does not cause problems + // Where outerHTML is undefined, this still works + support.html5Clone = + document.createElement( "nav" ).cloneNode( true ).outerHTML !== "<:nav>"; + + // Check if a disconnected checkbox will retain its checked + // value of true after appended to the DOM (IE6/7) + input.type = "checkbox"; + input.checked = true; + fragment.appendChild( input ); + support.appendChecked = input.checked; + + // Make sure textarea (and checkbox) defaultValue is properly cloned + // Support: IE6-IE11+ + div.innerHTML = ""; + support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue; + + // #11217 - WebKit loses check when the name is after the checked attribute + fragment.appendChild( div ); + div.innerHTML = ""; + + // Support: Safari 5.1, iOS 5.1, Android 4.x, Android 2.3 + // old WebKit doesn't clone checked state correctly in fragments + support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked; + + // Support: IE<9 + // Opera does not clone events (and typeof div.attachEvent === undefined). + // IE9-10 clones events bound via attachEvent, but they don't trigger with .click() + support.noCloneEvent = true; + if ( div.attachEvent ) { + div.attachEvent( "onclick", function() { + support.noCloneEvent = false; + }); + + div.cloneNode( true ).click(); + } + + // Execute the test only if not already executed in another module. 
+ if (support.deleteExpando == null) { + // Support: IE<9 + support.deleteExpando = true; + try { + delete div.test; + } catch( e ) { + support.deleteExpando = false; + } + } + + // Null elements to avoid leaks in IE. + fragment = div = input = null; +})(); + + +(function() { + var i, eventName, + div = document.createElement( "div" ); + + // Support: IE<9 (lack submit/change bubble), Firefox 23+ (lack focusin event) + for ( i in { submit: true, change: true, focusin: true }) { + eventName = "on" + i; + + if ( !(support[ i + "Bubbles" ] = eventName in window) ) { + // Beware of CSP restrictions (https://developer.mozilla.org/en/Security/CSP) + div.setAttribute( eventName, "t" ); + support[ i + "Bubbles" ] = div.attributes[ eventName ].expando === false; + } + } + + // Null elements to avoid leaks in IE. + div = null; +})(); + + +var rformElems = /^(?:input|select|textarea)$/i, + rkeyEvent = /^key/, + rmouseEvent = /^(?:mouse|contextmenu)|click/, + rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, + rtypenamespace = /^([^.]*)(?:\.(.+)|)$/; + +function returnTrue() { + return true; +} + +function returnFalse() { + return false; +} + +function safeActiveElement() { + try { + return document.activeElement; + } catch ( err ) { } +} + +/* + * Helper functions for managing events -- not part of the public interface. + * Props to Dean Edwards' addEvent library for many of the ideas. 
+ */ +jQuery.event = { + + global: {}, + + add: function( elem, types, handler, data, selector ) { + var tmp, events, t, handleObjIn, + special, eventHandle, handleObj, + handlers, type, namespaces, origType, + elemData = jQuery._data( elem ); + + // Don't attach events to noData or text/comment nodes (but allow plain objects) + if ( !elemData ) { + return; + } + + // Caller can pass in an object of custom data in lieu of the handler + if ( handler.handler ) { + handleObjIn = handler; + handler = handleObjIn.handler; + selector = handleObjIn.selector; + } + + // Make sure that the handler has a unique ID, used to find/remove it later + if ( !handler.guid ) { + handler.guid = jQuery.guid++; + } + + // Init the element's event structure and main handler, if this is the first + if ( !(events = elemData.events) ) { + events = elemData.events = {}; + } + if ( !(eventHandle = elemData.handle) ) { + eventHandle = elemData.handle = function( e ) { + // Discard the second event of a jQuery.event.trigger() and + // when an event is called after a page has unloaded + return typeof jQuery !== strundefined && (!e || jQuery.event.triggered !== e.type) ? + jQuery.event.dispatch.apply( eventHandle.elem, arguments ) : + undefined; + }; + // Add elem as a property of the handle fn to prevent a memory leak with IE non-native events + eventHandle.elem = elem; + } + + // Handle multiple events separated by a space + types = ( types || "" ).match( rnotwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[t] ) || []; + type = origType = tmp[1]; + namespaces = ( tmp[2] || "" ).split( "." ).sort(); + + // There *must* be a type, no attaching namespace-only handlers + if ( !type ) { + continue; + } + + // If event changes its type, use the special event handlers for the changed type + special = jQuery.event.special[ type ] || {}; + + // If selector defined, determine special event api type, otherwise given type + type = ( selector ? 
special.delegateType : special.bindType ) || type; + + // Update special based on newly reset type + special = jQuery.event.special[ type ] || {}; + + // handleObj is passed to all event handlers + handleObj = jQuery.extend({ + type: type, + origType: origType, + data: data, + handler: handler, + guid: handler.guid, + selector: selector, + needsContext: selector && jQuery.expr.match.needsContext.test( selector ), + namespace: namespaces.join(".") + }, handleObjIn ); + + // Init the event handler queue if we're the first + if ( !(handlers = events[ type ]) ) { + handlers = events[ type ] = []; + handlers.delegateCount = 0; + + // Only use addEventListener/attachEvent if the special events handler returns false + if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) { + // Bind the global event handler to the element + if ( elem.addEventListener ) { + elem.addEventListener( type, eventHandle, false ); + + } else if ( elem.attachEvent ) { + elem.attachEvent( "on" + type, eventHandle ); + } + } + } + + if ( special.add ) { + special.add.call( elem, handleObj ); + + if ( !handleObj.handler.guid ) { + handleObj.handler.guid = handler.guid; + } + } + + // Add to the element's handler list, delegates in front + if ( selector ) { + handlers.splice( handlers.delegateCount++, 0, handleObj ); + } else { + handlers.push( handleObj ); + } + + // Keep track of which events have ever been used, for event optimization + jQuery.event.global[ type ] = true; + } + + // Nullify elem to prevent memory leaks in IE + elem = null; + }, + + // Detach an event or set of events from an element + remove: function( elem, types, handler, selector, mappedTypes ) { + var j, handleObj, tmp, + origCount, t, events, + special, handlers, type, + namespaces, origType, + elemData = jQuery.hasData( elem ) && jQuery._data( elem ); + + if ( !elemData || !(events = elemData.events) ) { + return; + } + + // Once for each type.namespace in types; type may be omitted + 
types = ( types || "" ).match( rnotwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[t] ) || []; + type = origType = tmp[1]; + namespaces = ( tmp[2] || "" ).split( "." ).sort(); + + // Unbind all events (on this namespace, if provided) for the element + if ( !type ) { + for ( type in events ) { + jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); + } + continue; + } + + special = jQuery.event.special[ type ] || {}; + type = ( selector ? special.delegateType : special.bindType ) || type; + handlers = events[ type ] || []; + tmp = tmp[2] && new RegExp( "(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)" ); + + // Remove matching events + origCount = j = handlers.length; + while ( j-- ) { + handleObj = handlers[ j ]; + + if ( ( mappedTypes || origType === handleObj.origType ) && + ( !handler || handler.guid === handleObj.guid ) && + ( !tmp || tmp.test( handleObj.namespace ) ) && + ( !selector || selector === handleObj.selector || selector === "**" && handleObj.selector ) ) { + handlers.splice( j, 1 ); + + if ( handleObj.selector ) { + handlers.delegateCount--; + } + if ( special.remove ) { + special.remove.call( elem, handleObj ); + } + } + } + + // Remove generic event handler if we removed something and no more handlers exist + // (avoids potential for endless recursion during removal of special event handlers) + if ( origCount && !handlers.length ) { + if ( !special.teardown || special.teardown.call( elem, namespaces, elemData.handle ) === false ) { + jQuery.removeEvent( elem, type, elemData.handle ); + } + + delete events[ type ]; + } + } + + // Remove the expando if it's no longer used + if ( jQuery.isEmptyObject( events ) ) { + delete elemData.handle; + + // removeData also checks for emptiness and clears the expando if empty + // so use it instead of delete + jQuery._removeData( elem, "events" ); + } + }, + + trigger: function( event, data, elem, onlyHandlers ) { + var handle, ontype, cur, + 
bubbleType, special, tmp, i, + eventPath = [ elem || document ], + type = hasOwn.call( event, "type" ) ? event.type : event, + namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split(".") : []; + + cur = tmp = elem = elem || document; + + // Don't do events on text and comment nodes + if ( elem.nodeType === 3 || elem.nodeType === 8 ) { + return; + } + + // focus/blur morphs to focusin/out; ensure we're not firing them right now + if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { + return; + } + + if ( type.indexOf(".") >= 0 ) { + // Namespaced trigger; create a regexp to match event type in handle() + namespaces = type.split("."); + type = namespaces.shift(); + namespaces.sort(); + } + ontype = type.indexOf(":") < 0 && "on" + type; + + // Caller can pass in a jQuery.Event object, Object, or just an event type string + event = event[ jQuery.expando ] ? + event : + new jQuery.Event( type, typeof event === "object" && event ); + + // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) + event.isTrigger = onlyHandlers ? 2 : 3; + event.namespace = namespaces.join("."); + event.namespace_re = event.namespace ? + new RegExp( "(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)" ) : + null; + + // Clean up the event in case it is being reused + event.result = undefined; + if ( !event.target ) { + event.target = elem; + } + + // Clone any incoming data and prepend the event, creating the handler arg list + data = data == null ? 
+ [ event ] : + jQuery.makeArray( data, [ event ] ); + + // Allow special events to draw outside the lines + special = jQuery.event.special[ type ] || {}; + if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { + return; + } + + // Determine event propagation path in advance, per W3C events spec (#9951) + // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) + if ( !onlyHandlers && !special.noBubble && !jQuery.isWindow( elem ) ) { + + bubbleType = special.delegateType || type; + if ( !rfocusMorph.test( bubbleType + type ) ) { + cur = cur.parentNode; + } + for ( ; cur; cur = cur.parentNode ) { + eventPath.push( cur ); + tmp = cur; + } + + // Only add window if we got to document (e.g., not plain obj or detached DOM) + if ( tmp === (elem.ownerDocument || document) ) { + eventPath.push( tmp.defaultView || tmp.parentWindow || window ); + } + } + + // Fire handlers on the event path + i = 0; + while ( (cur = eventPath[i++]) && !event.isPropagationStopped() ) { + + event.type = i > 1 ? + bubbleType : + special.bindType || type; + + // jQuery handler + handle = ( jQuery._data( cur, "events" ) || {} )[ event.type ] && jQuery._data( cur, "handle" ); + if ( handle ) { + handle.apply( cur, data ); + } + + // Native handler + handle = ontype && cur[ ontype ]; + if ( handle && handle.apply && jQuery.acceptData( cur ) ) { + event.result = handle.apply( cur, data ); + if ( event.result === false ) { + event.preventDefault(); + } + } + } + event.type = type; + + // If nobody prevented the default action, do it now + if ( !onlyHandlers && !event.isDefaultPrevented() ) { + + if ( (!special._default || special._default.apply( eventPath.pop(), data ) === false) && + jQuery.acceptData( elem ) ) { + + // Call a native DOM method on the target with the same name name as the event. + // Can't use an .isFunction() check here because IE6/7 fails that test. 
+ // Don't do default actions on window, that's where global variables be (#6170) + if ( ontype && elem[ type ] && !jQuery.isWindow( elem ) ) { + + // Don't re-trigger an onFOO event when we call its FOO() method + tmp = elem[ ontype ]; + + if ( tmp ) { + elem[ ontype ] = null; + } + + // Prevent re-triggering of the same event, since we already bubbled it above + jQuery.event.triggered = type; + try { + elem[ type ](); + } catch ( e ) { + // IE<9 dies on focus/blur to hidden element (#1486,#12518) + // only reproducible on winXP IE8 native, not IE9 in IE8 mode + } + jQuery.event.triggered = undefined; + + if ( tmp ) { + elem[ ontype ] = tmp; + } + } + } + } + + return event.result; + }, + + dispatch: function( event ) { + + // Make a writable jQuery.Event from the native event object + event = jQuery.event.fix( event ); + + var i, ret, handleObj, matched, j, + handlerQueue = [], + args = slice.call( arguments ), + handlers = ( jQuery._data( this, "events" ) || {} )[ event.type ] || [], + special = jQuery.event.special[ event.type ] || {}; + + // Use the fix-ed jQuery.Event rather than the (read-only) native event + args[0] = event; + event.delegateTarget = this; + + // Call the preDispatch hook for the mapped type, and let it bail if desired + if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { + return; + } + + // Determine handlers + handlerQueue = jQuery.event.handlers.call( this, event, handlers ); + + // Run delegates first; they may want to stop propagation beneath us + i = 0; + while ( (matched = handlerQueue[ i++ ]) && !event.isPropagationStopped() ) { + event.currentTarget = matched.elem; + + j = 0; + while ( (handleObj = matched.handlers[ j++ ]) && !event.isImmediatePropagationStopped() ) { + + // Triggered event must either 1) have no namespace, or + // 2) have namespace(s) a subset or equal to those in the bound event (both can have no namespace). 
+ if ( !event.namespace_re || event.namespace_re.test( handleObj.namespace ) ) { + + event.handleObj = handleObj; + event.data = handleObj.data; + + ret = ( (jQuery.event.special[ handleObj.origType ] || {}).handle || handleObj.handler ) + .apply( matched.elem, args ); + + if ( ret !== undefined ) { + if ( (event.result = ret) === false ) { + event.preventDefault(); + event.stopPropagation(); + } + } + } + } + } + + // Call the postDispatch hook for the mapped type + if ( special.postDispatch ) { + special.postDispatch.call( this, event ); + } + + return event.result; + }, + + handlers: function( event, handlers ) { + var sel, handleObj, matches, i, + handlerQueue = [], + delegateCount = handlers.delegateCount, + cur = event.target; + + // Find delegate handlers + // Black-hole SVG instance trees (#13180) + // Avoid non-left-click bubbling in Firefox (#3861) + if ( delegateCount && cur.nodeType && (!event.button || event.type !== "click") ) { + + /* jshint eqeqeq: false */ + for ( ; cur != this; cur = cur.parentNode || this ) { + /* jshint eqeqeq: true */ + + // Don't check non-elements (#13208) + // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) + if ( cur.nodeType === 1 && (cur.disabled !== true || event.type !== "click") ) { + matches = []; + for ( i = 0; i < delegateCount; i++ ) { + handleObj = handlers[ i ]; + + // Don't conflict with Object.prototype properties (#13203) + sel = handleObj.selector + " "; + + if ( matches[ sel ] === undefined ) { + matches[ sel ] = handleObj.needsContext ? 
+ jQuery( sel, this ).index( cur ) >= 0 : + jQuery.find( sel, this, null, [ cur ] ).length; + } + if ( matches[ sel ] ) { + matches.push( handleObj ); + } + } + if ( matches.length ) { + handlerQueue.push({ elem: cur, handlers: matches }); + } + } + } + } + + // Add the remaining (directly-bound) handlers + if ( delegateCount < handlers.length ) { + handlerQueue.push({ elem: this, handlers: handlers.slice( delegateCount ) }); + } + + return handlerQueue; + }, + + fix: function( event ) { + if ( event[ jQuery.expando ] ) { + return event; + } + + // Create a writable copy of the event object and normalize some properties + var i, prop, copy, + type = event.type, + originalEvent = event, + fixHook = this.fixHooks[ type ]; + + if ( !fixHook ) { + this.fixHooks[ type ] = fixHook = + rmouseEvent.test( type ) ? this.mouseHooks : + rkeyEvent.test( type ) ? this.keyHooks : + {}; + } + copy = fixHook.props ? this.props.concat( fixHook.props ) : this.props; + + event = new jQuery.Event( originalEvent ); + + i = copy.length; + while ( i-- ) { + prop = copy[ i ]; + event[ prop ] = originalEvent[ prop ]; + } + + // Support: IE<9 + // Fix target property (#1925) + if ( !event.target ) { + event.target = originalEvent.srcElement || document; + } + + // Support: Chrome 23+, Safari? + // Target should not be a text node (#504, #13143) + if ( event.target.nodeType === 3 ) { + event.target = event.target.parentNode; + } + + // Support: IE<9 + // For mouse/key events, metaKey==false if it's undefined (#3368, #11328) + event.metaKey = !!event.metaKey; + + return fixHook.filter ? 
fixHook.filter( event, originalEvent ) : event; + }, + + // Includes some event props shared by KeyEvent and MouseEvent + props: "altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "), + + fixHooks: {}, + + keyHooks: { + props: "char charCode key keyCode".split(" "), + filter: function( event, original ) { + + // Add which for key events + if ( event.which == null ) { + event.which = original.charCode != null ? original.charCode : original.keyCode; + } + + return event; + } + }, + + mouseHooks: { + props: "button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "), + filter: function( event, original ) { + var body, eventDoc, doc, + button = original.button, + fromElement = original.fromElement; + + // Calculate pageX/Y if missing and clientX/Y available + if ( event.pageX == null && original.clientX != null ) { + eventDoc = event.target.ownerDocument || document; + doc = eventDoc.documentElement; + body = eventDoc.body; + + event.pageX = original.clientX + ( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) - ( doc && doc.clientLeft || body && body.clientLeft || 0 ); + event.pageY = original.clientY + ( doc && doc.scrollTop || body && body.scrollTop || 0 ) - ( doc && doc.clientTop || body && body.clientTop || 0 ); + } + + // Add relatedTarget, if necessary + if ( !event.relatedTarget && fromElement ) { + event.relatedTarget = fromElement === event.target ? original.toElement : fromElement; + } + + // Add which for click: 1 === left; 2 === middle; 3 === right + // Note: button is not normalized, so don't use it + if ( !event.which && button !== undefined ) { + event.which = ( button & 1 ? 1 : ( button & 2 ? 3 : ( button & 4 ? 
2 : 0 ) ) ); + } + + return event; + } + }, + + special: { + load: { + // Prevent triggered image.load events from bubbling to window.load + noBubble: true + }, + focus: { + // Fire native event if possible so blur/focus sequence is correct + trigger: function() { + if ( this !== safeActiveElement() && this.focus ) { + try { + this.focus(); + return false; + } catch ( e ) { + // Support: IE<9 + // If we error on focus to hidden element (#1486, #12518), + // let .trigger() run the handlers + } + } + }, + delegateType: "focusin" + }, + blur: { + trigger: function() { + if ( this === safeActiveElement() && this.blur ) { + this.blur(); + return false; + } + }, + delegateType: "focusout" + }, + click: { + // For checkbox, fire native event so checked state will be right + trigger: function() { + if ( jQuery.nodeName( this, "input" ) && this.type === "checkbox" && this.click ) { + this.click(); + return false; + } + }, + + // For cross-browser consistency, don't fire native .click() on links + _default: function( event ) { + return jQuery.nodeName( event.target, "a" ); + } + }, + + beforeunload: { + postDispatch: function( event ) { + + // Even when returnValue equals to undefined Firefox will still show alert + if ( event.result !== undefined ) { + event.originalEvent.returnValue = event.result; + } + } + } + }, + + simulate: function( type, elem, event, bubble ) { + // Piggyback on a donor event to simulate a different one. + // Fake originalEvent to avoid donor's stopPropagation, but if the + // simulated event prevents default then we do the same on the donor. + var e = jQuery.extend( + new jQuery.Event(), + event, + { + type: type, + isSimulated: true, + originalEvent: {} + } + ); + if ( bubble ) { + jQuery.event.trigger( e, null, elem ); + } else { + jQuery.event.dispatch.call( elem, e ); + } + if ( e.isDefaultPrevented() ) { + event.preventDefault(); + } + } +}; + +jQuery.removeEvent = document.removeEventListener ? 
+ function( elem, type, handle ) { + if ( elem.removeEventListener ) { + elem.removeEventListener( type, handle, false ); + } + } : + function( elem, type, handle ) { + var name = "on" + type; + + if ( elem.detachEvent ) { + + // #8545, #7054, preventing memory leaks for custom events in IE6-8 + // detachEvent needed property on element, by name of that event, to properly expose it to GC + if ( typeof elem[ name ] === strundefined ) { + elem[ name ] = null; + } + + elem.detachEvent( name, handle ); + } + }; + +jQuery.Event = function( src, props ) { + // Allow instantiation without the 'new' keyword + if ( !(this instanceof jQuery.Event) ) { + return new jQuery.Event( src, props ); + } + + // Event object + if ( src && src.type ) { + this.originalEvent = src; + this.type = src.type; + + // Events bubbling up the document may have been marked as prevented + // by a handler lower down the tree; reflect the correct value. + this.isDefaultPrevented = src.defaultPrevented || + src.defaultPrevented === undefined && ( + // Support: IE < 9 + src.returnValue === false || + // Support: Android < 4.0 + src.getPreventDefault && src.getPreventDefault() ) ? 
+ returnTrue : + returnFalse; + + // Event type + } else { + this.type = src; + } + + // Put explicitly provided properties onto the event object + if ( props ) { + jQuery.extend( this, props ); + } + + // Create a timestamp if incoming event doesn't have one + this.timeStamp = src && src.timeStamp || jQuery.now(); + + // Mark it as fixed + this[ jQuery.expando ] = true; +}; + +// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding +// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html +jQuery.Event.prototype = { + isDefaultPrevented: returnFalse, + isPropagationStopped: returnFalse, + isImmediatePropagationStopped: returnFalse, + + preventDefault: function() { + var e = this.originalEvent; + + this.isDefaultPrevented = returnTrue; + if ( !e ) { + return; + } + + // If preventDefault exists, run it on the original event + if ( e.preventDefault ) { + e.preventDefault(); + + // Support: IE + // Otherwise set the returnValue property of the original event to false + } else { + e.returnValue = false; + } + }, + stopPropagation: function() { + var e = this.originalEvent; + + this.isPropagationStopped = returnTrue; + if ( !e ) { + return; + } + // If stopPropagation exists, run it on the original event + if ( e.stopPropagation ) { + e.stopPropagation(); + } + + // Support: IE + // Set the cancelBubble property of the original event to true + e.cancelBubble = true; + }, + stopImmediatePropagation: function() { + this.isImmediatePropagationStopped = returnTrue; + this.stopPropagation(); + } +}; + +// Create mouseenter/leave events using mouseover/out and event-time checks +jQuery.each({ + mouseenter: "mouseover", + mouseleave: "mouseout" +}, function( orig, fix ) { + jQuery.event.special[ orig ] = { + delegateType: fix, + bindType: fix, + + handle: function( event ) { + var ret, + target = this, + related = event.relatedTarget, + handleObj = event.handleObj; + + // For mousenter/leave call the handler if 
related is outside the target. + // NB: No relatedTarget if the mouse left/entered the browser window + if ( !related || (related !== target && !jQuery.contains( target, related )) ) { + event.type = handleObj.origType; + ret = handleObj.handler.apply( this, arguments ); + event.type = fix; + } + return ret; + } + }; +}); + +// IE submit delegation +if ( !support.submitBubbles ) { + + jQuery.event.special.submit = { + setup: function() { + // Only need this for delegated form submit events + if ( jQuery.nodeName( this, "form" ) ) { + return false; + } + + // Lazy-add a submit handler when a descendant form may potentially be submitted + jQuery.event.add( this, "click._submit keypress._submit", function( e ) { + // Node name check avoids a VML-related crash in IE (#9807) + var elem = e.target, + form = jQuery.nodeName( elem, "input" ) || jQuery.nodeName( elem, "button" ) ? elem.form : undefined; + if ( form && !jQuery._data( form, "submitBubbles" ) ) { + jQuery.event.add( form, "submit._submit", function( event ) { + event._submit_bubble = true; + }); + jQuery._data( form, "submitBubbles", true ); + } + }); + // return undefined since we don't need an event listener + }, + + postDispatch: function( event ) { + // If form was submitted by the user, bubble the event up the tree + if ( event._submit_bubble ) { + delete event._submit_bubble; + if ( this.parentNode && !event.isTrigger ) { + jQuery.event.simulate( "submit", this.parentNode, event, true ); + } + } + }, + + teardown: function() { + // Only need this for delegated form submit events + if ( jQuery.nodeName( this, "form" ) ) { + return false; + } + + // Remove delegated handlers; cleanData eventually reaps submit handlers attached above + jQuery.event.remove( this, "._submit" ); + } + }; +} + +// IE change delegation and checkbox/radio fix +if ( !support.changeBubbles ) { + + jQuery.event.special.change = { + + setup: function() { + + if ( rformElems.test( this.nodeName ) ) { + // IE doesn't fire change on a 
check/radio until blur; trigger it on click + // after a propertychange. Eat the blur-change in special.change.handle. + // This still fires onchange a second time for check/radio after blur. + if ( this.type === "checkbox" || this.type === "radio" ) { + jQuery.event.add( this, "propertychange._change", function( event ) { + if ( event.originalEvent.propertyName === "checked" ) { + this._just_changed = true; + } + }); + jQuery.event.add( this, "click._change", function( event ) { + if ( this._just_changed && !event.isTrigger ) { + this._just_changed = false; + } + // Allow triggered, simulated change events (#11500) + jQuery.event.simulate( "change", this, event, true ); + }); + } + return false; + } + // Delegated event; lazy-add a change handler on descendant inputs + jQuery.event.add( this, "beforeactivate._change", function( e ) { + var elem = e.target; + + if ( rformElems.test( elem.nodeName ) && !jQuery._data( elem, "changeBubbles" ) ) { + jQuery.event.add( elem, "change._change", function( event ) { + if ( this.parentNode && !event.isSimulated && !event.isTrigger ) { + jQuery.event.simulate( "change", this.parentNode, event, true ); + } + }); + jQuery._data( elem, "changeBubbles", true ); + } + }); + }, + + handle: function( event ) { + var elem = event.target; + + // Swallow native change events from checkbox/radio, we already triggered them above + if ( this !== elem || event.isSimulated || event.isTrigger || (elem.type !== "radio" && elem.type !== "checkbox") ) { + return event.handleObj.handler.apply( this, arguments ); + } + }, + + teardown: function() { + jQuery.event.remove( this, "._change" ); + + return !rformElems.test( this.nodeName ); + } + }; +} + +// Create "bubbling" focus and blur events +if ( !support.focusinBubbles ) { + jQuery.each({ focus: "focusin", blur: "focusout" }, function( orig, fix ) { + + // Attach a single capturing handler on the document while someone wants focusin/focusout + var handler = function( event ) { + 
jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ), true ); + }; + + jQuery.event.special[ fix ] = { + setup: function() { + var doc = this.ownerDocument || this, + attaches = jQuery._data( doc, fix ); + + if ( !attaches ) { + doc.addEventListener( orig, handler, true ); + } + jQuery._data( doc, fix, ( attaches || 0 ) + 1 ); + }, + teardown: function() { + var doc = this.ownerDocument || this, + attaches = jQuery._data( doc, fix ) - 1; + + if ( !attaches ) { + doc.removeEventListener( orig, handler, true ); + jQuery._removeData( doc, fix ); + } else { + jQuery._data( doc, fix, attaches ); + } + } + }; + }); +} + +jQuery.fn.extend({ + + on: function( types, selector, data, fn, /*INTERNAL*/ one ) { + var type, origFn; + + // Types can be a map of types/handlers + if ( typeof types === "object" ) { + // ( types-Object, selector, data ) + if ( typeof selector !== "string" ) { + // ( types-Object, data ) + data = data || selector; + selector = undefined; + } + for ( type in types ) { + this.on( type, selector, data, types[ type ], one ); + } + return this; + } + + if ( data == null && fn == null ) { + // ( types, fn ) + fn = selector; + data = selector = undefined; + } else if ( fn == null ) { + if ( typeof selector === "string" ) { + // ( types, selector, fn ) + fn = data; + data = undefined; + } else { + // ( types, data, fn ) + fn = data; + data = selector; + selector = undefined; + } + } + if ( fn === false ) { + fn = returnFalse; + } else if ( !fn ) { + return this; + } + + if ( one === 1 ) { + origFn = fn; + fn = function( event ) { + // Can use an empty set, since event contains the info + jQuery().off( event ); + return origFn.apply( this, arguments ); + }; + // Use same guid so caller can remove using origFn + fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); + } + return this.each( function() { + jQuery.event.add( this, types, fn, data, selector ); + }); + }, + one: function( types, selector, data, fn ) { + return this.on( types, 
selector, data, fn, 1 ); + }, + off: function( types, selector, fn ) { + var handleObj, type; + if ( types && types.preventDefault && types.handleObj ) { + // ( event ) dispatched jQuery.Event + handleObj = types.handleObj; + jQuery( types.delegateTarget ).off( + handleObj.namespace ? handleObj.origType + "." + handleObj.namespace : handleObj.origType, + handleObj.selector, + handleObj.handler + ); + return this; + } + if ( typeof types === "object" ) { + // ( types-object [, selector] ) + for ( type in types ) { + this.off( type, selector, types[ type ] ); + } + return this; + } + if ( selector === false || typeof selector === "function" ) { + // ( types [, fn] ) + fn = selector; + selector = undefined; + } + if ( fn === false ) { + fn = returnFalse; + } + return this.each(function() { + jQuery.event.remove( this, types, fn, selector ); + }); + }, + + trigger: function( type, data ) { + return this.each(function() { + jQuery.event.trigger( type, data, this ); + }); + }, + triggerHandler: function( type, data ) { + var elem = this[0]; + if ( elem ) { + return jQuery.event.trigger( type, data, elem, true ); + } + } +}); + + +function createSafeFragment( document ) { + var list = nodeNames.split( "|" ), + safeFrag = document.createDocumentFragment(); + + if ( safeFrag.createElement ) { + while ( list.length ) { + safeFrag.createElement( + list.pop() + ); + } + } + return safeFrag; +} + +var nodeNames = "abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|" + + "header|hgroup|mark|meter|nav|output|progress|section|summary|time|video", + rinlinejQuery = / jQuery\d+="(?:null|\d+)"/g, + rnoshimcache = new RegExp("<(?:" + nodeNames + ")[\\s/>]", "i"), + rleadingWhitespace = /^\s+/, + rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi, + rtagName = /<([\w:]+)/, + rtbody = /\s*$/g, + + // We have to close these tags to support XHTML (#13200) + wrapMap = { + option: [ 1, "" ], + legend: [ 1, "
", "
" ], + area: [ 1, "", "" ], + param: [ 1, "", "" ], + thead: [ 1, "", "
" ], + tr: [ 2, "", "
" ], + col: [ 2, "", "
" ], + td: [ 3, "", "
" ], + + // IE6-8 can't serialize link, script, style, or any html5 (NoScope) tags, + // unless wrapped in a div with non-breaking characters in front of it. + _default: support.htmlSerialize ? [ 0, "", "" ] : [ 1, "X
", "
" ] + }, + safeFragment = createSafeFragment( document ), + fragmentDiv = safeFragment.appendChild( document.createElement("div") ); + +wrapMap.optgroup = wrapMap.option; +wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; +wrapMap.th = wrapMap.td; + +function getAll( context, tag ) { + var elems, elem, + i = 0, + found = typeof context.getElementsByTagName !== strundefined ? context.getElementsByTagName( tag || "*" ) : + typeof context.querySelectorAll !== strundefined ? context.querySelectorAll( tag || "*" ) : + undefined; + + if ( !found ) { + for ( found = [], elems = context.childNodes || context; (elem = elems[i]) != null; i++ ) { + if ( !tag || jQuery.nodeName( elem, tag ) ) { + found.push( elem ); + } else { + jQuery.merge( found, getAll( elem, tag ) ); + } + } + } + + return tag === undefined || tag && jQuery.nodeName( context, tag ) ? + jQuery.merge( [ context ], found ) : + found; +} + +// Used in buildFragment, fixes the defaultChecked property +function fixDefaultChecked( elem ) { + if ( rcheckableType.test( elem.type ) ) { + elem.defaultChecked = elem.checked; + } +} + +// Support: IE<8 +// Manipulating tables requires a tbody +function manipulationTarget( elem, content ) { + return jQuery.nodeName( elem, "table" ) && + jQuery.nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ? 
+ + elem.getElementsByTagName("tbody")[0] || + elem.appendChild( elem.ownerDocument.createElement("tbody") ) : + elem; +} + +// Replace/restore the type attribute of script elements for safe DOM manipulation +function disableScript( elem ) { + elem.type = (jQuery.find.attr( elem, "type" ) !== null) + "/" + elem.type; + return elem; +} +function restoreScript( elem ) { + var match = rscriptTypeMasked.exec( elem.type ); + if ( match ) { + elem.type = match[1]; + } else { + elem.removeAttribute("type"); + } + return elem; +} + +// Mark scripts as having already been evaluated +function setGlobalEval( elems, refElements ) { + var elem, + i = 0; + for ( ; (elem = elems[i]) != null; i++ ) { + jQuery._data( elem, "globalEval", !refElements || jQuery._data( refElements[i], "globalEval" ) ); + } +} + +function cloneCopyEvent( src, dest ) { + + if ( dest.nodeType !== 1 || !jQuery.hasData( src ) ) { + return; + } + + var type, i, l, + oldData = jQuery._data( src ), + curData = jQuery._data( dest, oldData ), + events = oldData.events; + + if ( events ) { + delete curData.handle; + curData.events = {}; + + for ( type in events ) { + for ( i = 0, l = events[ type ].length; i < l; i++ ) { + jQuery.event.add( dest, type, events[ type ][ i ] ); + } + } + } + + // make the cloned public data object a copy from the original + if ( curData.data ) { + curData.data = jQuery.extend( {}, curData.data ); + } +} + +function fixCloneNodeIssues( src, dest ) { + var nodeName, e, data; + + // We do not need to do anything for non-Elements + if ( dest.nodeType !== 1 ) { + return; + } + + nodeName = dest.nodeName.toLowerCase(); + + // IE6-8 copies events bound via attachEvent when using cloneNode. 
+ if ( !support.noCloneEvent && dest[ jQuery.expando ] ) { + data = jQuery._data( dest ); + + for ( e in data.events ) { + jQuery.removeEvent( dest, e, data.handle ); + } + + // Event data gets referenced instead of copied if the expando gets copied too + dest.removeAttribute( jQuery.expando ); + } + + // IE blanks contents when cloning scripts, and tries to evaluate newly-set text + if ( nodeName === "script" && dest.text !== src.text ) { + disableScript( dest ).text = src.text; + restoreScript( dest ); + + // IE6-10 improperly clones children of object elements using classid. + // IE10 throws NoModificationAllowedError if parent is null, #12132. + } else if ( nodeName === "object" ) { + if ( dest.parentNode ) { + dest.outerHTML = src.outerHTML; + } + + // This path appears unavoidable for IE9. When cloning an object + // element in IE9, the outerHTML strategy above is not sufficient. + // If the src has innerHTML and the destination does not, + // copy the src.innerHTML into the dest.innerHTML. #10324 + if ( support.html5Clone && ( src.innerHTML && !jQuery.trim(dest.innerHTML) ) ) { + dest.innerHTML = src.innerHTML; + } + + } else if ( nodeName === "input" && rcheckableType.test( src.type ) ) { + // IE6-8 fails to persist the checked state of a cloned checkbox + // or radio button. 
Worse, IE6-7 fail to give the cloned element + // a checked appearance if the defaultChecked value isn't also set + + dest.defaultChecked = dest.checked = src.checked; + + // IE6-7 get confused and end up setting the value of a cloned + // checkbox/radio button to an empty string instead of "on" + if ( dest.value !== src.value ) { + dest.value = src.value; + } + + // IE6-8 fails to return the selected option to the default selected + // state when cloning options + } else if ( nodeName === "option" ) { + dest.defaultSelected = dest.selected = src.defaultSelected; + + // IE6-8 fails to set the defaultValue to the correct value when + // cloning other types of input fields + } else if ( nodeName === "input" || nodeName === "textarea" ) { + dest.defaultValue = src.defaultValue; + } +} + +jQuery.extend({ + clone: function( elem, dataAndEvents, deepDataAndEvents ) { + var destElements, node, clone, i, srcElements, + inPage = jQuery.contains( elem.ownerDocument, elem ); + + if ( support.html5Clone || jQuery.isXMLDoc(elem) || !rnoshimcache.test( "<" + elem.nodeName + ">" ) ) { + clone = elem.cloneNode( true ); + + // IE<=8 does not properly clone detached, unknown element nodes + } else { + fragmentDiv.innerHTML = elem.outerHTML; + fragmentDiv.removeChild( clone = fragmentDiv.firstChild ); + } + + if ( (!support.noCloneEvent || !support.noCloneChecked) && + (elem.nodeType === 1 || elem.nodeType === 11) && !jQuery.isXMLDoc(elem) ) { + + // We eschew Sizzle here for performance reasons: http://jsperf.com/getall-vs-sizzle/2 + destElements = getAll( clone ); + srcElements = getAll( elem ); + + // Fix all IE cloning issues + for ( i = 0; (node = srcElements[i]) != null; ++i ) { + // Ensure that the destination node is not null; Fixes #9587 + if ( destElements[i] ) { + fixCloneNodeIssues( node, destElements[i] ); + } + } + } + + // Copy the events from the original to the clone + if ( dataAndEvents ) { + if ( deepDataAndEvents ) { + srcElements = srcElements || getAll( elem ); 
+ destElements = destElements || getAll( clone ); + + for ( i = 0; (node = srcElements[i]) != null; i++ ) { + cloneCopyEvent( node, destElements[i] ); + } + } else { + cloneCopyEvent( elem, clone ); + } + } + + // Preserve script evaluation history + destElements = getAll( clone, "script" ); + if ( destElements.length > 0 ) { + setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); + } + + destElements = srcElements = node = null; + + // Return the cloned set + return clone; + }, + + buildFragment: function( elems, context, scripts, selection ) { + var j, elem, contains, + tmp, tag, tbody, wrap, + l = elems.length, + + // Ensure a safe fragment + safe = createSafeFragment( context ), + + nodes = [], + i = 0; + + for ( ; i < l; i++ ) { + elem = elems[ i ]; + + if ( elem || elem === 0 ) { + + // Add nodes directly + if ( jQuery.type( elem ) === "object" ) { + jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem ); + + // Convert non-html into a text node + } else if ( !rhtml.test( elem ) ) { + nodes.push( context.createTextNode( elem ) ); + + // Convert html into DOM nodes + } else { + tmp = tmp || safe.appendChild( context.createElement("div") ); + + // Deserialize a standard representation + tag = (rtagName.exec( elem ) || [ "", "" ])[ 1 ].toLowerCase(); + wrap = wrapMap[ tag ] || wrapMap._default; + + tmp.innerHTML = wrap[1] + elem.replace( rxhtmlTag, "<$1>" ) + wrap[2]; + + // Descend through wrappers to the right content + j = wrap[0]; + while ( j-- ) { + tmp = tmp.lastChild; + } + + // Manually add leading whitespace removed by IE + if ( !support.leadingWhitespace && rleadingWhitespace.test( elem ) ) { + nodes.push( context.createTextNode( rleadingWhitespace.exec( elem )[0] ) ); + } + + // Remove IE's autoinserted from table fragments + if ( !support.tbody ) { + + // String was a , *may* have spurious + elem = tag === "table" && !rtbody.test( elem ) ? + tmp.firstChild : + + // String was a bare or + wrap[1] === "
" && !rtbody.test( elem ) ? + tmp : + 0; + + j = elem && elem.childNodes.length; + while ( j-- ) { + if ( jQuery.nodeName( (tbody = elem.childNodes[j]), "tbody" ) && !tbody.childNodes.length ) { + elem.removeChild( tbody ); + } + } + } + + jQuery.merge( nodes, tmp.childNodes ); + + // Fix #12392 for WebKit and IE > 9 + tmp.textContent = ""; + + // Fix #12392 for oldIE + while ( tmp.firstChild ) { + tmp.removeChild( tmp.firstChild ); + } + + // Remember the top-level container for proper cleanup + tmp = safe.lastChild; + } + } + } + + // Fix #11356: Clear elements from fragment + if ( tmp ) { + safe.removeChild( tmp ); + } + + // Reset defaultChecked for any radios and checkboxes + // about to be appended to the DOM in IE 6/7 (#8060) + if ( !support.appendChecked ) { + jQuery.grep( getAll( nodes, "input" ), fixDefaultChecked ); + } + + i = 0; + while ( (elem = nodes[ i++ ]) ) { + + // #4087 - If origin and destination elements are the same, and this is + // that element, do not do anything + if ( selection && jQuery.inArray( elem, selection ) !== -1 ) { + continue; + } + + contains = jQuery.contains( elem.ownerDocument, elem ); + + // Append to fragment + tmp = getAll( safe.appendChild( elem ), "script" ); + + // Preserve script evaluation history + if ( contains ) { + setGlobalEval( tmp ); + } + + // Capture executables + if ( scripts ) { + j = 0; + while ( (elem = tmp[ j++ ]) ) { + if ( rscriptType.test( elem.type || "" ) ) { + scripts.push( elem ); + } + } + } + } + + tmp = null; + + return safe; + }, + + cleanData: function( elems, /* internal */ acceptData ) { + var elem, type, id, data, + i = 0, + internalKey = jQuery.expando, + cache = jQuery.cache, + deleteExpando = support.deleteExpando, + special = jQuery.event.special; + + for ( ; (elem = elems[i]) != null; i++ ) { + if ( acceptData || jQuery.acceptData( elem ) ) { + + id = elem[ internalKey ]; + data = id && cache[ id ]; + + if ( data ) { + if ( data.events ) { + for ( type in data.events ) { + if ( 
special[ type ] ) { + jQuery.event.remove( elem, type ); + + // This is a shortcut to avoid jQuery.event.remove's overhead + } else { + jQuery.removeEvent( elem, type, data.handle ); + } + } + } + + // Remove cache only if it was not already removed by jQuery.event.remove + if ( cache[ id ] ) { + + delete cache[ id ]; + + // IE does not allow us to delete expando properties from nodes, + // nor does it have a removeAttribute function on Document nodes; + // we must handle all of these cases + if ( deleteExpando ) { + delete elem[ internalKey ]; + + } else if ( typeof elem.removeAttribute !== strundefined ) { + elem.removeAttribute( internalKey ); + + } else { + elem[ internalKey ] = null; + } + + deletedIds.push( id ); + } + } + } + } + } +}); + +jQuery.fn.extend({ + text: function( value ) { + return access( this, function( value ) { + return value === undefined ? + jQuery.text( this ) : + this.empty().append( ( this[0] && this[0].ownerDocument || document ).createTextNode( value ) ); + }, null, value, arguments.length ); + }, + + append: function() { + return this.domManip( arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.appendChild( elem ); + } + }); + }, + + prepend: function() { + return this.domManip( arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.insertBefore( elem, target.firstChild ); + } + }); + }, + + before: function() { + return this.domManip( arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this ); + } + }); + }, + + after: function() { + return this.domManip( arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this.nextSibling ); + } + }); + }, + + remove: function( selector, keepData /* Internal Use Only */ ) { + var elem, + 
elems = selector ? jQuery.filter( selector, this ) : this, + i = 0; + + for ( ; (elem = elems[i]) != null; i++ ) { + + if ( !keepData && elem.nodeType === 1 ) { + jQuery.cleanData( getAll( elem ) ); + } + + if ( elem.parentNode ) { + if ( keepData && jQuery.contains( elem.ownerDocument, elem ) ) { + setGlobalEval( getAll( elem, "script" ) ); + } + elem.parentNode.removeChild( elem ); + } + } + + return this; + }, + + empty: function() { + var elem, + i = 0; + + for ( ; (elem = this[i]) != null; i++ ) { + // Remove element nodes and prevent memory leaks + if ( elem.nodeType === 1 ) { + jQuery.cleanData( getAll( elem, false ) ); + } + + // Remove any remaining nodes + while ( elem.firstChild ) { + elem.removeChild( elem.firstChild ); + } + + // If this is a select, ensure that it displays empty (#12336) + // Support: IE<9 + if ( elem.options && jQuery.nodeName( elem, "select" ) ) { + elem.options.length = 0; + } + } + + return this; + }, + + clone: function( dataAndEvents, deepDataAndEvents ) { + dataAndEvents = dataAndEvents == null ? false : dataAndEvents; + deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents; + + return this.map(function() { + return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); + }); + }, + + html: function( value ) { + return access( this, function( value ) { + var elem = this[ 0 ] || {}, + i = 0, + l = this.length; + + if ( value === undefined ) { + return elem.nodeType === 1 ? 
+ elem.innerHTML.replace( rinlinejQuery, "" ) : + undefined; + } + + // See if we can take a shortcut and just use innerHTML + if ( typeof value === "string" && !rnoInnerhtml.test( value ) && + ( support.htmlSerialize || !rnoshimcache.test( value ) ) && + ( support.leadingWhitespace || !rleadingWhitespace.test( value ) ) && + !wrapMap[ (rtagName.exec( value ) || [ "", "" ])[ 1 ].toLowerCase() ] ) { + + value = value.replace( rxhtmlTag, "<$1>" ); + + try { + for (; i < l; i++ ) { + // Remove element nodes and prevent memory leaks + elem = this[i] || {}; + if ( elem.nodeType === 1 ) { + jQuery.cleanData( getAll( elem, false ) ); + elem.innerHTML = value; + } + } + + elem = 0; + + // If using innerHTML throws an exception, use the fallback method + } catch(e) {} + } + + if ( elem ) { + this.empty().append( value ); + } + }, null, value, arguments.length ); + }, + + replaceWith: function() { + var arg = arguments[ 0 ]; + + // Make the changes, replacing each context element with the new content + this.domManip( arguments, function( elem ) { + arg = this.parentNode; + + jQuery.cleanData( getAll( this ) ); + + if ( arg ) { + arg.replaceChild( elem, this ); + } + }); + + // Force removal if there was no new content (e.g., from empty arguments) + return arg && (arg.length || arg.nodeType) ? 
this : this.remove(); + }, + + detach: function( selector ) { + return this.remove( selector, true ); + }, + + domManip: function( args, callback ) { + + // Flatten any nested arrays + args = concat.apply( [], args ); + + var first, node, hasScripts, + scripts, doc, fragment, + i = 0, + l = this.length, + set = this, + iNoClone = l - 1, + value = args[0], + isFunction = jQuery.isFunction( value ); + + // We can't cloneNode fragments that contain checked, in WebKit + if ( isFunction || + ( l > 1 && typeof value === "string" && + !support.checkClone && rchecked.test( value ) ) ) { + return this.each(function( index ) { + var self = set.eq( index ); + if ( isFunction ) { + args[0] = value.call( this, index, self.html() ); + } + self.domManip( args, callback ); + }); + } + + if ( l ) { + fragment = jQuery.buildFragment( args, this[ 0 ].ownerDocument, false, this ); + first = fragment.firstChild; + + if ( fragment.childNodes.length === 1 ) { + fragment = first; + } + + if ( first ) { + scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); + hasScripts = scripts.length; + + // Use the original fragment for the last item instead of the first because it can end up + // being emptied incorrectly in certain situations (#8070). 
+ for ( ; i < l; i++ ) { + node = fragment; + + if ( i !== iNoClone ) { + node = jQuery.clone( node, true, true ); + + // Keep references to cloned scripts for later restoration + if ( hasScripts ) { + jQuery.merge( scripts, getAll( node, "script" ) ); + } + } + + callback.call( this[i], node, i ); + } + + if ( hasScripts ) { + doc = scripts[ scripts.length - 1 ].ownerDocument; + + // Reenable scripts + jQuery.map( scripts, restoreScript ); + + // Evaluate executable scripts on first document insertion + for ( i = 0; i < hasScripts; i++ ) { + node = scripts[ i ]; + if ( rscriptType.test( node.type || "" ) && + !jQuery._data( node, "globalEval" ) && jQuery.contains( doc, node ) ) { + + if ( node.src ) { + // Optional AJAX dependency, but won't run scripts if not present + if ( jQuery._evalUrl ) { + jQuery._evalUrl( node.src ); + } + } else { + jQuery.globalEval( ( node.text || node.textContent || node.innerHTML || "" ).replace( rcleanScript, "" ) ); + } + } + } + } + + // Fix #11809: Avoid leaking memory + fragment = first = null; + } + } + + return this; + } +}); + +jQuery.each({ + appendTo: "append", + prependTo: "prepend", + insertBefore: "before", + insertAfter: "after", + replaceAll: "replaceWith" +}, function( name, original ) { + jQuery.fn[ name ] = function( selector ) { + var elems, + i = 0, + ret = [], + insert = jQuery( selector ), + last = insert.length - 1; + + for ( ; i <= last; i++ ) { + elems = i === last ? 
this : this.clone(true); + jQuery( insert[i] )[ original ]( elems ); + + // Modern browsers can apply jQuery collections as arrays, but oldIE needs a .get() + push.apply( ret, elems.get() ); + } + + return this.pushStack( ret ); + }; +}); + + +var iframe, + elemdisplay = {}; + +/** + * Retrieve the actual display of a element + * @param {String} name nodeName of the element + * @param {Object} doc Document object + */ +// Called only from within defaultDisplay +function actualDisplay( name, doc ) { + var elem = jQuery( doc.createElement( name ) ).appendTo( doc.body ), + + // getDefaultComputedStyle might be reliably used only on attached element + display = window.getDefaultComputedStyle ? + + // Use of this method is a temporary fix (more like optmization) until something better comes along, + // since it was removed from specification and supported only in FF + window.getDefaultComputedStyle( elem[ 0 ] ).display : jQuery.css( elem[ 0 ], "display" ); + + // We don't have any data stored on the element, + // so use "detach" method as fast way to get rid of the element + elem.detach(); + + return display; +} + +/** + * Try to determine the default display value of an element + * @param {String} nodeName + */ +function defaultDisplay( nodeName ) { + var doc = document, + display = elemdisplay[ nodeName ]; + + if ( !display ) { + display = actualDisplay( nodeName, doc ); + + // If the simple way fails, read from inside an iframe + if ( display === "none" || !display ) { + + // Use the already-created iframe if possible + iframe = (iframe || jQuery( " diff --git a/docs/development/workflow/this_project.inc b/docs/development/workflow/this_project.inc new file mode 100644 index 0000000..453ebb4 --- /dev/null +++ b/docs/development/workflow/this_project.inc @@ -0,0 +1,2 @@ +.. 
_`Astropy GitHub`: http://github.com/astropy/astropy + diff --git a/docs/development/workflow/virtual_pythons.rst b/docs/development/workflow/virtual_pythons.rst new file mode 100644 index 0000000..66070cc --- /dev/null +++ b/docs/development/workflow/virtual_pythons.rst @@ -0,0 +1,189 @@ +:orphan: + +.. include:: links.inc +.. _virtual_envs: + +=========================== +Python virtual environments +=========================== + +If you plan to do regular work on astropy you should do your development in +a python virtual environment. Conceptually a virtual environment is a +duplicate of the python environment you normally work in with as many (or as +few) of the packages from your normal environment included in that virtual +environment. It is sandboxed from your normal python environment in the sense +that packages installed in the virtual environment do not affect your normal +environment in any way. + +.. note:: + "Normal python environment" means whatever python you are using when you + log in. + +There are two options for using virtual environments; the choice of method is +dictated by the python distribution you use: + +* If you use the anaconda python distribution you must use `conda`_ to make + and manage your virtual environments. +* If you use any other distribution you use `virtualenvwrapper`_; you *can not* + use `conda`_. As the name suggests, `virtualenvwrapper`_ is a wrapper around + `virtualenv`_. + +In both cases you will go through the same basic steps; the commands to +accomplish each step are given for both `conda`_ and `virtualenvwrapper`_: + +* :ref:`setup_for_env` +* :ref:`list_env` +* :ref:`create_env` +* :ref:`activate_env` +* :ref:`deactivate_env` +* :ref:`delete_env` + +.. note:: + + You **cannot** use `virtualenvwrapper`_ or `virtualenv`_ within anaconda. + + `virtualenvwrapper`_ works with bash and bash-like shells; see + :ref:`using-virtualenv` for alternatives. + +.. 
_setup_for_env: + + +Set up for virtual environments +------------------------------- + +* `virtualenvwrapper`_: + + + First, install `virtualenvwrapper`_, which will also install `virtualenv`_, + with ``pip install virtualenvwrapper``. + + From the `documentation for virtualenvwrapper`_, you also need to:: + + export WORKON_HOME=$HOME/.virtualenvs + export PROJECT_HOME=$HOME/ + source /usr/local/bin/virtualenvwrapper.sh + +* `conda`_: No setup is necessary beyond installing the anaconda python + distribution. + +.. _list_env: + +List virtual environments +------------------------- + +You do not need to list the virtual environments you have created before using +them...but sooner or later you will forget what environments you have defined +and this is the easy way to find out. + +* `virtualenvwrapper`_: ``workon`` + + If this displays nothing you have no virtual environments + + If this displays ``workon: command not found`` then you haven't done + the :ref:`setup_for_env`; do that. + + For more detailed information about installed environments use + ``lsvirtualenv``. +* `conda`_: ``conda info -e`` + + you will always have at least one environment, called ``root`` + + your active environment is indicated by a ``*`` + +.. _create_env: + +Create a new virtual environment +-------------------------------- + +This needs to be done once for each virtual environment you want. There is one +important choice you need to make when you create a virtual environment: +which, if any, of the packages installed in your normal python environment do +you want in your virtual environment? + +Including them in your virtual environment doesn't take much extra space--they +are linked into the virtual environment instead of being copied. Within the +virtual environment you can install new versions of packages like Numpy or +Astropy that override the versions installed in your normal python environment. 
+ +The easiest way to get started is to include in your virtual environment the +packages installed in your normal python environment; the instructions +below do that. + +In everything that follows, ``ENV`` represents the name you give your virtual +environment. + +**The name you choose cannot have spaces in it.** + +* `virtualenvwrapper`_: + + Make an environment called ``ENV`` with all of the packages in your normal + python environment:: + + ``mkvirtualenv --system-site-packages ENV`` + + + Omit the option ``--system-site-packages`` to create an environment + without the python packages installed in your normal python environment. + + Environments created with `virtualenvwrapper`_ always include `pip`_ + and `setuptools `_ so that you + can install packages within the virtual environment. + + More details and examples are in the + `virtualenvwrapper command documentation`_. +* `conda`_: + + Make an environment called ``ENV`` with all of the packages in your main + anaconda environment:: + + ``conda create -n ENV anaconda`` + + + More details, and examples that start with none of the packages from + your normal python environment, are in the + `documentation for the conda command`_ and the + `blog post announcing anaconda environments`_. + +.. _activate_env: + +Activate a virtual environment +------------------------------ + +To use a new virtual environment you may need to activate it; +`virtualenvwrapper`_ will try to automatically activate your new environment +when you create it. Activation does two things (either of which you could do +manually, though it would be inconvenient): + +* Put the ``bin`` directory for the virtual environment at the front of your + ``$PATH``. +* Add the name of the virtual environment to your command prompt. 
If you have + successfully switched to a new environment called ``ENV`` your prompt should + look something like this: ``(ENV)[~] $`` + +The commands below allow you to switch between virtual environments in +addition to activating new ones. + +* `virtualenvwrapper`_: Activate the environment ``ENV`` with:: + + workon ENV + +* `conda`_: Activate the environment ``ENV`` with:: + + source activate ENV + + +.. _deactivate_env: + +Deactivate a virtual environment +-------------------------------- + +At some point you may want to go back to your normal python environment. Do +that with: + +* `virtualenvwrapper`_: ``deactivate`` + + Note that in ``virtualenvwrapper 4.1.1`` the output of + ``mkvirtualenv`` says you should use ``source deactivate``; that does + not seem to actually work. +* `conda`_: ``source deactivate`` + +.. _delete_env: + +Delete a virtual environment +---------------------------- + +In both `virtualenvwrapper`_ and `conda`_ you can simply delete the directory in +which the ``ENV`` is located; both also provide commands to make that a bit easier. + +* `virtualenvwrapper`_: ``rmvirtualenv ENV`` +* `conda`_: ``conda remove --all -n ENV`` + +.. _documentation for virtualenvwrapper: http://virtualenvwrapper.readthedocs.org/en/latest/install.html +.. _virtualenvwrapper command documentation: http://virtualenvwrapper.readthedocs.org/en/latest/command_ref.html +.. _documentation for the conda command: http://docs.continuum.io/conda/examples/create.html +.. _blog post announcing anaconda environments: http://www.continuum.io/blog/conda + diff --git a/docs/development/workflow/virtualenv_detail.rst b/docs/development/workflow/virtualenv_detail.rst new file mode 100644 index 0000000..3884e81 --- /dev/null +++ b/docs/development/workflow/virtualenv_detail.rst @@ -0,0 +1,164 @@ +:orphan: + +.. 
_using-virtualenv: + +Using virtualenv +================ + +`virtualenv`_ is a tool for creating and activating isolated Python +environments that allow installing and experimenting with Python packages +without disrupting your production Python environment. When using commands +such as ``python setup.py develop``, for example, it is strongly recommended to +do so within a virtualenv. This is generally preferable to installing a +development version of Astropy into your system site-packages and having to +keep track of whether or not your environment is in a "known good" +configuration for production/science use. + +Using a virtualenv is also a good way to try out new versions of software that +you're not actively doing development work on without disrupting your normal +production environment. + +We won't provide a full tutorial on using virtualenv here |emdash| the +virtualenv documentation linked to above is a better place to start. But here +is a quick overview on how to set up a virtualenv for Astropy development with +your default Python version: + +#. Install virtualenv:: + + $ pip install virtualenv + + or:: + + $ easy_install virtualenv + + or (on Debian/Ubuntu):: + + $ sudo apt-get install python-virtualenv + + etc. + +#. (Recommended) Create a root directory for all your virtualenvs under a path + you have write access to. For example:: + + $ mkdir ~/.virtualenvs + +#. Create the Astropy virtualenv:: + + $ virtualenv --distribute --system-site-packages ~/.virtualenvs/astropy-dev + + The ``--system-site-packages`` option inherits all packages already + installed in your system site-packages directory; this frees you from having + to reinstall packages like Numpy and Scipy in the virtualenv. However, if + you would like your virtualenv to use a development version of Numpy, for + example, you can still install Numpy into the virtualenv and it will take + precedence over the version installed in site-packages. + +#. 
Activate the virtualenv:: + + $ source ~/.virtualenvs/astropy-dev/bin/activate + + or if you're using a csh-variant:: + + $ source ~/.virtualenvs/astropy-dev/bin/activate.csh + + virtualenv works on Windows too |emdash| see the documentation for details. + +#. If the virtualenv successfully activated its name should appear in your + shell prompt:: + + (astropy-dev) $ + + The virtualenv can be disabled at any time by entering:: + + (astropy-dev) $ deactivate + +#. Now as long as the virtualenv is activated, packages you install with + ``pip``, ``easy_install``, or by manually running ``python setup.py + install`` will automatically install into your virtualenv instead of the + system site-packages. Consider installing Astropy in develop mode into the + virtualenv as described :ref:`activate_development_astropy`. + +Using virtualenv with IPython +----------------------------- + +.. note:: + + As of IPython 0.13 this functionality is built into IPython and these steps + are not necessary for IPython to recognize that it's running with a + virtualenv enabled. + +Each virtualenv has its own ``bin/``, and as IPython is written in pure Python +one can always install IPython directly into a virtualenv. However, if you +would rather not have to install IPython every time you create a virtualenv, it +also suffices to make IPython virtualenv-aware. + +1. Check to see if you already have an IPython profile in + ``~/.ipython/profile_default/``; if not, create one:: + + $ ipython profile create + +2. Edit ``~/.ipython/profile_default/ipython_config.py`` and add the + following to the end:: + + import os + + execfile(os.path.join(os.environ['HOME'], '.ipython', 'virtualenv.py')) + +3. 
Finally, create the ``~/.ipython/virtualenv.py`` module:: + + import site + from os import environ + from os.path import join + from sys import version_info + + if 'VIRTUAL_ENV' in environ: + virtual_env = join(environ.get('VIRTUAL_ENV'), + 'lib', + 'python%d.%d' % version_info[:2], + 'site-packages') + site.addsitedir(virtual_env) + print 'VIRTUAL_ENV ->', virtual_env + del virtual_env + del site, environ, join, version_info + +Now IPython will import all packages from your virtualenv where applicable. + +.. note:: + + This is not magic. If you switch to a virtualenv that uses a different + Python version from your main IPython installation this won't help you + |emdash| instead use the appropriate IPython installation for the Python + version in question. + +virtualenvwrapper +----------------- + +`virtualenvwrapper`_ is a set of enhancements to virtualenv mostly +implemented through simple shell scripts and aliases. It automatically +organizes all your virtualenvs under a single directory (as suggested +above). To create a new virtualenv you can just use the ``'mkvirtualenv +<env_name>'`` command and it will automatically create a new virtualenv of +that name in the default location. + +To activate a virtualenv with virtualenvwrapper you don't need to think +about the environment's location on the filesystem or which activate script +to run. Simply run ``'workon <env_name>'``. You can also list all +virtualenvs with ``lsvirtualenv``. That just scratches the surface of the +goodies included with virtualenvwrapper. + +The one caveat is that it does not support csh-like shells. For csh-like +shells there exists `virtualenvwrapper-csh`_, which implements most of the +virtualenvwrapper functionality and is otherwise compatible with the original. +There also exists `virtualenvwrapper-win`_, which ports virtualenvwrapper to +Windows batch scripts. 
+ +venv +---- + +virtualenv is so commonly used in the Python development community that its +functionality was finally added to the standard library in Python 3.3 under +the name `venv`_. venv has not gained wide use yet and is not explicitly +supported by tools like virtualenvwrapper, but it is expected to see wider +adoption in the future. + +.. include:: links.inc diff --git a/docs/development/workflow/worked_example_switch_branch.png b/docs/development/workflow/worked_example_switch_branch.png new file mode 100644 index 0000000000000000000000000000000000000000..8df761e88beb0543286dba316b9a8480a18ac861 GIT binary patch literal 34439 zcmd41V|ylD(|{YBGqESQGO;GMZQFKo#kOrvY}+;`wrv|b&;9QGANGg-P`#>GqtHj4 zRbg^6qKI%fa3CNch~i>G3Lqe$j^76c4Al3T!~ho$5D++ib3s8laX~>MIR{%4b1P#I z5HZB8WEUmH6^xMSPS(zuAH+mt_c-pbM6;Y?=n(Q@;=XcV!GQp0AxIH4R0K`v@eT~}JsJrC= zj*DMDam1{Gq);Gm!V4^1-T=%4utdfBL2Sk^h_U-53dUdtMvPq8yb9vIfoiMA;zO$T z(bN>celCvs6n4j-?8tVx&2+Mz{78sgtyHpY(|k{8<4z^Pi#TNqx zs>FWjolsY)>H7wRXnIWFYTIOCP%?GvG_$yiO1z5;^~FQ8m+qA<0&m3+?h*Gll}`M?E&@4x z#QC|EBWR8#E!8pWTaLf&Sa&e{Q`-3Dn+j(UCa)ifI69ND@Ig^=k)+VX1OigZw9&^C z%GX{GyZPj3KXqnK59UqKv9kPeB5YuoS$hzdpIR)f5nQ0p!j2CX3pq5lca|lZnUgMv zR}i~gi#YU%EjNfD2{@?@uREC6nI;MY+(=i4S!G6kNVCH-fb^JWb z-!90o4C)+UVFxnr=RN}=4U zoF=e{f~x`JCdeW;rmjJ^z0t5a} zL=6V_Mo1u5jT991mt`Dr0a&SUdMr`_ZflfSoc%sHR7kFVDi+2N_Fjm3o@Px-E%>tjIVT>{a1Zt_ zo}G6KmKLK8a}`80@=|2G?^!?M4&cVMgK`t?${)Gc@!G)iR~MWQo)5uR4B=2Wc`U33 zgrz_7uVf-pD^evC9GD}Rk3iJEKk|MhvOwZs6t@8_1B%+{W&RFvJb+9Bns_iNrUEHN zT7#dGxCOB`fE&>HD`AwwNYnvKoli%$iZq8vmkc?cS1$2408Lf`Pgr0@hFzkS&yCMb zuuuW9#AaTtEVV65L$*`MOTvpzG=VAQE@f}DGNC-qGU+VEgG@GYDa9p4mo|r7_m_{v zXVH5Ln0zl^ucBXmjY_V_PD3AekhpMqUXnqmLApVhLBKBapgdVeykC6t57|<_lbTtw z!e2N7bA@iREe+BQ9IM8w^!TEz1#fc+CxiEd$DQ}E#$?8t2fq*S4sZ_Y#}uIY==GkN z6tD{;6C&FqWA6SOwodO&ud!5{>wI-kB)w4DPZf)*ttKmfvsSaBi-4>oof@5Pwft2sk9V5x2Dfn&kWJ3UIc$eZ%J-= zZnu86e*TUjNm*R^KgxWKe9j>@!zTOi8=gH)KuuVdc)WPjsI=np{HBs#+jPS;g5{wZ z;7>Vo`*2>(y1IIW$l_o2ey$PEe7B4jDtPV)wTLW?4GdlC3u;B0GipM03Ux>|>xR>s 
z*Z$RkbW3eRui;>eJ;VB`qK(7G%2Jvw=k5zH&UEdX4UBe~_UT8YN8&fY8v`_Vh-*kE z($g}~`FDGu*8=L<@lMOsP2?Ulxd(aeoOtn8miiRM6yj7qJObu5<~$9S!$13-7CeVI zS|i#>U01u@dF8dFJ&m*13F)P%Bj*9@fsJMEm5-J;Dr8CIIMImV#Nvb_oN0KS(3EP` zIMwRr_$Pxai>qQD4xTWcD4w5P&RykRbkF)9U?0w}8+S|BN3Tn7vJh+#6|l!}1rQ<- zD$qRejIci-ogn&qL3`W%3F&5aNwwxNzy#g}orAbwnGv+nUmdY%ssB^}&fQv+UAa0&6ZP`<)`_LUw;Ro&~h zx*R|(CA5dU#UBk(9Yx(}KO4WtL8JuM1^yFC8PEOw zP0XiYD*abFX&%_{+%P9VrXOBIrOm`m-bvU0C!b!hZOnY(Qu${d-@iDi-;U}g?Wxh9 zQV(L0q#0xZ>hSdzb(^N5#yMm16R3yx`|0~OaPxm1X=~;cRFdN zG?A5%v5b_R~}5<=G`{8tDbkM=xf^RA)Ch-#WXOM)OAW!TZk?cpB9|@;l!jL zcAk1PUu3NguPt?NS^La^Mu87P^AMonb@+@t$yrwNTMz25?^N_ppXHslb({L=`UqW! z-Na3c28k99_r*9ZaLzBWmarzW^hL!F)$Z93+kR|62zvZ1Kapd~Z+(Vf`^A2bwa4h*~6Q&ra4o_clLiyt3{*v8(?`-m8r6g7i9i)qbPfd~EYNi2lmL1&@Gw zy`=5(_|jSDKJnW8_y=8ztiwCu&h}yUVR*imM-C@TnO(+P`LXc4WK!CBfBtt#Z@NCz zG$y!3knSt#rCPQha=LVyN~A&LRJ2lbI`SoQ!@c%?@OnHlWo|F1o8q(SDd}H%jVi{z_sPSvov0b&5|zm*TqNcBctQ}IT!P*qT=w)@-E&-zmMEE!~5a5lXKZ;*`%B( zUKdZ;_ghzhwaMYuQ{dejBZs;-2nZjDxDdaRE9kj4v?toa<0nh|NhivWS)yP3?$Ds- z3RZuY`D;46!x5FP)w5qKLS-S*;a>+H(N0+0<2F5xAz)5AmnIj#*y}k*P3mEB6<~HY zw``qFY`C8?R<5$yn<*mqi8A^4h=@>7V6+xs*t8aA;ddaxz(D31?=E42n+V=|d|6f-YP+%hI4fa+Bm%Vpfj4;VqNXwy$t@qIV*4ntj`7dls z<5E=0g|)Qs7@3%IRmv>`;gS77NVa!({by$61_lOT_#nVk@xj7_tz>Eem*iF)PrQL5 zfqkPZH{mKein+oP25=&xyn&jA1XY0X(FF3i_;@f;QPC15%HL?=7CXBlL_Ho2$3PCd z{^`KYMoU%DaK7y_fxrCeFI&YJ#H5rE(K3`;Cx{Vh!LipU=TW-K0TXE>>SqSTs+37T z(%=QU%Qoi#=*e5!JkJ~30o~gWX{;3c#CHI`zx+lzd9;BF=m?!UOAvVrypn|yeiQx$ zIIs`y+?kn~1-z7&NB$$0e85Q$`m}nL%k6F7_xE>80%Xvhg%G}gk<9WnH5@l@3s_G< zu5<+c#g^0=$TU{$0ad4=cLGO}>VIZcr|vL2)!Gm^Qio8Ty|nj09B?>BiNY2h{!2M_ zWyFF}S*qT@Js$QV-HTNnwB$b$*a)YlrUaO}@jx`)aMUT{rd7+@hT=W;<(~1kAZ3AN z*vQ&u#G1S8&?%vJV89qCLD$qZ{KH?_&7rdrqe@90i$1vn$J2R=_xHQ$_~8I#o={rt zHYLdLa+L_*cbn@MiMD>K>Fon}>Hsju9Kuin>8ILN>V*#1cYh@rIi+@-gD7uXTSQ*V zn2_M0om~B2i<`TfP+*7v$0pIWy~|@3^uaL%3QFDT(HEgM0&M0mk-t)jRo(7D5(z4a zny>dQHJ=^m=it`m1gHF=mxC1D>w%xLVs5l*H6ZmSlXaY5v$G0#yxw>puMY_vnin$5 zVyNR|V|M!^sP|V}x{MUe3QfcrCPF;{iP(%fvpamcag%Cx31~@COkwCs86dzbv$yV! 
zlZP`)8uom?fHdBmf_o7zF6xFwSK^*PzJSq8sVL`GB%(}d@xbEtK=PcLom>>teeFpu z#QSj;!O8+|^A;BDlfBL{T_i9b& zSXJ=1ecb(8H9&hDS_{xG6vD_Gxi)4Y6XkBk{QA%`Ff}g(pWuBgG+EHi{+dI2~;;g~3Tz;hXs4O%bF@hY>Smqi- zG<)rNrm^P7wYA%LU0Ny#Gz=k&8$YMFFtyK^=8Li*%WcbgWMIk$dOstG2+1ehYRZL< zGbK>mHXy2AzWup1dr}5>gsArNzf2wUp$@6?wUX4oD}E>J{%ksN;fC` z)^OR)FuYXLpxVa+tY^F;dJI*fLfN1T+EqwBPKCRi-;?1uRak0&^H7SuWCv<9!i<3v z7_(v1sPh^;-h4VV#DB18ZX85M3#q2#lh4(M*&=4AX&L2l5b!>Ro+??GD;~!gznr zb?^d@ao0)vjV`aZF@tA~YzB+c+HvnBR2o{fjIDMTI;1(m$c<1`k^=0YbD!&Nesh7b zsVq+Wahhf^7SHI6s+n;l>HxOsSZIK9~n5%6qLTB2X@h{bx9 z6ft-2=FV_9T3AjlR%l-fNj849**1r?-MH=k9=LyaDDk2eO08{ChqSi_dA7ac10kUJ zWN2(f?712FF1&RN8? z{(CqvU-NoSwtH7i1GQuAp)Xd`wy=ii{NqrQnC!Hh?*~tIGZG!Q8`FB-)6Ott1WJYm z;DE{f-e{eJ6pVUK^=_%x_TD;lVl&dsQtA0Q;snN}?kcOk%Fu8NS23PCRLgJ8fD5Ix zq-R=a3Q{SxwgC%M!q-L}vpiJmB(mA|DZo;L;Ipw>fY?^h1{BFW|JtA!&PwcezrLc& z4-ugjqPj}fb!D98%=qCiod>rL1O9!ux=E>vvVl*vvEhLSwH8WHlC~F;_+8B-CgMI= z*f1Q@)ZVbD@t(^_m`I$%NV>_Fp)49cioOC7^CR)8d7)cm#7c8$R#U-Zt4zsXjoDp2 zZ4J%V2BqPePGoEUXeQ~Tj{CH6{+HXcCy*YTnr5w}KO03@nsygL=SK*Vg{FAJsK3|FpN}-{lB>`A0{p+50lh+RqgWo|Ev(q24H^q4+Bq9Q( zQlaMOan*@=d3iZ|3(mC?)-NF;QQ*1SXh|M5q~2tO4TXdky4mRwFv%-*{vDDre?-zD z_kw-zc7WYqpPX!2-Mr=|`*n471yH?hd#mco(HFI~Aoy^+yw9=c=H~D>t2RyBul>la zp@na1PMq)W4!l@DRSaG^yT$YkZSegwJIiZK_^B1}c?Tzs>r>g^EbCYi`tIl+29jeA z6_@4r+opv_f=Eb%P8ucI6p@TCIo=J2kmId7OJ!i^VLpJA7??K{*_~B*9>+MIu}qY& zed9@?>rx8w#e>s}m6{AbpPukJWs*ZLC&f8cw`PGjaWLQ*{%FCt35^pAf6QfDeO zxLiKA$dPA4(sXIMhn|_g2M>z9=ov5r-d_K%rBi2alcF25NmnZJrc6j!Z@{QVc1CAd zugRpiL6y1_fAmzrnA49h(+WPTi2m)R*Gv$2IJe$f$r##nZIU$C(xs!LqYy+!Mivqh zQnEN-EWDl>4k7ebt9yZlfY3Xh$&D|XFq*oI6D8o+(<7p!q@=gs6$%Ut6kFQd+_ZCa zEMK&L=>bRf{aC@+%oT)2!@!W(@fK?84@OJ?wAOfK2Tlr@(-=LP#23}xW8Y0K8O1Q|`qns_9J2IBhKPk!&t#l4pEN6AzOcp16*Glj_f z8hkm%TdnC97SCtTeQXjDKj$5RaD5BB^{uN1yt<+%nK$9uh^}5rX!q*t8ObK}CWw|3 zw)Fro8GFzKd`BOcN}tK_A8Exq3Z->@s$zR@XoYEiB-6jYBz>&f=^*At@%Kzf3xq=O z?B-SFU^7tCY_fFWPj#D@NU9y+yMh$1Q7DYE8?s`Gb9{|Nu=y?Jy3SLPCA5EzT&EBd zLF}K51GO!tMc_r8Do~(%nt?MeG!+f_jEaybi}bh`K&7Yu@zgv$5C$=ov8czvuM6}Z 
zOhOa|Mk1fAH@1_$))_8!)q4Rj)Jakt6R&8Pf$UP5)+94}jI7J->4SBy9&^jM(~MU| zEmNbTxzYK$1r^4(6_t1ZuS-(ndW81xyiCNvAWy6bP~%CJTIrfJi5ZNbqWP=f*$TSn zx=xRVi79E$u8U-dm_3@=XhR@A!15YLA}WChb~Jvn>`YgDcXJcBzcbOw(30qQ?Idch z@B=9EGvarD|2E%Kt2DwyprJvX>T zWmwC;>!PWpfGs8Qkwe5ZL4VeG8OP|#bVymh^PR<1{!gCnr$3u-?`KXJES|D z{&sk9u3|35t6qeW#=9OR&k;LBVo9lSbaEQpA5A!;P7>{-|K~tX$_Y@rL`lMz#Wt;z zCiXY-U_%Zt>N6h3FHer%tJ@p$tK9}xTsZFad7rxu5ocg7FHY@G%Ur%UNRdM>-{eo{ zLl*zewj$g1Sj0H+nDVcF`g~?Yb;GuCH|NpmtjC;H;SGY`JMzHV#r7wX zrevYj@hyCN+`V{W-CJtqE?^T9ytY$}%{CuY;L25>%u0ab!`<6??3YC8y$-@UsPrEx?CLe`&jg! z*LHyK`KF?dp-{y?*z3f*2>2^rnW_NAQ`ChwOh%9lTjU3fZAC2~#8SGswDSBd>ZmbJ ziJctvFoZ_wxC*nUZy_39jH!1!2@H1yD+3N$8{c68j)~I>8T-(&&zj3GBg@2h!M*QK zIo6<%?tB~+-*IbkG7i`Bz{=j$#A?f)o!P`Nwaz{&G?jiG`%AE3+NH^9>7WJtcDdFt z9yUqin9ziMB`#B!sUspQ#OUa#!us9O^xi<& z&v$6du5s9np$*C{|0D1N~+1SMsIcyw_eK@QgPrz!vi}m~OKTbDHZ8A^YV9 zEt11=2gt~5si4&EojQ{dKcWsJS;>}~oOdmUPGU12W+60nv+KOC_jt+G(b)}SYfBfX zx-{kbcr%rXJ(deFtzdW+kD2uNA;wU`*fC;HGtKb9J;{wC)Kbds>U83Ea%TNjN1~in zB{WOY)=(=Y!h`PW{AoX+Bh~|`o9oU{c=+oP=$^3$W#B25)ScNol5I^h!Cq&(z`k=2 zc>-9M`6;O^VZVR^k*>@{H<#EmISQT*WHqv^zkcL3`3DoU6su)dq9BM&?_64etc!^~ zt-Q%PEKKbJmaaXW&COsqgn)yLuWdfmGQWynfnU$Q_x)pvb=aBqY^oh!?aX9-yAeZE z$2Gl^e!AQ!jGk)0>e7r>xtLhxS?juL*+Zhp#v5sJS^T8ov^t>Y8P|pNoJ0hT39kis zE+($9Kac66HeDqhHdVX@(Dk`Yt}TB|wl527+*>yz37w{YEI;j8bksaMN^N|RcCEA{ zU3J#TjHPf2@_L%1jOGn~JT(iR?*gqp4?Nbpy&=0?#r;fq*L|hk?mcLZ_b+Hn7H!bl| z_g*2yy>YaYf;${spgvc%L%Vf=m5mIF5Kg@nCESPV`SUOMDFGBfwYKXKK7_;bh2$$<0dL_O=P`yW_n29-sf9OjUO<~b@sh#kA#3cZ zomx0s{ulD|Uco4whv`x={ZUW8GWN24QjT`0E#R^o!*Md5c{UUYz_o%VPvbv0d4Do@ zGb>3a&a)o(_)nB@rSthmjDWtsNnT=@GQj20lnF6IpDpr=ooB0lBLg;NuGUSWw>yxmypy z-L56NZ*H4!2lNsO=Tr@h9NEWp#*RNZBU0e^7+pYn3^b6eb%bPcHjE?cgPYqd)sdy{ z_GFId4+>nR`FmDVNi0q*i``rbQA(qn#rcQ%p|rg-<^1JE*{IQ=@DHe{m;S8Tb+tRW zf12jM4+^z0yk9s7cs1;b>zk=;>?w+Cmy70i;;e5z!0}@p&)IZd%F$q%8I^ZWZsJ@Wu6YUFp)2Wp&HxmL+t#{N@bQ)Cl zx6+b)YuA{yn(I_^&(>AB@LA6}Tos$&=8v_96)%kXb%y?(i2z#RKVS4=x4qG$=~*3_ zlo`6uvG_kZ2&Jy2#;izzt`ViMT1c71}5AZdMq16!KNGrF>g^ 
zBQ7x7h)KyDj~JK{EU40`!)>us{8Dd?h%W!i_`k#9DLy|5sbNc@6m=rI_x{amy0-(BM_cWV_mIb7hXK1_agC|7A^YYgQTZ3h=N=rCfNc zX4?A8^TGwz70&Hsu8Xx>r8r!*|CKsEGXlnbbAh35 zEX?!Dt6nOkMlaeHN3fJV@6}v$1kME@v-d%w=c%@WG`eX9<|dXXpnZm=kC9 z^P#`FG$6jg;9Yt?@p*O1l-g4b)tloFhHpUbCrSJlZn(j;pAK6(dM51r`fO<2Wp{lR z)eI%AJ$6I0+Lk?7#zxJPTfjnZ3O|BtSCGxGrx0a(%Ba=xQNdogh3_xfuNfl7?#{QL zYTib)iP87?CZ{B=4o)OisZ~EMM`pfj8*@))*~;e3D0Py&>lPDKJz-yIE3Vhy zTwLozZ10vBIPy0p0zH2ko^IhSXV@HRo%_>tYcJlfMkvJFJSlK~8T*Q|*<8H-4hEL6 z^Q3;2aNRL5G1WV~y9iDQMiEtwk<~`2WbETho=qLe2MHhh z9`+dDY7k(PLB3L}o+h;MR|mJoK3&a7340?)Dx!ys?3&pxZB-uF2>+jAn6K1~XEHMF z)~!aD>9^Wie64a^E1ABdvGI?iIO^NR2lZ%Ec)T6*?=}StL^>R3TB`AG6VEvAR1NQ4 z^1({7RAu-?tG|}Hb20?2?j6o=uJQ!rw0ZWb%%RuxtDv9%Jx){*$j`cSquWD|uEW85 zSL@$8uCmd?rT{0N@j%+*W}fDBB)PO$<9HLW6!FaI4p$61p~1BV26}{4glINZ+XqMj zFl5PFlqm?#8U%`zY~8?s~P)e?^X_tC}C=)>gRIq5HK;s)*6JRkCDTqo=d0 zCgSHDV94+=5pi{8x|Epyz?Au~LDOIa;fld%dED`fb6O{b8TdkNd2p*9%7k*)xoS4P}2ME=PhS=1|>P#t`Y{`JAz29Tlxj{6?F^O|W4CF&v3}bsk=29h+eDk-H@s#iTttp3%+;e$ zo=reR#(FiA0UB63!ZE|vm$ueSsrlQq@g}bQZ2jbr-W}-QQ((|H1fgI2l82xKlYN5d zm6Oy~7CSCWx3R5F|3)&yGQ}Gdi)AJ`Amjk{JdQ8#zNtMgdYU|h>$yEU2)=Cacmp0p zpU_dG;F+q<&Q0?^zipMK#wprcTgB@+(tOV}b)Z)R{89`|1X9Ga!m?T?(r0k9CL$st z{pKFkLMnDkhD|^XTc{`Z)zORuA5I2?ah3OKS(-b0NO_ ze?VUDew2Q_8TLX-fAAVhLRkK*_@lv*T`&So)yg#-WC~mi;jMU)>`FQ7hYM@8jGbI( zWTLGglNqap=vCNktNO_@IXLq#pFzLR3%DLGFIc=iO@i%2R*F$1qZOSi7_u8zc(bn? 
z`19>?XJk~eSe3jgaklebRHE^{!gk-4u3Eea$)d?aJy>$oogM5Vn-k$_{R!ZF(SY1M zP9a!;3iAgI6mUB*AQU!|R?*+rhk}n!gYP-%Ep>|*vbQ*_jbzK5o>s`?&myX2N&mb_ zU@Eoc3%&J;e|;4*iX6kQ_v-CgXZDjP+nbIn#j0;auUOI~ABfzS+U141yqL6$vHjsQ zCDgGcW%cF-b}CV+l=g~qf6uO*(&50B)muB|nu68;_QNk91Zn=TVbz(#P zoZ{88(2fwtM>N9+v79-M!`|-e3v!*Dm^{@V&~<#GcPA+$6OG2o$XK_TfLZA_QHOAL zCNeis$z&t``nY?fm!+1~ z0!k;76HWKYVm=a*(iw3ww>9)BRoJjplB3Auo$-Mh9|gyDI5p^#jL!#INF}iz%vu~n zUg-l?B$X32siN_f8^x=?shL2Z*;m}%O<;9idw@j!iE-7FN-84f(E@x0LReBJ$X=UCd#RWG+oic*3J#s&2aTNb<0M%+@J9USWBO^1c1 zm(GXv>w>4u&z&B2sM)72{U;8_#y}RFY}Rr+(Y(Ia+ph8*S6WNx7dJo_y8b` zuj~%ez@=_*pcdrqbk)#jzUa-FEcxGGj$@_5miBBaQ7FUI6`S?YXYt{&4wUk=1V_76 ze~IewpSE$A9U4$J@wsAwK$=a@ONs#u$Kphavd?SEcjsSNUH8{9RT)7sivV#@WrvJi z>{14G!VF?*+U5Jna@WU{IXil^-JX9RYOgQDYPH=cD^MhG$Ub4io1YlsZaP?VbWq}N zOS=alo5FeN035k@nztvz71kU{V+9k1G{UN4OeX6HY4sQkWffRvV`-6{FyYkab@I^3 zhoNnLn;$x|TMvY#To0{VH-Efd5?$O;rloKKgyIjPh7XAo>i*y?EO)#9y8lG(rRY^M zr46&EVmD|<5}!X#B(F#r<4n##E;J=RoXn2edK}(0h(mQsQC*nsJ5bu8dWR%%>P9v; zHb#h>?;X8Dl!z!>*^zVoy6x~g z3fG>r5iQN$qmbSuhUmWYOuyslwETIy(_>o%i{jScW|IG#((WL2m;`0acE(I{D6wJ1 zIqf1RWPa%cI#ID~Psdy_!-}fSt7l_v7(M8O$e^qhXuo0U?s?#7)W~S1K|9A*wU}=)c)ru!-DV;a6Jv|0CvVal z^ugS*yTs|}{^WVTI?UJ!L0qnP!@PaO$p32LwDVrjso1)R$~PiNr7vOv23wW7;!uax zff*P$i-3nqjOW_f#o2^7J&KB(!?taH5#ps6`0A94*!!aeetL2AscAqpM&yt7ziU!- zr-_AV5enZ(=p!5hn6cg>+vN*dMZ>*5z+LhJiU|(TH6Q|^e{o(KdbXOyv6GWE6WaYz znS>6_V?-n?oM{;|ru?gA^`mz+=9n}|NH!JP#@_NPc>H|1pSIs$F-p79I~ z>6py`&fUiqMwDf$mzO2|77O@AeNPBAaHSyxCOHIvNyj{#(ka$+@S2jCc*kbY%t#rL+b8xo8Jo=Ki zuqXpg;;<7|WEB?|OP)}fr805K7+$>r$V!f;nFS9c;lbTX9feYI6*3pT+Td+HyAitS zK0&5s-jr_d?uu*kGzcSGc-7iif4}BCs{L*g$>4ap2X*;duZfN-AEv;C#7v+bCM_C2 zL)(_H|8&>TTzh%Nd)*74qqos!TmWcS`H_m|H#dTLST6B0`H9)}3eBU{MR2F%rSnQ@ z*Y)834duudQZQPV(lQ_gp^&Pye5Yya9xt@QLbM_?yFGQu&NS7!U#2bU#_XT9xU-mE zH(1o*vcDb(mz%7?Ke-R&rLs8C?IDfDwBAs%3y@anSdadGJoGcvB#dSsD>}na-6Odk z?||CPXzV$I25X%1Nn`nQq>sJbC6QlGL0@ez4j-irGA(N8X!0t8n^SaaA=`J|f9-1w zOS5Ese?)IxS0VTqot>-$jpIy*u@EHV 
zlM|Y7;sdeP4XMR#sM*yE!E!@GNS9ZBOk$-Nxq~t>6x%xh$orgowj&^si&~_jxg)gN z=m--0ROuUVL$sM*iyXc=y_W#(KKns1;T^&OJNk#oc#{$Vv547-de%O8@k!yA3a@2X zDvcAcF3NL-3Y;&zWk0jjD#5XLJIq!x1%AsFXSypPl;6v%TQNC!{c+`S# zz?yJJyUcL2tNOayXOd;%W^aq0V*p$GDlJBH?F>bB^*Qk^d+2>5=$O@KH4ga7_WlzMpg*%bHk_UFM{^N;Gm4 z?@h1bEBv%=&#m<+3;{g>_!@=%IfEL1tln9vUkmPwjC?T>!nxDxu&i``Ta)0-tCFJ?Vs<$rvPfYuZ zaQ$hOQHcA4yUXWq{p9WZJb7*cD7vX%k1P7iiLitJ90aarQCS&3D4%BQ8x zO9GO_`juI;3xqmV2f+C!LH}}c%aX;`MQ2;zbdI-qBndWmcZmc~gja*an#zH5parpr zV>iI#WL%;Iw`xp1Bt_o98|L?de5d&Z7BO|<7;3hR%sib&xI$DZ_#j8tW*J^9#JDSx z1bX8uWgbW<3*C@3gvPc@F6G_x)pW zC(?I`$!F}6{N_NUBockem<_%MItu0mUD`R|qMV?)A1?DedgFXA>xl0Qou~L{*TDIb zRH>Uz&z_+!Su&b8LrL!_q#9S!c5Zx_^fX?#~yXNO%rh)`nD&U=nKI5;;@rtqulu}t` z%m;g1bV=F$JWod+vrA>0n( zK&S4nN_0Yi`R4EJM!QRdBu-AZ5#qIxm4-eiQ`MZO5&xm{E-Mkn>Ui-cGo1U1DrHlB zX>oSHpZ@a&N(BU4eu0+>Ga`bkr1-)Gy+!z|zffCEU6F-23wLH(g{(H+P!mLhRnhgj zX6NXcyNP&-?;_nnS2T1x7B%R=y`+D03DDm!UXP*jxx4F*F#~>TSyncesE(WQJ(sUd zPgnm#)Qed5@ryE*OPDX>$1okOxL8z{A_ozAT9y@; zOgG+fkW8tjF!Mj;3w$jbu#7K!m=2259)m6PiSHd*g9BO2NI52lO5(7=R0L68JS6!&vN2Hj z=~RX!|3PfM9^>q>Ku(J+@9i;_3L30Sh+CHr2s^n!f{V z<=P#Dz4Yd?X?taJ9H-fnh=8*@_586i1=4-yJ3T?@4^t3-p5!BMB)~(S}2^ge||HFLWGq50j--U-8 zI@8X7`}QpO@@Z*lbq;ozOi(|Fh>-q|#eHj`g85HW=Y^07q5mg0{vsObU@;2p{tqy< zgrlGsd82bv|0?@WEZ<9n-z((1gh09h@z2x8=~O{M5vgQ zk!4^&nqaEmT880!oeY0QB(!Vamj{eSQ zu6tljP8oSxaV(dWTAtzFnBYKZN_peF{ zvFy3RT(suKa!6ogQ#raENdH9k_LH_d{6-3~FR2RTZEAXTAr1qV@q_!f5l_^8(Z- zCr7_<>ozM7b0hMROppWge@<39zvjkv5b!tg4rE+ZsrE!6mU>7nON&D77ymBjZ1Tdq(r~Sa3>@R*%q-QDCFC#><~$M@JJAyY|eA36RU~tET0E|DF4reHITnuQ**i= zjT!Y@`o3{`%0#2N+Rc{i%?ffxoaqMWU+IA#a2ZcqTNc5MzR>g4y4@H=K`8wH;nro5 zziFc#FrvuW1Et&%cJy5x=|YgEta?mASzIl+t;P%f15y9^^!MmInBO6pqXzxZKkAuF zaEb*X&e|7WplzTmvyVN=fAJ0hmh!#baGE70nZ!m1y4#azf6Q>KnXKUlEQ!BiUo3$t zx@dwU9`DrKj}S()>t;%qrV~fKCB2xqkV^hgU2i;Xshjup-Lg5 z*pJrBp#nLdSe$>g@ozLtV!JZCYP7WZ?}*YTlC;-@q=y%dUDkd#SdRHyl=7>e>R7GH zPCp(pgjOuoSCR?t(4d%sL8PlP6~mdGVA)?y+A&`CN5caYC3{!0MuZ@SqE4l9O>iYw 
zushzW-PkH{!cVF@!b5_oYMejM)PQ+dt9hiEo%;4wP*^<_iGcHvTPzhyPi$qKTZz`q|&w4$KzRCm_L zK>@nR>rAg4xGP#A?S}Uan>A5?IXXwK2c#|PS<9W^jCF}7B@a(IC^Iy)lO+5iGQ13! z-Kp;clg5j$1^^#F%z-b$!Aswb6q&=wvejBdsHn40UZ0JijN|%BbmzUWF z@X#S!MsCh{$a?rPA6r2y`e3OGSp+#WP%As8`v*4myp|JTgnU-{6RT17ro@3!+T3iP z{y%VrIv36ibLbbXo~BMrIcoD9G}by+kw;59*hB{-CZx>q(!^e-lAj_-_Wg%jG2-6p zqN)baHhnW#vWbO+Wz2+Ut1A;OYA!8mqKTJ{Y+aXhc(hE|ZCq;uGnew6da+VOfsK$z zS@R*5W0Eg8BU*}2j)C1OUmn9Idlee9C9&#|tJt06eb@CGmZ9;o>h#nWZ#0d2kd?o3d26cml zuD#R)G!z=S&tOOjU7lbD+WAylpi{Mv9H7K0uIhYvQg6Ty>;?sZ)16=w{Inap{MI zF|CO7^SfY466tA5Ul~xqb{aUe$+EYs0reR)=3BTDZzvXCr;*On?}QS#c~Ys^#1DZE z5zW6ox0pzOf?nNt!O%HZ((dt<@!GEOLQFnC@q^vas=aGpVG|9v!OgqbrtSQzR^R3B zAG~hvi0ZPgFjtJqVMGbXpf)KUK9Jxm9bHajVR6s<_oN(DmfBPs;O4vH3 zqt<$lLixNpMuy6wux?LM&=U8{I``9yjPa*TenSX_gWi)SLtcG|vIG%(Z=y3>p|xZ# z!wy`<*BSL#M0uOtL4#@Yy9i=9Nn0dVkj=et0G4ec-BvIjWieM7_-Y6?>Sj6Ns2s$S zg6x24;>@FG)$u$lLh+B=9fD2d3om>-XGzb>ml6-;%0(F5Bx}pjrTY!#e1ZLc<`dS( zqZgt_zm?t_@lMQ-#fJ)S;0PH(mqKd#b|S3YvtO_i ziyTE0Kz3D$?1E5h_One;2ZJQ+6D6GbeeFN4mzqQD_pYljj584OFS7u9IZx<&`9XvR zTLOwTvfV!0In9B5NfY*E2qo69yg^5BhkJ8hlS4Kd^pXozOkj3kpBJ7x7*3d%_^7sV zKK-;-L`6X33iBf%Ya#oyx?;vE$>noVO4qB8cPY|L z3am<|e(^+)Oe{(k;j*+t1kIB=jF{tVmB}fH>=kk57h1vXJsV}IL@45>>?M{G%~i*< z3;`0n-S~erRX6zkqG6zrr(1xJJ+t5>Y}&l8Psz%w@;nerGNm2F{C{Zr#=yF|uG`p+ zZQITX8{3VI#!edBwrwYk*{HE?+cxj%^WN|0IUCD!tvSY+bEA|XfwrrWF3+J@Sgs-| zCFE5wrcN#`Iw_GQ3&NcCkD+w=+@_N83wr#dXmn_`2lGz}5V}ifGdAmd+p?oOnz*^36Vs}nxq%| zmjm}4k!)>8*i$Y)sJ_0+kd9>TS|yytb~1xg*aiIqGi#vc{WsdMYaJa)1Lc_TvGE#g z%S=uuyJyAqEagejU#6nh&R5ilY3Fv{k0DgW6U~hp$4mxeA>7Z8*x`t3Gwx{CJp|x` zTOrJrYC;Q??zdf`i*B!5kc4O|j~D~kzxV167x_R=#Z8L-ap|z9mH58r|4WJI z;2?eZ>B*4vNx6*A$_N!*IFcbT{_H-VZF(ATc|!HHecgsmURuvHP!!$Xssvy)wARL> z^Ts$Hxn#t7qba7O6TvN3RTo&CT}-caF*(`vQ(1E*$^7NN8#5OzX|0oZPY57~-V{H?69BpQ!8)L7u^>Sj*43P0>hXunt{gJr8 zMW5d8nh}tW5N+U!qJ;vvRwNekzG;z2;Md zb5#9zV4v)nZbg_Auh3-GVI53m9Roc#yHEjL9MK&9Z!ns(dIJIt>PO>+HyZ%6s?S}z zWmYFf;qB7|tu+391;3KETK}l-IQiYMY8*sOjwEhO;uI 
zZD=MW6tVu6OF{=pvY0b|=Yhkweu%RFw`@Yl^75i_WI3b9hUo+ayIB`L8>Eg`C`xX- z+88ywESz9hCjwpXw-vmzoO?A@`Ir5uH(-Q$uPuj;+qKZGxunxYw(L4hlqMs!+#?y5(N1l)cUqpE}Xx1&s( zv!~r^qihoHAS$pLzqSG-Kg^qoU!6X-jm1}yZs>ZsV`Z|h^0^Vs-e^8i8)Be73{m1t?7DEJJ2BNtWXZ81MZ&^c>`Dz+Hrxhuz$rtk@o1^Ui3QN z+m;5P(L{}NjPpca;4b{!T<+sA^O3e>As~ptCfW-yr5d|=9R!LGAiFqJympx_t8$kx=tYn>Wp@N?yX_2`#5 zjUg~A+^RnL!#9^vcVe{I1o$%~(|AZRmyy1LZpJF}`lE^YKW4b#S~B3YWIhB|t0fEB zgw2@y3`~grhpY&FARFiz;_+mvrD>-PXG=1u*H3i%Xs%#%NR7M)ns(gGz3x%d(S-y` z!*Y?zx@s}4Dd|H@-SbOIN%^IyA*#+%V?Xsdnnz1>^B&~s+S9g+BlO^i;F144GFmKn z7-pjTOQqs25;5rdKCmZr&AOa05-gd;nNu2_f+2d?No%YkwDtUe%<~UPV1NcCZ0hss zi)fALdetN|Bc-w}MVuZ?ZIBxATIpp+pQ|sozYX#CFiCC~cGF#k{UJrlsuDgNF65E{ z7uD`S%x8YpI9JL@%2A0dC6x}Nf-f8z!$n*j7J761+viOnFMK%5Fd2l>9Y3re$D7iD z5YS&}ngQX?sLBnTuz3%pLhFXOO-7_HHPlXSD#$!X8$~8H&c8&=wdb_TW2C{OsE`Wy zEQqRk<&*clnK3-p*?s6~=Yuu=7;=Rmo`Qp4Sm$Qw21Jcp2Q|rwrR&?VP8`RTTPQ2x zz{8rf^;Vtc#d#M|?@Ih{>4f`@6Wp#$nxx`XOX0BgPr)-n5!l>s`*l7W4|8{eUjjpo zT$~CA79PykI6*RY`5>l!;0Z)T?DzZK`PdC%CZ{QpwdH>n(sK}Y#s3`^-IE#RvBRbR zzi|3xVZW+nxuXF5NaTY;k(OV!Fz`qsN12p`eKTk$={UpV4E-l_M>|)CVKadqdy`5_ zWDFGFB+RcyI=*gTa?)Nwyv*Tcf5_t(2*0Bopw2abfbc&2%LRZUdWieKY2zo!!9~0>t zDL}Ai$7`WX8ke-i+}e4Sj`g!AQ!|lFPcARw9Ad1v^A)zgq8-sb`E1-kpVvC-K@?5A zDRart!c81Ice@88<%SQhZ6-6Qg5VQoa~1>h(9-sMjF?Li#`phn3T&TBRV#dQ(yjHC%HEh`k^<& zl}(gbp#Lkp0$BLSdvKe+;kFBb!`E|WozEN~OKwD1#}=~s^MKgmaGKk&V9^D+KZ4uO(zhe>tx$j;(0NOd<;0jptwek5VYvN;4Li*oMfaXuxln(9YNkT#fe-kc6O-m!@;11acP);4G6TiINLByiM zN{(A}ocn?9Tczix z?fsBEGm0FsM$}q+tEclntG^f~lxTSvQ*#O9>>{k~i$r=)YM~qxn#!9l$e!>^Pfd@~ zhP*dY=Oo$#x2`~xd}Z{j77}iR>+Uru!nyCCXtO+jH8P}8rrsIB z6eBr-voFz4)s9Fy{XtjXiksy|EbF4`lj6CPVF*=Pg4y$_3?;2-q$znPSJzOk=#T$& zldbJ2zl$%1jzoe;zrC; z=pEz_MP6rw_|mQ;%;pX08CXLS{Cp<+v^!vY1A2k)aT~1iw7@Qr#T;h zK+ME!YFIKjUox6KV^=@(?&^hnvDOQO2Yo8JQ5M<~o7NR)_SfINug})zB98a;@mfgI zqli01N`HU<#%;|A;yYYBVAd}f9bFjq@(I4>txy2Fg?CC1=e50aK;ZYlgmwB>RrHLd+qqjRp7jRVSL?%B|No(V*k6d`Dke**& zq&T123v_9tDsn}0dYHr>*T@z>h?GbgUUoP|_d1;N1HnSZ$nii$y@ZQN|I+|N 
zfHnUcR&^fQ?ZZ=;dXGF66W;riP0$fotT^W#u&m*tS%wn46zlaEqNU4{5s`V8ipdf~ zMnr0MK<~l=&c*tM`G*skOF~MaoRHrIE{y(vtArSNduCJa=+bA~FiT-PsJ$o%flAus zXVG_=?1InlB=$%M;?fQ@m^E86l$cbw8NkCdn*1d(1Kk`MRV@hPBs=fDDk|qWPvn>9 zd0h#b>OTGdjH%HcrHy@>L*S>6#=3q)T{F+4J+AHU+ZFt0d`o}b|5l3WLkv!sa_oY`cSz4P_l=6dV{k8SG{ z+{S2C<++yobP1Gg^F5&PjTp8fQoH+al&?>)32wuR>w07wj}xiMAK#6ZhTk50(v|EO z+^xH8?^ch>`^r2md~Zyhx5LbC#CTRmlf8J|wh6Zcd?y^VU-A%KX?Fa~9=EDdB|1~p zueg;{l_>m=O6zJb198FCV^V|YTJ`=}+LC7B5;^!@nh+kcu5JwJl_bBZ&O{2>z|9^_bh*c4|+vZol%m62sH^4sy~Z zNWRd`3k_4o!LFbUjpqTQvbRQw#Z?Q`_$@wT#>384@D8j$;aF-}p-PE?e?lL(rv{E3 z#yr`k>8?lxJT7gr$PPM=r@_|WcD;%o(}nwa;X>8CqolrA<5|F)e2pn>U!D+xq%D4u z#n1NFW`$6N4?|D75nXk!5}kFW`G39LpewidkfOXW)cW|_{oIf^nCgrAF%lio!q+8P z%H_>te|V(EkQlbkTY%&t;hsnT#;0b>9ldV@PZ!SD*Mh(DBGzakLP$%UxUYLgPWlvc z8r!S8nlU%uwC_-;CSpQ~Y!2l9COUIsXGjVNj8-KSX@POik4M9H(wXU%%?(&NkZUrK9y#eL=q?PJjxkO zUmvg)bp$u(l}^mRtS{&kq~q~+O8@@OoYwU8%V^diFHM!=x!r}+T&Z%QV{yFwdwgQr z-!+hHs1Ra-rezac&O+U3k5C%ug@U^P{Shj`8Nf~o@88oDC{&o>uG1dwoB@`z%oOMWRuKTf&1F{fT-)(W%uZg~G#!24Yp^-Id3*$u!a+CWC`6y% zWU%V!wtCN;Vr5DaI)_;<*d!SR{kSi27XX60pgYFJ2s%U|r$?U&gz(xZX;ijq4kC2@jZ(Fczo&)<9&oP!HU33>qQ){_V$on`iea0U^3fvI%@cL40K`@Fl~mMHqcnyPtBO`-$1nWSxOUb zQ)sf@(I=0YhoK`Pd0;HJki7X6c7ZB#;l;Emx3|eA}uy({ZsV)()6IW{{BfZWS^SS9rgB3MUcm>bL~yeF>%0 zpg;V+TA>So4U+ORxUJzUp`z>>m_kI;i6tMrdMwAFqA4-phlvD&yFDB_bjn^buXqnh z0TGU=A9(1b@SJ*#89-av>R66xFZ!b6hH>Q&mCA1c1Gp3~c3k;3nWEuFyU>*%GdF%_ zm^&{)&@5)Jx@jW$P+zdmFX8!Nv_3@OSp1<}LJy+@M`Q(H7Wf2V}6_P2l0)w}N`AKRrB`%Cwvpq(H0xV7DFJ zYQwB=rhIypQTalrb!BR-lg7k94~q)CX@omu%~~JyhjSB)W=G@MstoI|R;lZ@|K|_z z!@wjK@l%I$e-rPaIK||9bJN$69M2;O_u!s;zLU>?u&KwT4;cpPGA#dLHkL#-W0h&6 z{K(nu5Y2qOL84&(YJNgi7uD8~vEr)#pH7YiA~kPy|3#1eWBkRoGgum~sWEmeC4{zB zcsWT$iS0Rv?Zt+@W*$Y!57J6Jv;)uyw?m}(ztfD)MhBi&`|BnMAfWeQA7V{s(`S1{%@w%l|PjiGTMXk?MdM0jHAZmQ7 zDY&kvtaxU)rKRa;6dR=oLIrw>KsGhHM>C@-M_(fh4eR#ix&J$Ae(AYhfMD#tWTe#% zm^M|+sPl!5*4kvwmJ0Ky@2wWjkL+zB1bXXT?PGPCQ?Lss%rms!f0z`}Kw)|1qmt|Sdoo&Uqa z@EGh+_&w-%eV^fZqkcF!*kU5}?+tGIm2GpW~(PGiTSl9E5aL-Wg 
z+kv3+lKk=E3JMr`eR)F{zy4B_{)0k*oPwVp9sUpT;|QlXFM((XN8#JYfn`q?c&KFF z0_csX8}N!(6z$}6vHaT}_&?e_d1cjRXep#UGIOb9EeIc0JGhQE~(#a`r% z7#bV?)o^Cvc`M8s*6KBj&4yodI{^x{-Dw>nt->IiM7AppQCT)45SD z=)d_Wv~4LW6j&t;j-JycI-jlzN)`3mEit{J^8HPGno&p4>pMXal||&Bxkd7Vn%Ln{ z^Uv*4Ycf4-tQftE_55kw{zUy7y|n66@Ssz&e*newD+;x^A&bqoC4oHUvyzFBad}96 z*Lv=>X1cg{EsQY>_Nq|co}uVpZ!}}Q6+k4Jlt>rSZtef)3Ty=+Q!gd&^_P5;q;lYO zh7&i5Jv){TB&c*HDQyHhDP1&_Xb0CRY*)MVo~{_~9DC4%Dcu3sG4jWT`bZA}3gO!U zGxFC%=6KJThwNs)ee>l^I~o%|zTwhaoP_x=^>NtqOD!(+M zy~9mmj=k6hf8F~T14XCU=n?fvff}vbR`e_OP@|mu%VMdep*Nj!SyFc4|;&t8)l4Dz!lqbkc@>)E7F&~xcDyqbdc4OtZ zztD_k3asv??h3*?p@<)x^GKng>5$zFkEdg!|MXz69y5`CTAX#fhnE?6y?}H(OhlRq zJmk`7Z$H(_c|mmY>W?_n2EOP;)aAHG~cGd&u`2C)SEMMhtLgW=*)+TsrPc zY(|{05eiFQ`gpz8*2YJ|h-6^vkjgykvv6;i^ z`M!K1>-s?CcD+N^Q)e_+!O+l1)KL|}|Ll4vZ>MA24t+W#_3y?!9nPIQ&|7Cfaqq?( zBI%D9KC7_Oy-ms?)`TTXaT#p%+)i)=aq?XarMHAgC0N-0w5T;$1^ROmf6)6 z^e+aK7R5X`!`*a4YhKLjTnhV-xbfl~8AV%rYqG)Md&BHQ-2z?y59guGIVwM{ACc+l z5oTL&ctZ*Py<86iw=cX7RR*X~w1~#~>0pIJ-v;Xzm=-gB`iV^_SZ^{UzrB))P;4D- zRrIhieFRLHEnq7U(257`6l)^olX;W$cXFakbv8p}8ZR7UX?Mt$y#H{!79`q*BFbam z1C!rR%fNfequyUqT58`9U-@3&9@8V6u#jAoeRJ*Z<^3B$vwZz%M?Ms<{hKeU-OH6k zPK%f^^@8j>5UR6*049hWyJH#%PsD9~I#>IVG3v;`E zumgIB@!j;}Q$>@4trbh%#E0yIhC{gQ z>}aB#f6;y@zwq{rS;q>#y@fs>mPt8_ElDsx7hJS2QYfHUyh)I2dZwF#tRncp*&&p8 zjlIu23}b2BzI4|G*{;0b4y&ZdrIcIW&>^>`{M#+*6>@(T0C1J``oSsWCgU$nOXpI0 z%W(Zx^gn$))xow@lmB!#%C*}yCe=44rOejDnW7<^2n9QcqBww}<3K8XkutX0u`%v} z?S_B_6f#ps9+z>~(||D|)Vm<7QbUd|YH8&;E6ncuPRZEb;U=UBuaaN1b#I;hxnexZ zdE2=6Jj!<6@@^q(>;tJLAriC|46)@OF@3?vm>_MEek+DRd zrGM+%XQ^n}(HSI{c8zE-&FTdQLsz8InE9a81#b~cLbEY>2WLF?VDd44s){kTwFN?;{s}+6I9fUvLmz^u5KG51jy`yu3 z1=|!livub-Q?j>Qi1#vIKjxYx>U8d97~$$nAl2nN;BCaDFOj?LF6d=-E71$Ko+aYt z35l@)hXyL8+2OpkHM&QM0Q8|7+imq zuhHbPfKA|oMr#J$4Q$_A)MZWJ!okA>Gr5u~;4x4>oG_=$jw-3BfEUhabg;*#P#=ax zZ%P2gsOEsS)3Ty65GWh;-NIm4TdaEuF>Z==jG3F8CLiMt+M}%YwGy3sPielwC45<<>5EaUKW%hh_yeLLm34qwiP17-Zq=~RY!?>RylrYj ztcd=8HwT>k9vF6Ah{Y<+Kw!c`^QK+BB@c*j*B5Yb@MP2QM)r6C#qXgA*6A|jxHK^_ 
z3eKyEi~zz(83Hb+7P;2r)WyL0Zyr0FGt{xXdywA&aF?U5^NR}_1ZT$asFe2HPs*iY zMyPhOS1@Li#t<-|ly^Vhd10)$u*qtXs+#6ASb7~Qh?STzHvI2T zb`UVO*Li*F4$ae8oR)tt6Yn`fzK$^Ma&{t&LC=0`Hh1*G9{gr;` zhws0ZWsrk)6KzHs2Oq#h$bIkLBp^7yv%_m%8eT66@GK{zI*^!LubgTE*2NFy+6=-+ zvjIl?PHuE57!2IE_sj8hXA(9r%g5J`$nW*ey5SO7+%!81^%tae44(h6?7G*H3Bm<& z7n)ilL1lepcFd9;y;CXKZ#wE%Q^6c9-4HYm@iaLO?6qzRK=A61U6udpDZj63<(AcL6Sy!OW& z&vy}sZy|@07=lvsj}evHbH8Pc06R}0T*$o*w(RdDilhx1$lb|YnR9_d_Nn~ov(zt| zM|?U0u-{Ymj(Uk#4|JDkFQvI!B99}^lKcNUlvr|)te;t9cv9Aw@AdXhR z!@0dbY}uSl&x-_aYq*1&>mV%{`l9I#KxV(?Bdf1l(3s(W=&`J8z1dMR0wd1}uA>~3>p2(bL`G?Q>E zN)hi3+`1M1l*JTT=_PW*tkA4%0T3WR+S7eH(l0su-^rrjJR7yh6P%D z+Y8w|dr^;_ZV)KK7AdX+TdC|8?i!XlTVE{a#v|jd-W7zr(M)&S9VX;sO1ZLL{$8oc zj=jhRN+7J@X@Zjxo+aG9OsKc45<`FcuN#g+Jw z6Yy)W!R5=sWw;L6IYspEvi@Fw6niXC8(}7$ z{I$s1r}Mso;Ev>`!|HbZXgXdoZj2p2+^oe1jb1WifTygQ?su(woCR>AFp~Sd!%m6ZXGkak)3VKsEQU|D9qB7Cg9F*3* z!(WnYEhvi}P@XM>l_LA>;t38zG6OKu6FnI>buOXR08i_RR&1eI_dBQL^>OZ1*50VG zuUaPC@VZvbF3r^F7ijSHlycN5DJ&Rj%r*_gOybL|!;KX@Chu4HW-zrURNv{F@1&S5lR z%j<)H{k-uJiS7bp*#YqJwX*@JP7NwhrmW`|Um=H&N+#ezbxlZD$*BsfmxHcj1Jvp7 z1eHfK4<7sF+T&t%LS8lY>%R^7-#cqT$Tg0-7VLvC)d&WH_N^|$M!q&YA=0#ocv9>t z(}iY2o=v@-#9lF$n2AL+dw6x#Pt$6O_bL1GXp^|DW>&7e9tW_Qa`YCGy6 zK?KFF(@S;?YDB-kd#!svZD_Q*;pAx5qjh7d!o%y1VS4ay?^isC0QVF~JxkP5%Ojb6 z#&*RB!VbQ{_I1h7nHEap8EA>$&JRA;$aKoos_-rGtq9Z>X11)xVK?}xRUdczwpL_|mX)B6M5?p;CB`SZEZ<1>Rfysrj~_OOj{elqa}bTiR|C|Q3QS`0gmZuOly z<=y3K&B&#o@CeS0s%k$wOni5UFZ+J>818lzj(`xgv>n!JKR)?G;g4}TCruVWA0lmf z3X8{9T_1vd7q)XWrA*)HK_UVEzY~|TAShv3brmtP2y=SBGOxq@{Cp3!4h&Ra{^Rh- zNN?2z%fn^#`w{xOZj9C$f3H+yg52CGRyh5MPm*8*c$c7{Mid#` z$F7B+7r&?0Ay4J}`wy-W&zbuDKb!|e8CfjGHEiyI$^#yOwMP3J>F1!SsZL zJpYDFs#0TO`1Pi>t~Y+#5L9trGgn zsOQ5eWCSpZwwiCLyrQD7G}njradKv71kba_WS;J9&J@oVo9OJ=69!nBr^6r)OHvqQ zecv%c`s!R$+Yg&j=LWRTz2m8P*#m1BGPb|mH>E@Z!U{`_y zh?QzSWg16x-5P;sOlQ$w=YVh_-LiTVEG8xEq?MaY`LG6BI=W=j#e^|j8;b&_8n)8j zITY00wvX|T!=;`G4_MH;M=oNR7cn!3g};%=xNA_a#0+Xasbp1mnK!drM1M0BzJkkR zyG!EQD22|UhO8$3u&Z~iF*vB1o10(YI%(N`N|;RAVs2$CERJs>E9&jS^o%Hz4e~u< 
z2penBT_#z|aW7u*MU*Gn3NO{@-i+mJ9H*{mxi{rLI>fuCc45D*p|hKL^B?}Sm?XYk z#sLlY6F=7_#Kvb7Sd;Lvzd2!YkWcX_?hR_{kT)2LA>h_lif+rjK;CjgUqhZt5Y^XB zp=OkSq_bXbVKQ5NGNN?a%0ycn&P73HOaviy{JxF3ize^vpl36^lg(6rGk7~8-Blk! zQD3}_st!?SQ^U12?jk)sDklD5peA-HzxI5${?c{{MC1fNyAcF)Y1OvWQ53anS>@g! zPs8}lvb5;n2xuSoR`~Otp0?~ zSr7AZF8--~n8*8WJ6(kmw_hC-OC(qm$9*s&lF;#L$XBhACn6WK38L~?8$)w(+-bAc z>~brmuQ`R+?v%?3k?}2kEo7WVyoU{@3^t3rBAl1vV4o*@__e-gOY-{Ka0j#g^-0QA zy3&}3G3HMo@F?zTf28+;4=QDOnl|ekhjB^$jOV@<>pw^_Bx7nxTI%IXl>6cKagjB$ zzV$Qe;Q!_=I(}caG#LRp`)#=Xde#N7iw@m!Fw?YEO71AjbN|q zou%hvS1JV?1kImyFNd*-QS5>T0{Z(UcC$(tO=eroy+rrf)!=4XQ!OYGzuX}Goet~k z8i{v*p=TXXwIB?&_Kw74v<_GH#8x_9i?~3F$2coRM`D9=p~R6>R?3j3s?iNJriK8# z?#5kO56+~X_tRz5B3g_kPo2jB4c|UU8G_1c&g54We}6-gO$+A1L)@A8@`<$g_**?e zdo8`F$X8!klk~#dH9(+QAin}Sj?G6{`%`<&b6iOC@f!@5%t-cF<4A$S%(F>cQBCt8 zEYd?;)tJKvavp|KqbA;;zS?gwBB70HWwhK;yOv|LWPN&tu)?y+w7Y=}-DI`#?jIpg z_$a(n3xa9(&q1&&h=Tq)z832ND8vxt{wlKjlv6Zvi&na0ow(gL5St#paDyiCVtUc8 zEleV3ad%{nr(1bQy~7fOeimcw=BL;%fc4H>!gcLQDip`I5=7qYj3LIHrdk|)FM-Pvsuzo7GB3TjZAyz92o1BddRl6Hn?U` z|B+UkRT!_Q6Dx&s&;DdhaG)y89T$h1n17?w?B1vTY>p> zJdp|8%;c1-0*;qDCDll{shx^aXPE-b4OXO@$ z7;v_}U{J%l@jZYz*r1MA!=K+L@$NHBZ#U0uWvB}|B9|0=(-|}1*_7D9sc6sr`Jgd6Xx|LS)aty?S z*q$vK{y!o>Xi7>AuiVjI*t4syN_ zYF85Z+-HjoOsqjIE4x4Q-I)umw`{XeOnvr#IzWTY#e1jKH=||@8}O}CA!v6o4Kfwv z-&41f1)ed3T@n+AXBm+?y31QcK3Fzsi0sA@|4IN3J}L`~5#3>&Mq`<`ry#aEe+v}b z7cVLSsz6VV+0pKYp@Z}j&zELYXexmOouhfAeRCfEo}LF-A?wESWE(AX3zBYFI$pQ9 z_QE19x!I?y{$7ksK~jg`@gIPJ0p)gk1eS~dMTx|#0AfkH$+A3 zmtVCzha?UsTsffbzNtu9$I^czw>vm@z)qIz*Y5f9lcNql_csZr7~t5Kh#hvcsE0*I zHfq>wQw8B6951qfbWhRr2t2VkS028<+$DPebJ4j}`utS~L~i}sFVww45CwjRW1<~e zN(U7vh8>a9I0 z%;}o?)!)?N#;S<|M_uu!fTJ}XUJ!lxOOwu!N3;4t|3{>G5PZkh|o@~V~ADMnHJCnTzAcR(w zh#s2#-YaXpfLV+XQ%@oGsULUj5+;Nb-G|4dFH1he3-ZC|QuqTY=m0-`8B-T6djz@Cv>#Qt zT@oVf!g+)?k!$6Ut%LdEB+<=%&fYXRA_;k=nCTMH2j|KsK6GV?VsE~e@0{aluxHZw zg=%cQO_oN)CLOH~Eng*$d)NbCpBt<+I7|lmo1%F}k(h~IVE3=G@2aXv0iIVmIM2Hm z!Q73WSobt0)DVS_A+fm6t}qfE<~N1x8LH}Kmiv(oQ1Dh8XuUbZXJiep{pzhpH>MhI 
zBPz#prAXqk+ELkpzLBLvkE)ZouO{l%ts;1-+pArFlHcQ1^#A6NYC2g;t)y~i6IHx_f_!@6iP{l7iG43NH*>f`i zZLAXgoTz?3X#OOYC`e>EH;H<27Ra{j(gFy&=Fe6OTS zSB8t0AxtVI?C2L-Ja`5}fw2y)`o&44`I&R|`=+$ed87%7A>blw|5s^!f#9Il{UvQk zLpdBgC&5sW&(3nkD@1b|2jNZn8rn+|jfx^s<$4v3v zI!WELRa7qQ+)*qq(pw7MsfpX;(!Ss0PAX0n4EOL*5G8QRF? z&daw4vhUZqU2exOH0Bd(okKpy3CkwdX!s!~ygv2WAA(oOyUGb$$vrchH@6!@ul2(^2@Bd?;ZOdJA3JUN2KBNFt_Z!i+PWBRP@&q z@30_MW2S$n4~5t$@q39W{hc>0BAkd?`qfz0C$ghYBM> zcv3*E{q~wdT*pj(1~p$fk!@#V^fhl|uS=l3XXrQ)j?*OA%4^h|*oHj<`eiWEt?$rE z1#Zv~tZ$whazOspoEqa#L()7iOEB2gZGX$(lA(i}Trrtz<;8hF5FeVj}70 zE1=B+05!p_w5FP9K(4_ikuM+ z33heDTzTK1)US$e@5jdO)tfO)UQ`(Eq@*`}V{`VvU%dlA+<4$7Jf4Tvk7;OJ3Qw=D z0f33Ti#!`nL{CRVLfnfzzZ`*?{0t)+GSn8#vgC$vu=TDBC($>v*^QCif%~!l5fcF zwxshhKXR&`HHmvNp2(s$EKr=q0$nW3%zIq%uuCn?tXQy1nw3^rWe1U!$ zt)^OoIVZiYMZ*pwY3wSc-V$?%ZjzraZd>pH!@rD6g)C?!1mB3-|fdNN#Tq9x&MR2DofbK zg2r6Th|wdvxBC{<>@|!nyV5uEw0&M}lsG+_ zAS}+Jn){+wnB`(x!osvvpc+%_429lDwK0%r00#$0x~odUr5J(Ib82EcgTo34Kj`s7 zvtv2>z}NxEH|y1#$0<+|jV$tcsV%pw3`7SisasQq%hwG0Tk5QfO=^S49g8s&Tlx72 z1ydKx$;C*`Ua8K2pNL=xM@yI46GrWdDh4HGLd;Q~&V~;p2P^L&H<7|ln5Lm5rDDz3 zXihW{2B;81Uy%V}hz}lvZ|^XmQaTCD6m3dC64MtMXSAJNR#QQ1U-USK(n;N3S{MI> zfx$TpUl*$U$1cMwe#abjQWt~K6 zRLDvy9C7g76um5KoemuhC>~NFkUCZkoy4PyyF{l>DUxlU0|t#2&L__%Y}1r@0Zcq; zWJZ3ECRxPbAptq7!M_=vmIW}9Mu`yeK~&-CVKIoLHWde=kkRuBsEtt+7Hep2 z<3x)H9yefVPFIFg{BI;+q8gT9DWX3W_SxArDZ?{QC1rF_+mk5v27^Qs zmUK@gdU2@u1Bvi^EBXr6MIYN@yqx68$Gn6w$a) zc(W^!G;?op=_Iqu1w|Y@nJy=UyAcoABE*3nu`rnrGQh9Q)lC2#y8@RdE>Yv^u$T@`Tcyx zQdSpjz>)n946<2pNXRW-9u^{ni~)oWnGl5der1d){HTidPCa6>449qlZi>f;lB~yQ z;OQv)06KZynAd2cDQgKI;^ucCr`YB?@7k^YAi#RrvYQd*X9Yn{(-CnoCRtF(x5Uq( zQ%-J#qoTknR|l}sFA*qZhrS}2{s(t7U#9YQLi0SW&m+h6%kCvu;8fntTe$EXyb1rN zvYC}SET4Lva(5^&!}l2?B9wrK(kRP&Hqo_?CApzT)pU8vYG&c$E${h% zW6#9`LB6t*2sq#+vsoF$%C|4dEacOl|HE_IZ?cJRmOZqd2eXWp!{5ebeng0AD~O2{ zY34vE3WSaW4eF`Idr-h+!(tS7_!P(tJas4L5;O__BSwM98p$@snAgc6A3hD>$!2E% znK=q?wrd7zih$)Gi8if@d<~b57Oz0G086skqlI6Iz~N@R;GzoyOrijI8NnwGg?6#)kZ^b^AdVJx+*P;J=g0F&S>}_y*Y$Z 
zCQt`X$BDJ0uvaS<%6v5X8a?$sJmPp%_&X&jr@SvknV;4Y#9I^NG3iHP|9$90E7*Uv z7Krq#yXTgcj?J4t))K4LJgiv&OwEF9}=nV9L7inRj2keR%G_}?s*mOFm1xI=aL@I^KH^P?Pye%$h3ZGX1)y6s)8 zp_~@Q6rMlW{Rz9JyBs=3*!(ezB9Ju%^9%;IK;_e4iWk9by(?#~-HJj~!yCdWg)+?R zhEI7TtfkS>)#~nS^&Fj9+?9749BHawjFQ|pp`7ho)HUH<=EHc%kP*rE$0~WeuWA1> zxvhv83oo@hCfO5lPqnzW$$-%m*hRIZ2J{TVng>!z@LZ8f`T-HAbHfw-;p5-`&hRoo zVGK1Jetc55e0}`^q}`qB960jCpMAut93$2Aj1ggy(EyONA9J9DRaj1Xp;?GWgnPTa z*^r#n&%rEk(UbkV1)f1($q9STo`B#uZl}xFSgYM3S=jO;q29+aRWYG%{8hUv>#FiM zCl)n&HKm#a~%huMPLOJ_sdl zhW}MT6PJY>ZkU35Z@d^uNgdEBDH!4#e5gP;x(^>9s9cMFgZtpbcR#`$Ius918jOr? zm*at3dkU+(0v1O}5N`Y1Gz{q37kAv+4;te>ARrf!-nWN5*t+5UfJdI^*B>*C=1KHl%mL zlDiEs7z|>axg(lu5EWbq6@G2=Hq8Ar2M>&CgY^qO6h7^E4DK3&((Pn@hYJ@=9P|ZS zvEbulKu8vSKMV?7IlVl8#})y!AMcemf>P>D5TyC2bhO zd|~)bbmVMCfDv$wKzPT$;O@>^@o{=H0y|xU>6roId#v=&O~z{kuDR!aMZmcf?^-UG zULXSd*3QDzwH6`$#y^U5LA3f|S0i-WCwO-1RHY!bQ(I{E|Ac22^uwGf(P*bN;)L1QZVUd$nu2|+dIT$%|WTWe*?yx+2F78_x zjyLB_6b~hfpo@z~R2{M{M^Ql~!bCn<(q{1h)F|8{vWgf=Ejd?|6bY+64Dr!S$7+l@ z%?LCV1lFusgY@)t@joxzr`97VC^%CCkoYj<^MlgX%+Q#0#IiF0000< KMNUMnLSTYdV`D@B literal 0 HcmV?d00001 diff --git a/docs/getting_started.rst b/docs/getting_started.rst new file mode 100644 index 0000000..22f9aa2 --- /dev/null +++ b/docs/getting_started.rst @@ -0,0 +1,70 @@ +**************************** +Getting Started with Astropy +**************************** + +Importing Astropy +================= + +In order to encourage consistency amongst users in importing and using Astropy +functionality, we have put together the following guidelines. + +Since most of the functionality in Astropy resides in sub-packages, importing +astropy as:: + + >>> import astropy + +is not very useful. 
Instead, it is best to import the desired sub-package +with the syntax:: + + >>> from astropy import subpackage # doctest: +SKIP + +For example, to access the FITS-related functionality, you can import +`astropy.io.fits` with:: + + >>> from astropy.io import fits + >>> hdulist = fits.open('data.fits') # doctest: +SKIP + +In specific cases, we have recommended shortcuts in the documentation for +specific sub-packages, for example:: + + >>> from astropy import units as u + >>> from astropy import coordinates as coord + >>> coord.SkyCoord(ra=10.68458*u.deg, dec=41.26917*u.deg, frame='icrs') + + +Finally, in some cases, most of the required functionality is contained in a +single class (or a few classes). In those cases, the class can be directly +imported:: + + >>> from astropy.cosmology import WMAP7 + >>> from astropy.table import Table + >>> from astropy.wcs import WCS + +Note that for clarity, and to avoid any issues, we recommend to **never** +import any Astropy functionality using ``*``, for example:: + + >>> from astropy.io.fits import * # NOT recommended + +Some components of Astropy started off as standalone packages (e.g. PyFITS, PyWCS), +so in cases where Astropy needs to be used as a drop-in replacement, the following +syntax is also acceptable:: + + >>> from astropy.io import fits as pyfits + +Getting started with subpackages +================================ + +Because different subpackages have very different functionality, further +suggestions for getting started are in the documentation for the subpackages, +which you can reach by browsing the sections listed in the :ref:`user-docs`. + +Or, if you want to dive right in, you can either look at docstrings for +particular a package or object, or access their documentation using the +`~astropy.utils.misc.find_api_page` function. 
For example, doing this:: + + >>> from astropy import find_api_page + >>> from astropy.units import Quantity + >>> find_api_page(Quantity) # doctest: +SKIP + +Will bring up the documentation for the `~astropy.units.Quantity` class +in your browser. diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..b779f6e --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,217 @@ +.. Astropy documentation master file, created by + sphinx-quickstart on Tue Jul 26 02:59:34 2011. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +:tocdepth: 2 + +.. the "raw" directive below is used to hide the title in favor of just the logo being visible +.. raw:: html + + + +################################## +Astropy Core Package Documentation +################################## + +.. image:: astropy_banner_96.png + :width: 485px + :height: 96px + :target: http://www.astropy.org/ + +Welcome to the Astropy documentation! Astropy is a community-driven +package intended to contain much of the core functionality and some common +tools needed for performing astronomy and astrophysics with Python. + +.. _user-docs: + +****************** +User Documentation +****************** + +.. only:: html + + :doc:`whatsnew/0.4` + ------------------- + +.. only:: latex + + .. toctree:: + :maxdepth: 1 + + whatsnew/0.4 + +**Astropy at a glance** + +.. toctree:: + :maxdepth: 1 + + overview + install + getting_started + +**Core data structures and transformations** + +.. toctree:: + :maxdepth: 1 + + constants/index + units/index + nddata/index + table/index + time/index + coordinates/index + wcs/index + modeling/index + +**Connecting up: Files and I/O** + +.. toctree:: + :maxdepth: 1 + + io/unified + io/fits/index + io/ascii/index + io/votable/index + io/misc + +**Astronomy computations and utilities** + +.. 
toctree:: + :maxdepth: 1 + + convolution/index + cosmology/index + stats/index + vo/index + +**Nuts and bolts of Astropy** + +.. toctree:: + :maxdepth: 1 + + config/index + io/registry + logging + warnings + utils/index + +**Astropy project details** + +.. toctree:: + :maxdepth: 1 + + stability + whatsnew/index + known_issues + credits + license + +.. _getting_help: + +************ +Getting help +************ + +If you want to get help or discuss issues with other Astropy users, you can sign +up for the `astropy mailing list`_. Alternatively, the `astropy-dev mailing +list`_ is where you should go to discuss more technical aspects of Astropy with +the developers. You can also email the astropy developers privately at +`astropy-feedback@googlegroups.com`_...but remember that questions you ask +publicly serve as resources for other users! + +.. _reporting_issues: + +**************** +Reporting Issues +**************** + +If you have found a bug in Astropy please report it. The preferred way is to +create a new issue on the Astropy `GitHub issue page +`_; that requires `creating a free +account `_ on GitHub if you do not have one. + +If you prefer not to create a GitHub account, please report the issue to either +the `astropy mailing list`_, the `astropy-dev mailing list`_ or sending a +private email to the astropy core developers at +`astropy-feedback@googlegroups.com `_. + +Please include an example that demonstrates the issue that will allow the +developers to reproduce and fix the problem. You may be asked to also provide +information about your operating system and a full Python stack trace; the +Astropy developers will walk you through obtaining a stack trace if it is +necessary. + + +For astropy-helpers +------------------- + +As of Astropy v0.4, Astropy and many affiliated packages use a package of +utilities called astropy-helpers during building and installation. 
If you have +any build/installation issue--particularly if you're getting a traceback +mentioning the ``astropy_helpers`` or ``ah_bootstrap`` modules--please send a +report to the `astropy-helpers issue tracker +`_. If you're not sure, +however, it's fine to report via the main Astropy issue tracker or one of the +other avenues described above. + + +************ +Contributing +************ + +The Astropy project is made both by and for its users, so we highly encourage +contributions at all levels. This spans the gamut from sending an email +mentioning a typo in the documentation or requesting a new feature all the way +to developing a major new package. + +The full range of ways to be part of the Astropy project are described at +`Contribute to Astropy `_. To get +started contributing code or documentation (no git or GitHub experience +necessary): + +.. toctree:: + :maxdepth: 1 + + development/workflow/get_devel_version + development/workflow/development_workflow + + +.. _developer-docs: + +*********************** +Developer Documentation +*********************** + +The developer documentation contains instructions for how to contribute to +Astropy or affiliated packages, as well as coding, documentation, and +testing guidelines. For the guiding vision of this process and the project +as a whole, see :doc:`development/vision`. + +.. toctree:: + :maxdepth: 1 + + development/workflow/development_workflow + development/codeguide + development/docguide + development/testguide + development/scripts + development/building + development/ccython + development/releasing + development/workflow/maintainer_workflow + development/affiliated-packages + changelog + +****************** +Indices and Tables +****************** + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + +.. _astropy mailing list: http://mail.scipy.org/mailman/listinfo/astropy +.. 
_astropy-feedback@googlegroups.com: mailto:astropy-feedback@googlegroups.com diff --git a/docs/install.rst b/docs/install.rst new file mode 100644 index 0000000..af0afdd --- /dev/null +++ b/docs/install.rst @@ -0,0 +1,382 @@ +************ +Installation +************ + +Requirements +============ + +Astropy has the following strict requirements: + +- `Python `_ 2.6 (>=2.6.5), 2.7, 3.1, 3.2, 3.3, or 3.4 + +- `Numpy`_ |minimum_numpy_version| or later + +Astropy also depends on other packages for optional features: + +- `h5py `_: To read/write + :class:`~astropy.table.Table` objects from/to HDF5 files + +- `BeautifulSoup `_: To read + :class:`~astropy.table.table.Table` objects from HTML files + +- `scipy`_: To power a variety of features (currently + mainly cosmology-related functionality) + +- `xmllint `_: To validate VOTABLE XML files. + +However, note that these only need to be installed if those particular features +are needed. Astropy will import even if these dependencies are not installed. + +.. TODO: Link to the planned dependency checker/installer tool. + +Installing Astropy +================== + +Using pip +------------- + +To install Astropy with `pip `_, simply run:: + + pip install --no-deps astropy + +.. warning:: + + Users of the Anaconda python distribution should follow the instructions + for :ref:`anaconda_install`. + +.. note:: + + You will need a C compiler (e.g. ``gcc`` or ``clang``) to be installed (see + `Building from source`_ below) for the installation to succeed. + +.. note:: + + The ``--no-deps`` flag is optional, but highly recommended if you already + have Numpy installed, since otherwise pip will sometimes try to "help" you + by upgrading your Numpy installation, which may not always be desired. + +.. note:: + + If you get a ``PermissionError`` this means that you do not have the + required administrative access to install new packages to your Python + installation. 
In this case you may consider using the ``--user`` option + to install the package into your home directory. You can read more + about how to do this in the `pip documentation + `_. + + Alternatively, if you intend to do development on other software that uses + Astropy, such as an affiliated package, consider installing Astropy into a + :ref:`virtualenv`. + + Do **not** install Astropy or other third-party packages using ``sudo`` + unless you are fully aware of the risks. + + +.. _anaconda_install: + +Anaconda python distribution +---------------------------- + +Astropy is installed by default with Anaconda. To update to the latest version +run:: + + conda update astropy + +.. note:: + + There may be a delay of a day or to between when a new version of Astropy + is released and when a package is available for Anaconda. You can check + for the list of available versions with ``conda search astropy``. + +.. note:: + + Attempting to use ``pip`` to upgrade your installation of Astropy may result + in a corrupted installation. + + +Binary installers +----------------- + +Binary installers are available on Windows for Python 2.6, 2.7, 3.1, and 3.2 +at `PyPI `_. + +.. _testing_installed_astropy: + + +Testing an installed Astropy +---------------------------- + +The easiest way to test your installed version of astropy is running +correctly is to use the :ref:`astropy.test()` function:: + + import astropy + astropy.test() + +The tests should run and print out any failures, which you can report at +the `Astropy issue tracker `_. + +.. note:: + + This way of running the tests may not work if you do it in the + astropy source distribution. See :ref:`sourcebuildtest` for how to + run the tests from the source code directory, or :ref:`running-tests` + for more details. + +.. note:: + + Running the tests this way is currently disabled in the IPython REPL due + to conflicts with some common display settings in IPython. 
Please run the + Astropy tests under the standard Python command-line interpreter. + + + +Building from source +==================== + +Prerequisites +------------- + +You will need a compiler suite and the development headers for Python and +Numpy in order to build Astropy. On Linux, using the package manager for your +distribution will usually be the easiest route, while on MacOS X you will +need the XCode command line tools. + +The `instructions for building Numpy from source +`_ are also a good +resource for setting up your environment to build Python packages. + +You will also need `Cython `_ (v0.15 or later) installed +to build from source, unless you are installing a numbered release. (The +releases packages have the necessary C files packaged with them, and hence do +not require Cython.) + +.. note:: + + If you are using MacOS X, you will need to the XCode command line tools. + One way to get them is to install `XCode + `_. If you are using OS X 10.7 (Lion) + or later, you must also explicitly install the command line tools. You can + do this by opening the XCode application, going to **Preferences**, then + **Downloads**, and then under **Components**, click on the Install button + to the right of **Command Line Tools**. Alternatively, on 10.7 (Lion) or + later, you do not need to install XCode, you can download just the command + line tools from https://developer.apple.com/downloads/index.action + (requires an Apple developer account). + + +Obtaining the source packages +----------------------------- + +Source packages +^^^^^^^^^^^^^^^ + +The latest stable source package for Astropy can be `downloaded here +`_. + +Development repository +^^^^^^^^^^^^^^^^^^^^^^ + +The latest development version of Astropy can be cloned from github +using this command:: + + git clone git://github.com/astropy/astropy.git + +.. note:: + + If you wish to participate in the development of Astropy, see + :ref:`developer-docs`. 
This document covers only the basics + necessary to install Astropy. + +Building and Installing +----------------------- + +Astropy uses the Python `distutils framework +`_ for building and +installing and requires the +`distribute `_ extension--the later is +automatically downloaded when running ``python setup.py`` if it is not already +provided by your system. + +If Numpy is not already installed in your Python environment, the +astropy setup process will try to download and install it before +continuing to install astropy. + +To build Astropy (from the root of the source tree):: + + python setup.py build + +To install Astropy (from the root of the source tree):: + + python setup.py install + + +Troubleshooting +--------------- + +If you get an error mentioning that you do not have the correct permissions to +install Astropy into the default ``site-packages`` directory, you can try +installing with:: + + python setup.py install --user + +which will install into a default directory in your home directory. + + +External C libraries +^^^^^^^^^^^^^^^^^^^^ + +The Astropy source ships with the C source code of a number of +libraries. By default, these internal copies are used to build +Astropy. However, if you wish to use the system-wide installation of +one of those libraries, you can pass one or more of the +``--use-system-X`` flags to the ``setup.py build`` command. + +For example, to build Astropy using the system `libexpat +`_, use:: + + python setup.py build --use-system-expat + +To build using all of the system libraries, use:: + + python setup.py build --use-system-libraries + +To see which system libraries Astropy knows how to build against, use:: + + python setup.py build --help + +As with all distutils commandline options, they may also be provided in a +``setup.cfg`` in the same directory as ``setup.py``. 
For example, to use +the system `libexpat `_, add the following to the +``setup.cfg`` file:: + + [build] + use_system_expat=1 + + +The required version of setuptools is not available +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If upon running the ``setup.py`` script you get a message like + + The required version of setuptools (>=0.9.8) is not available, + and can't be installed while this script is running. Please + install a more recent version first, using + 'easy_install -U setuptools'. + + (Currently using setuptools 0.6c11 (/path/to/setuptools-0.6c11-py2.7.egg)) + +this is because you have a very outdated version of the `setuptools +`_ package which is used to install +Python packages. Normally Astropy will bootstrap newer version of +setuptools via the network, but setuptools suggests that you first +*uninstall* the old version (the ``easy_install -U setuptools`` command). + +However, in the likely case that your version of setuptools was installed by an +OS system package (on Linux check your package manager like apt or yum for a +package called ``python-setuptools``), trying to uninstall with +``easy_install`` and without using ``sudo`` may not work, or may leave your +system package in an inconsistent state. + +As the best course of action at this point depends largely on the individual +system and how it is configured, if you are not sure yourself what do please +ask on the Astropy mailing list. + + +The Windows installer can't find Python in the registry +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This is a common issue with Windows installers for Python packages that do not +support the new User Access Control (UAC) framework added in Windows Vista and +later. In particular, when a Python is installed "for all users" (as opposed +to for a single user) it adds entries for that Python installation under the +``HKEY_LOCAL_MACHINE`` (HKLM) hierarchy and *not* under the +``HKEY_CURRENT_USER`` (HKCU) hierarchy. 
However, depending on your UAC +settings, if the Astropy installer is not executed with elevated privileges it +will not be able to check in HKLM for the required information about your +Python installation. + +In short: If you encounter this problem it's because you need the appropriate +entries in the Windows registry for Python. You can download `this script`__ +and execute it with the same Python as the one you want to install Astropy +into. For example to add the missing registry entries to your Python 2.7:: + + C:\>C:\Python27\python.exe C:\Path\To\Downloads\win_register_python.py + +__ https://gist.github.com/embray/6042780#file-win_register_python-py + +.. _builddocs: + +Building documentation +---------------------- + +.. note:: + + Building the documentation is in general not necessary unless you + are writing new documentation or do not have internet access, because + the latest (and archive) versions of astropy's documentation should + be available at `docs.astropy.org `_ . + +Building the documentation requires the Astropy source code and some additional +packages: + + - `Sphinx `_ (and its dependencies) 1.0 or later + + - `Graphviz `_ + + - `Astropy-helpers `_ (Astropy + and most affiliated packages include this as a submodule in the source + repository, so it does not need to be installed separately.) + +.. note:: + + Sphinx also requires a reasonably modern LaTeX installation to render + equations. Per the `Sphinx documentation + `_, + for the TexLive distribution the following packages are required to be + installed: + + * latex-recommended + * latex-extra + * fonts-recommended + + For other LaTeX distributions your mileage may vary. To build the PDF + documentation using LaTeX, the ``fonts-extra`` TexLive package or the + ``inconsolata`` CTAN package are also required. + +There are two ways to build the Astropy documentation. 
The most straightforward +way is to execute the command (from the astropy source directory):: + + python setup.py build_sphinx + +The documentation will be built in the ``docs/_build/html`` directory, and can +be read by pointing a web browser to ``docs/_build/html/index.html``. + +The LaTeX documentation can be generated by using the command:: + + python setup.py build_sphinx -b latex + +The LaTeX file ``Astropy.tex`` will be created in the ``docs/_build/latex`` +directory, and can be compiled using ``pdflatex``. + +The above method builds the API documentation from the source code. +Alternatively, you can do:: + + cd docs + make html + +And the documentation will be generated in the same location, but using the +*installed* version of Astropy. + +.. _sourcebuildtest: + +Testing a source code build of Astropy +-------------------------------------- + +The easiest way to test that your Astropy built correctly (without +installing astropy) is to run this from the root of the source tree:: + + python setup.py test + +There are also alternative methods of :ref:`running-tests`. + +.. include:: development/workflow/known_projects.inc diff --git a/docs/io/ascii/base_classes.rst b/docs/io/ascii/base_classes.rst new file mode 100644 index 0000000..406c75f --- /dev/null +++ b/docs/io/ascii/base_classes.rst @@ -0,0 +1,21 @@ +.. include:: references.txt + +.. _base_class_elements: + +Base class elements +---------------------------- + +The key elements in :mod:`astropy.io.ascii` are: + +* :class:`~astropy.io.ascii.Column`: Internal storage of column properties and data () +* :class:`Reader `: Base class to handle reading and writing tables. +* :class:`Inputter `: Get the lines from the table input. +* :class:`Splitter `: Split the lines into string column values. +* :class:`Header `: Initialize output columns based on the table header or user input. +* :class:`Data `: Populate column data from the table. 
+* :class:`Outputter `: Convert column data to the specified output format, e.g. `numpy` structured array. + +Each of these elements is an inheritable class with attributes that control the +corresponding functionality. In this way the large number of tweakable +parameters is modularized into managable groups. Where it makes sense these +attributes are actually functions that make it easy to handle special cases. diff --git a/docs/io/ascii/extension_classes.rst b/docs/io/ascii/extension_classes.rst new file mode 100644 index 0000000..808fc4a --- /dev/null +++ b/docs/io/ascii/extension_classes.rst @@ -0,0 +1,30 @@ +.. include:: references.txt + +.. _extension_reader_classes: + +Extension Reader classes +------------------------ + +The following classes extend the base :class:`~astropy.io.ascii.BaseReader` functionality to handle reading and writing +different table formats. Some, such as the :class:`~astropy.io.ascii.Basic` Reader class +are fairly general and include a number of configurable attributes. Others +such as :class:`~astropy.io.ascii.Cds` or :class:`~astropy.io.ascii.Daophot` are specialized to read certain +well-defined but idiosyncratic formats. 
+ +* :class:`~astropy.io.ascii.AASTex`: AASTeX `deluxetable `_ used for AAS journals +* :class:`~astropy.io.ascii.Basic`: basic table with customizable delimiters and header configurations +* :class:`~astropy.io.ascii.Cds`: `CDS format table `_ (also Vizier and ApJ machine readable tables) +* :class:`~astropy.io.ascii.CommentedHeader`: column names given in a line that begins with the comment character +* :class:`~astropy.io.ascii.Daophot`: table from the IRAF DAOphot package +* :class:`~astropy.io.ascii.FixedWidth`: table with fixed-width columns (see also :ref:`fixed_width_gallery`) +* :class:`~astropy.io.ascii.FixedWidthNoHeader`: table with fixed-width columns and no header +* :class:`~astropy.io.ascii.FixedWidthTwoLine`: table with fixed-width columns and a two-line header +* :class:`~astropy.io.ascii.HTML`: HTML format table contained in a
tag +* :class:`~astropy.io.ascii.Ipac`: `IPAC format table `_ +* :class:`~astropy.io.ascii.Latex`: LaTeX table with datavalue in the ``tabular`` environment +* :class:`~astropy.io.ascii.NoHeader`: basic table with no header where columns are auto-named +* :class:`~astropy.io.ascii.Rdb`: tab-separated values with an extra line after the column definition line +* :class:`~astropy.io.ascii.SExtractor`: `SExtractor format table `_ +* :class:`~astropy.io.ascii.Tab`: tab-separated values +* :class:`~astropy.io.ascii.Csv`: comma-separated values + diff --git a/docs/io/ascii/fixed_width_gallery.rst b/docs/io/ascii/fixed_width_gallery.rst new file mode 100644 index 0000000..aca8d83 --- /dev/null +++ b/docs/io/ascii/fixed_width_gallery.rst @@ -0,0 +1,359 @@ +.. include:: references.txt + +.. _fixed_width_gallery: + +Fixed-width Gallery +------------------- + +Fixed-width tables are those where each column has the same width for every row +in the table. This is commonly used to make tables easy to read for humans or +FORTRAN codes. It also reduces issues with quoting and special characters, +for example:: + + Col1 Col2 Col3 Col4 + ---- --------- ---- ---- + 1.2 "hello" 1 a + 2.4 's worlds 2 2 + +There are a number of common variations in the formatting of fixed-width tables +which :mod:`astropy.io.ascii` can read and write. The most signicant difference is +whether there is no header line (:class:`~astropy.io.ascii.FixedWidthNoHeader`), one +header line (:class:`~astropy.io.ascii.FixedWidth`), or two header lines +(:class:`~astropy.io.ascii.FixedWidthTwoLine`). Next, there are variations in the +delimiter character, whether the delimiter appears on either end ("bookends"), +and padding around the delimiter. + +Details are available in the class API documentation, but the easiest way to +understand all the options and their interactions is by example. 
+ +Reading +^^^^^^^ + +FixedWidth +"""""""""" + +**Nice, typical fixed format table** +:: + + >>> from astropy.io import ascii + >>> table = """ + ... # comment (with blank line above) + ... | Col1 | Col2 | + ... | 1.2 | "hello" | + ... | 2.4 |'s worlds| + ... """ + >>> ascii.read(table, format='fixed_width') +
+ array([(1.2, '"hello"'), (2..., "'s worlds")], + dtype=[('Col1', '>> table = """ + ... # comment (with blank line above) + ... | Col1 | Col2 | + ... | 1.2 | "hello" | + ... | 2.4 |'s worlds| + ... """ + >>> ascii.read(table, format='fixed_width', names=('name1', 'name2')) +
+ array([(1.2, '"hello"'), (2..., "'s worlds")], + dtype=[('name1', '>> table = """ + ... Col1 | Col2 | + ... 1.2 "hello" + ... 2.4 sdf's worlds + ... """ + >>> ascii.read(table, format='fixed_width') +
+ array([(1.2, '"hel'), (2..., "df's wo")], + dtype=[('Col1', '>> table = """ + ... || Name || Phone || TCP|| + ... | John | 555-1234 |192.168.1.10X| + ... | Mary | 555-2134 |192.168.1.12X| + ... | Bob | 555-4527 | 192.168.1.9X| + ... """ + >>> ascii.read(table, format='fixed_width') +
+ array([('John', '555-1234', '192.168.1.10'), + ('Mary', '555-2134', '192.168.1.12'), + ('Bob', '555-4527', '192.168.1.9')], + dtype=[('Name', 'S4'), ('Phone', 'S8'), ('TCP', 'S12')]) + +**Table with space delimiter** +:: + + >>> table = """ + ... Name --Phone- ----TCP----- + ... John 555-1234 192.168.1.10 + ... Mary 555-2134 192.168.1.12 + ... Bob 555-4527 192.168.1.9 + ... """ + >>> ascii.read(table, format='fixed_width', delimiter=' ') +
+ array([('John', '555-1234', '192.168.1.10'), + ('Mary', '555-2134', '192.168.1.12'), + ('Bob', '555-4527', '192.168.1.9')], + dtype=[('Name', 'S4'), ('--Phone-', 'S8'), ('----TCP-----', 'S12')]) + +**Table with no header row and auto-column naming.** + +Use header_start and data_start keywords to indicate no header line. +:: + + >>> table = """ + ... | John | 555-1234 |192.168.1.10| + ... | Mary | 555-2134 |192.168.1.12| + ... | Bob | 555-4527 | 192.168.1.9| + ... """ + >>> ascii.read(table, format='fixed_width', + ... header_start=None, data_start=0) +
+ array([('John', '555-1234', '192.168.1.10'), + ('Mary', '555-2134', '192.168.1.12'), + ('Bob', '555-4527', '192.168.1.9')], + dtype=[('col1', 'S4'), ('col2', 'S8'), ('col3', 'S12')]) + +**Table with no header row and with col names provided.** + +Second and third rows also have hanging spaces after final "|". Use header_start and data_start +keywords to indicate no header line. +:: + + >>> table = ["| John | 555-1234 |192.168.1.10|", + ... "| Mary | 555-2134 |192.168.1.12| ", + ... "| Bob | 555-4527 | 192.168.1.9| "] + >>> ascii.read(table, format='fixed_width', + ... header_start=None, data_start=0, + ... names=('Name', 'Phone', 'TCP')) +
+ array([('John', '555-1234', '192.168.1.10'), + ('Mary', '555-2134', '192.168.1.12'), + ('Bob', '555-4527', '192.168.1.9')], + dtype=[('Name', 'S4'), ('Phone', 'S8'), ('TCP', 'S12')]) + + +FixedWidthNoHeader +"""""""""""""""""" + +**Table with no header row and auto-column naming. Use the FixedWidthNoHeader +convenience class.** +:: + + >>> table = """ + ... | John | 555-1234 |192.168.1.10| + ... | Mary | 555-2134 |192.168.1.12| + ... | Bob | 555-4527 | 192.168.1.9| + ... """ + >>> ascii.read(table, format='fixed_width_no_header') +
+ array([('John', '555-1234', '192.168.1.10'), + ('Mary', '555-2134', '192.168.1.12'), + ('Bob', '555-4527', '192.168.1.9')], + dtype=[('col1', 'S4'), ('col2', 'S8'), ('col3', 'S12')]) + +**Table with no delimiter with column start and end values specified.** + +This uses the col_starts and col_ends keywords. Note that the +col_ends values are inclusive so a position range of 0 to 5 +will select the first 6 characters. +:: + + >>> table = """ + ... # 5 9 17 18 28 <== Column start / end indexes + ... # | | || | <== Column separation positions + ... John 555- 1234 192.168.1.10 + ... Mary 555- 2134 192.168.1.12 + ... Bob 555- 4527 192.168.1.9 + ... """ + >>> ascii.read(table, format='fixed_width_no_header', + ... names=('Name', 'Phone', 'TCP'), + ... col_starts=(0, 9, 18), + ... col_ends=(5, 17, 28), + ... ) +
+ array([('John', '555- 1234', '192.168.1.'), + ('Mary', '555- 2134', '192.168.1.'), + ('Bob', '555- 4527', '192.168.1')], + dtype=[('Name', 'S4'), ('Phone', 'S9'), ('TCP', 'S10')]) + +FixedWidthTwoLine +""""""""""""""""" + +**Typical fixed format table with two header lines with some cruft** +:: + + >>> table = """ + ... Col1 Col2 + ... ---- --------- + ... 1.2xx"hello" + ... 2.4 's worlds + ... """ + >>> ascii.read(table, format='fixed_width_two_line') +
+ array([(1.2, '"hello"'), (2..., "'s worlds")], + dtype=[('Col1', '>> table = """ + ... ======= =========== + ... Col1 Col2 + ... ======= =========== + ... 1.2 "hello" + ... 2.4 's worlds + ... ======= =========== + ... """ + >>> ascii.read(table, format='fixed_width_two_line', + ... header_start=1, position_line=2, data_end=-1) +
+ array([(1.2, '"hello"'), (2..., "'s worlds")], + dtype=[('Col1', '>> table = """ + ... +------+----------+ + ... | Col1 | Col2 | + ... +------|----------+ + ... | 1.2 | "hello" | + ... | 2.4 | 's worlds| + ... +------+----------+ + ... """ + >>> ascii.read(table, format='fixed_width_two_line', delimiter='+', + ... header_start=1, position_line=0, data_start=3, data_end=-1) +
+ array([(1.2, '"hello"'), (2..., "'s worlds")], + dtype=[('Col1', '>> table = """ + ... | Col1 | Col2 | Col3 | Col4 | + ... | 1.2 | "hello" | 1 | a | + ... | 2.4 | 's worlds | 2 | 2 | + ... """ + >>> dat = ascii.read(table, format='fixed_width') + +**Write a table as a normal fixed width table.** +:: + + >>> ascii.write(dat, format='fixed_width') + | Col1 | Col2 | Col3 | Col4 | + | 1.2 | "hello" | 1 | a | + | 2.4 | 's worlds | 2 | 2 | + +**Write a table as a fixed width table with no padding.** +:: + + >>> ascii.write(dat, format='fixed_width', delimiter_pad=None) + |Col1| Col2|Col3|Col4| + | 1.2| "hello"| 1| a| + | 2.4|'s worlds| 2| 2| + +**Write a table as a fixed width table with no bookend.** +:: + + >>> ascii.write(dat, format='fixed_width', bookend=False) + Col1 | Col2 | Col3 | Col4 + 1.2 | "hello" | 1 | a + 2.4 | 's worlds | 2 | 2 + +**Write a table as a fixed width table with no delimiter.** +:: + + >>> ascii.write(dat, format='fixed_width', bookend=False, delimiter=None) + Col1 Col2 Col3 Col4 + 1.2 "hello" 1 a + 2.4 's worlds 2 2 + +**Write a table as a fixed width table with no delimiter and formatting.** +:: + + >>> ascii.write(dat, format='fixed_width', + ... 
formats={'Col1': '%-8.3f', 'Col2': '%-15s'}) + | Col1 | Col2 | Col3 | Col4 | + | 1.200 | "hello" | 1 | a | + | 2.400 | 's worlds | 2 | 2 | + +FixedWidthNoHeader +"""""""""""""""""" + +**Write a table as a normal fixed width table.** +:: + + >>> ascii.write(dat, format='fixed_width_no_header') + | 1.2 | "hello" | 1 | a | + | 2.4 | 's worlds | 2 | 2 | + +**Write a table as a fixed width table with no padding.** +:: + + >>> ascii.write(dat, format='fixed_width_no_header', delimiter_pad=None) + |1.2| "hello"|1|a| + |2.4|'s worlds|2|2| + +**Write a table as a fixed width table with no bookend.** +:: + + >>> ascii.write(dat, format='fixed_width_no_header', bookend=False) + 1.2 | "hello" | 1 | a + 2.4 | 's worlds | 2 | 2 + +**Write a table as a fixed width table with no delimiter.** +:: + + >>> ascii.write(dat, format='fixed_width_no_header', bookend=False, + ... delimiter=None) + 1.2 "hello" 1 a + 2.4 's worlds 2 2 + +FixedWidthTwoLine +""""""""""""""""" + +**Write a table as a normal fixed width table.** +:: + + >>> ascii.write(dat, format='fixed_width_two_line') + Col1 Col2 Col3 Col4 + ---- --------- ---- ---- + 1.2 "hello" 1 a + 2.4 's worlds 2 2 + +**Write a table as a fixed width table with space padding and '=' position_char.** +:: + + >>> ascii.write(dat, format='fixed_width_two_line', + ... delimiter_pad=' ', position_char='=') + Col1 Col2 Col3 Col4 + ==== ========= ==== ==== + 1.2 "hello" 1 a + 2.4 's worlds 2 2 + +**Write a table as a fixed width table with no bookend.** +:: + + >>> ascii.write(dat, format='fixed_width_two_line', bookend=True, delimiter='|') + |Col1| Col2|Col3|Col4| + |----|---------|----|----| + | 1.2| "hello"| 1| a| + | 2.4|'s worlds| 2| 2| diff --git a/docs/io/ascii/index.rst b/docs/io/ascii/index.rst new file mode 100644 index 0000000..d68533c --- /dev/null +++ b/docs/io/ascii/index.rst @@ -0,0 +1,202 @@ +.. include:: references.txt + +.. 
_io-ascii: + +********************************* +ASCII Tables (`astropy.io.ascii`) +********************************* + +Introduction +============ + +`astropy.io.ascii` provides methods for reading and writing a wide range of ASCII data table +formats via built-in :ref:`extension_reader_classes`. The emphasis is on flexibility and ease of use. + +The following shows a few of the ASCII formats that are available, while the section on +`Supported formats`_ contains the full list. + +* :class:`~astropy.io.ascii.Basic`: basic table with customizable delimiters and header configurations +* :class:`~astropy.io.ascii.Cds`: `CDS format table `_ (also Vizier and ApJ machine readable tables) +* :class:`~astropy.io.ascii.Daophot`: table from the IRAF DAOphot package +* :class:`~astropy.io.ascii.FixedWidth`: table with fixed-width columns (see also :ref:`fixed_width_gallery`) +* :class:`~astropy.io.ascii.Ipac`: `IPAC format table `_ +* :class:`~astropy.io.ascii.HTML`: HTML format table contained in a
tag +* :class:`~astropy.io.ascii.Latex`: LaTeX table with datavalue in the ``tabular`` environment +* :class:`~astropy.io.ascii.Rdb`: tab-separated values with an extra line after the column definition line +* :class:`~astropy.io.ascii.SExtractor`: `SExtractor format table `_ + +The :mod:`astropy.io.ascii` package is built on a modular and extensible class +structure with independent :ref:`base_class_elements` so that new formats can +be easily accommodated. + +.. note:: + + It is also possible to use the functionality from + :mod:`astropy.io.ascii` through a higher-level interface in the + :mod:`astropy.table` package. See :ref:`table_io` for more details. + +Getting Started +=============== + +Reading Tables +-------------- + +The majority of commonly encountered ASCII tables can be easily read with the |read| +function. Assume you have a file named ``sources.dat`` with the following contents:: + + obsid redshift X Y object + 3102 0.32 4167 4085 Q1250+568-A + 877 0.22 4378 3892 "Source 82" + +This table can be read with the following:: + + >>> from astropy.io import ascii + >>> data = ascii.read("sources.dat") # doctest: +SKIP + >>> print data # doctest: +SKIP + obsid redshift X Y object + ----- -------- ---- ---- ----------- + 3102 0.32 4167 4085 Q1250+568-A + 877 0.22 4378 3892 Source 82 + +The first argument to the |read| function can be the name of a file, a string +representation of a table, or a list of table lines. By default |read| will +try to `guess the table format <#guess-table-format>`_ by trying all the +`supported formats`_. If this does not work (for unusually formatted tables) then +one needs give astropy.io.ascii additional hints about the format, for +example:: + + >>> lines = ['objID & osrcid & xsrcid ', + ... '----------------------- & ----------------- & -------------', + ... ' 277955213 & S000.7044P00.7513 & XS04861B6_005', + ... 
' 889974380 & S002.9051P14.7003 & XS03957B7_004'] + >>> data = ascii.read(lines, data_start=2, delimiter='&') + >>> print(data) + objID osrcid xsrcid + --------- ----------------- ------------- + 277955213 S000.7044P00.7513 XS04861B6_005 + 889974380 S002.9051P14.7003 XS03957B7_004 + +If the format of a file is known (e.g. it is a fixed width table or an IPAC table), +then it is more efficient and reliable to provide a value for the ``format`` argument from one +of the values in the `supported formats`_. For example:: + + >>> data = ascii.read(lines, format='fixed_width_two_line', delimiter='&') + +Writing Tables +-------------- + +The |write| function provides a way to write a data table as a formatted ASCII +table. For example the following writes a table as a simple space-delimited +file:: + + >>> import numpy as np + >>> from astropy.table import Table + >>> x = np.array([1, 2, 3]) + >>> y = x ** 2 + >>> data = Table([x, y], names=['x', 'y']) + >>> ascii.write(data, 'values.dat') + +The ``values.dat`` file will then contain:: + + x y + 1 1 + 2 4 + 3 9 + +All of the input Reader formats supported by `astropy.io.ascii` for reading are +also supported for writing. This provides a great deal of flexibility in the +format for writing. The example below writes the data as a LaTeX table, using +the option to send the output to ``sys.stdout`` instead of a file:: + + >>> import sys + >>> ascii.write(data, sys.stdout, format='latex') + \begin{table} + \begin{tabular}{cc} + x & y \\ + 1 & 1 \\ + 2 & 4 \\ + 3 & 9 \\ + \end{tabular} + \end{table} + +.. _supported_formats: + +Supported formats +================= + +A full list of the supported ``format`` values and corresponding format types for ASCII +tables is given below. The ``Write`` column indicates which formats support write +functionality. 
+ +========================= ===== ============================================================================================ + Format Write Description +========================= ===== ============================================================================================ +``aastex`` Yes :class:`~astropy.io.ascii.AASTex`: AASTeX deluxetable used for AAS journals +``basic`` Yes :class:`~astropy.io.ascii.Basic`: Basic table with custom delimiters +``cds`` :class:`~astropy.io.ascii.Cds`: CDS format table +``commented_header`` Yes :class:`~astropy.io.ascii.CommentedHeader`: Column names in a commented line +``csv`` Yes :class:`~astropy.io.ascii.Csv`: Basic table with comma-separated values +``daophot`` :class:`~astropy.io.ascii.Daophot`: IRAF DAOphot format table +``fixed_width`` Yes :class:`~astropy.io.ascii.FixedWidth`: Fixed width +``fixed_width_no_header`` Yes :class:`~astropy.io.ascii.FixedWidthNoHeader`: Fixed width with no header +``fixed_width_two_line`` Yes :class:`~astropy.io.ascii.FixedWidthTwoLine`: Fixed width with second header line +``html`` Yes :class:`~astropy.io.ascii.HTML`: HTML format table +``ipac`` Yes :class:`~astropy.io.ascii.Ipac`: IPAC format table +``latex`` Yes :class:`~astropy.io.ascii.Latex`: LaTeX table +``no_header`` Yes :class:`~astropy.io.ascii.NoHeader`: Basic table with no headers +``rdb`` Yes :class:`~astropy.io.ascii.Rdb`: Tab-separated with a type definition header line +``sextractor`` :class:`~astropy.io.ascii.SExtractor`: SExtractor format table +``tab`` Yes :class:`~astropy.io.ascii.Tab`: Basic table with tab-separated values +========================= ===== ============================================================================================ + + +Using `astropy.io.ascii` +======================== + +The details of using `astropy.io.ascii` are provided in the following sections: + +Reading tables +--------------- + +.. toctree:: + :maxdepth: 2 + + read + +Writing tables +--------------- + +.. 
toctree:: + :maxdepth: 2 + + write + +Fixed-width Gallery +-------------------- + +.. toctree:: + :maxdepth: 2 + + fixed_width_gallery + +Base class elements +------------------- + +.. toctree:: + :maxdepth: 2 + + base_classes + +Extension Reader classes +------------------------ + +.. toctree:: + :maxdepth: 2 + + extension_classes + + +Reference/API +============= + +.. automodapi:: astropy.io.ascii diff --git a/docs/io/ascii/read.rst b/docs/io/ascii/read.rst new file mode 100644 index 0000000..0f4c6ef --- /dev/null +++ b/docs/io/ascii/read.rst @@ -0,0 +1,387 @@ +.. include:: references.txt + +.. _astropy.io.ascii_read: + +Reading tables +-------------- + +The majority of commonly encountered ASCII tables can be easily read with the |read| +function:: + + >>> from astropy.io import ascii + >>> data = ascii.read(table) # doctest: +SKIP + +where ``table`` is the name of a file, a string representation of a table, or a +list of table lines. By default |read| will try to `guess the table format <#guess-table-format>`_ +by trying all the supported formats. If this does not work (for unusually +formatted tables) then one needs give `astropy.io.ascii` additional hints about the +format, for example:: + + >>> data = astropy.io.ascii.read('t/nls1_stackinfo.dbout', data_start=2, delimiter='|') # doctest: +SKIP + >>> data = astropy.io.ascii.read('t/simple.txt', quotechar="'") # doctest: +SKIP + >>> data = astropy.io.ascii.read('t/simple4.txt', format='no_header', delimiter='|') # doctest: +SKIP + +The |read| function accepts a number of parameters that specify the detailed +table format. Different formats can define different defaults, so the +descriptions below sometimes mention "typical" default values. This refers to +the :class:`~astropy.io.ascii.Basic` format reader and other similar character-separated formats. + +.. 
_io_ascii_read_parameters: + +Parameters for ``read()`` +^^^^^^^^^^^^^^^^^^^^^^^^^ + +**table** : input table + There are four ways to specify the table to be read: + + - Name of a file (string) + - Single string containing all table lines separated by newlines + - File-like object with a callable read() method + - List of strings where each list element is a table line + + The first two options are distinguished by the presence of a newline in the string. + This assumes that valid file names will not normally contain a newline. + +**format** : file format (default='basic') + This specifies the top-level format of the ASCII table, for example + if it is a basic character delimited table, fixed format table, or + a CDS-compatible table, etc. The value of this parameter must + be one of the :ref:`supported_formats`. + +**guess**: try to guess table format (default=True) + If set to True then |read| will try to guess the table format by cycling + through a number of possible table format permutations and attempting to read + the table in each case. See the `Guess table format`_ section for further details. + +**delimiter** : column delimiter string + A one-character string used to separate fields which typically defaults to + the space character. Other common values might be "\\s" (whitespace), "," or + "|" or "\\t" (tab). A value of "\\s" allows any combination of the tab and + space characters to delimit columns. + +**comment** : regular expression defining a comment line in table + If the ``comment`` regular expression matches the beginning of a table line then that line + will be discarded from header or data processing. For the ``basic`` format this + defaults to "\\s*#" (any whitespace followed by #). + +**quotechar** : one-character string to quote fields containing special characters + This specifies the quote character and will typically be either the single or double + quote character. 
This can be useful for reading text fields with spaces in a space-delimited + table. The default is typically the double quote. + +**header_start** : line index for the header line not counting comment lines + This specifies the line index where the header line will be found. Comment lines are + not included in this count and the counting starts from 0 (first non-comment line has index=0). + If set to None this indicates that there is no header line and the column names + will be auto-generated. The default is dependent on the format. + +**data_start**: line index for the start of data not counting comment lines + This specifies the line index where the data lines begin, where the counting starts + from 0 and does not include comment lines. The default is dependent on the format. + +**data_end**: line index for the end of data (can be negative to count from end) + If this is not None then it allows for excluding lines at the end that are not + valid data lines. A negative value means to count from the end, so -1 would + exclude the last line, -2 the last two lines, and so on. + +**converters**: dict of data type converters + See the `Converters`_ section for more information. + +**names**: list of names corresponding to each data column + Define the complete list of names for each data column. This will override + names found in the header (if it exists). If not supplied then + use names from the header or auto-generated names if there is no header. + +**include_names**: list of names to include in output + From the list of column names found from the header or the ``names`` + parameter, select for output only columns within this list. If not supplied + then include all names. + +**exclude_names**: list of names to exclude from output + Exclude these names from the list of output columns. This is applied *after* + the ``include_names`` filtering. If not specified then no columns are excluded.
+ +**fill_values**: list of fill value specifiers + Specify input table entries which should be masked in the output table + because they are bad or missing. See the `Bad or missing values`_ section + for more information and examples. The default is that any blank table + values are treated as missing. + +**fill_include_names**: list of column names, which are affected by ``fill_values``. + If not supplied, then ``fill_values`` can affect all columns. + +**fill_exclude_names**: list of column names, which are not affected by ``fill_values``. + If not supplied, then ``fill_values`` can affect all columns. + +**Outputter**: Outputter class + This converts the raw data tables value into the + output object that gets returned by |read|. The default is + :class:`~astropy.io.ascii.TableOutputter`, which returns a + :class:`~astropy.table.Table` object. + +**Inputter**: Inputter class + This is generally not specified. + +**data_Splitter**: Splitter class to split data columns + +**header_Splitter**: Splitter class to split header columns + +**Reader** : Reader class (*deprecated* in favor of ``format``) + This specifies the top-level format of the ASCII table, for example + if it is a basic character delimited table, fixed format table, or + a CDS-compatible table, etc. The value of this parameter must + be a Reader class. For basic usage this means one of the + built-in :ref:`extension_reader_classes`. + +.. _replace_bad_or_missing_values: + +Bad or missing values +^^^^^^^^^^^^^^^^^^^^^ + +ASCII data tables can contain bad or missing values. A common case is when a table +contains blank entries with no available data, for example:: + + >>> weather_data = """ + ... day,precip,type + ... Mon,1.5,rain + ... Tues,, + ... Wed,1.1,snow + ... 
""" + +By default |read| will interpret blank entries as being bad/missing and output a masked +Table with those entries masked out by setting the corresponding mask value set to +``True``:: + + >>> dat = ascii.read(weather_data) + >>> print dat + day precip type + ---- ------ ---- + Mon 1.5 rain + Tues -- -- + Wed 1.1 snow + +If you want to replace the masked (missing) values with particular values, set the masked +column ``fill_value`` attribute and then get the "filled" version of the table. This +looks like the following:: + + >>> dat['precip'].fill_value = -999 + >>> dat['type'].fill_value = 'N/A' + >>> print dat.filled() + day precip type + ---- ------ ---- + Mon 1.5 rain + Tues -999.0 N/A + Wed 1.1 snow + +ASCII tables may also have other indicators of bad or missing data. For +example a table may contain string values that are not a valid representation +of a number, e.g. ``"..."``, or a table may have special values like ``-999`` +that are chosen to indicate missing data. The |read| function has a flexible +system to accommodate these cases by marking specified character sequences in +the input data as "missing data" during the conversion process. Whenever +missing data is found then the output will be a masked table. + +This is done with the ``fill_values`` keyword argument, which can be set to a +single missing-value specification ```` or a list of ```` tuples:: + + fill_values = | [, , ...] + = (, '0', , , ...) + +When reading a table the second element of a ```` should always +be the string ``'0'``, +otherwise you may get unexpected behavior [#f1]_. By default the +```` is applied to all columns unless column name strings are +supplied. An alterate way to limit the columns is via the +``fill_include_names`` and ``fill_exclude_names`` keyword arguments in |read|. + +In the example below we read back the weather table after filling the missing +values in with typical placeholders:: + + >>> table = ['day precip type', + ... ' Mon 1.5 rain', + ... 
'Tues -999.0 N/A', + ... ' Wed 1.1 snow'] + >>> t = ascii.read(table, fill_values=[('-999.0', '0', 'precip'), ('N/A', '0', 'type')]) + >>> print t + day precip type + ---- ------ ---- + Mon 1.5 rain + Tues -- -- + Wed 1.1 snow + +.. note:: + + The default in |read| is ``fill_values=('','0')``. This marks blank entries as being + missing for any data type (int, float, or string). If ``fill_values`` is explicitly + set in the call to |read| then the default behavior of marking blank entries as missing + no longer applies. For instance setting ``fill_values=None`` will disable this + auto-masking without setting any other fill values. This can be useful for a string + column where one of values happens to be ``""``. + + +.. [#f1] The requirement to put the ``'0'`` there is the legacy of an old + interface which is maintained for backward compatibility and also to + match the format of ``fill_value`` for reading with the format of + ``fill_value`` used for writing tables. On reading, the second + element of the ```` tuple can actually be an arbitrary + string value which replaces occurrences of the ```` + string in the input stream prior to type conversion. This ends up + being the value "behind the mask", which should never be directly + accessed. Only the value ``'0'`` is neutral when attempting to detect + the column data type and perform type conversion. For instance if you + used ``'nan'`` for the ```` value then integer columns + would wind up as float. + +Guess table format +^^^^^^^^^^^^^^^^^^ + +If the ``guess`` parameter in |read| is set to True (which is the default) then +|read| will try to guess the table format by cycling through a number of +possible table format permutations and attempting to read the table in each case. +The first format which succeeds and will be used to read the table. 
To succeed +the table must be successfully parsed by the Reader and satisfy the following +column requirements: + + * At least two table columns + * No column names are a float or int number + * No column names begin or end with space, comma, tab, single quote, double quote, or + a vertical bar (|). + +These requirements reduce the chance for a false positive where a table is +successfully parsed with the wrong format. A common situation is a table +with numeric columns but no header row, and in this case ``astropy.io.ascii`` will +auto-assign column names because of the restriction on column names that +look like a number. + +The order of guessing is shown by this Python code, where ``Reader`` is the +class which actually implements reading the different file formats:: + + for Reader in (Rdb, Tab, Cds, Daophot, SExtractor, Ipac, Latex, AASTex, HTML): + read(Reader=Reader) + for Reader in (CommentedHeader, Basic, NoHeader): + for delimiter in ("|", ",", " ", "\\s"): + for quotechar in ('"', "'"): + read(Reader=Reader, delimiter=delimiter, quotechar=quotechar) + +Note that the :class:`~astropy.io.ascii.FixedWidth` derived-readers are not included +in the default guess sequence (this causes problems), so to read such tables +one must explicitly specify the format with the ``format`` keyword. + +If none of the guesses succeed in reading the table (subject to the column +requirements) a final try is made using just the user-supplied parameters but +without checking the column requirements. In this way a table with only one +column or column names that look like a number can still be successfully read. + +The guessing process respects any values of the Reader, delimiter, and +quotechar parameters that were supplied to the read() function. Any guesses +that would conflict are skipped. 
For example the call:: + + >>> data = ascii.read(table, Reader=ascii.NoHeader, quotechar="'") + +would only try the four delimiter possibilities, skipping all the conflicting +Reader and quotechar combinations. + +Guessing can be disabled in two ways:: + + import astropy.io.ascii + data = astropy.io.ascii.read(table) # guessing enabled by default + data = astropy.io.ascii.read(table, guess=False) # disable for this call + astropy.io.ascii.set_guess(False) # set default to False globally + data = astropy.io.ascii.read(table) # guessing disabled + +Converters +^^^^^^^^^^ + +:mod:`astropy.io.ascii` converts the raw string values from the table into +numeric data types by using converter functions such as the Python ``int`` and +``float`` functions. For example ``int("5.0")`` will fail while float("5.0") +will succeed and return 5.0 as a Python float. + +The default converters are:: + + default_converters = [astropy.io.ascii.convert_numpy(numpy.int), + astropy.io.ascii.convert_numpy(numpy.float), + astropy.io.ascii.convert_numpy(numpy.str)] + +These take advantage of the :func:`~astropy.io.ascii.convert_numpy` +function which returns a 2-element tuple ``(converter_func, converter_type)`` +as described in the previous section. The type provided to +:func:`~astropy.io.ascii.convert_numpy` must be a valid `numpy type +`_, for example +``numpy.int``, ``numpy.uint``, ``numpy.int8``, ``numpy.int64``, +``numpy.float``, ``numpy.float64``, ``numpy.str``. + +The default converters for each column can be overridden with the +``converters`` keyword:: + + >>> import numpy as np + >>> converters = {'col1': [ascii.convert_numpy(np.uint)], + ... 'col2': [ascii.convert_numpy(np.float32)]} + >>> ascii.read('file.dat', converters=converters) # doctest: +SKIP + +Advanced customization +^^^^^^^^^^^^^^^^^^^^^^ + +Here we provide a few examples that demonstrate how to extend the base +functionality to handle special cases. 
To go beyond these simple examples the +best reference is to read the code for the existing +:ref:`extension_reader_classes`. + +**Define a custom reader functionally** +:: + + def read_rdb_table(table): + reader = astropy.io.ascii.Basic() + reader.header.splitter.delimiter = '\t' + reader.data.splitter.delimiter = '\t' + reader.header.splitter.process_line = None + reader.data.splitter.process_line = None + reader.data.start_line = 2 + + return reader.read(table) + +**Define custom readers by class inheritance** +:: + + # Note: Tab, Csv, and Rdb are included in astropy.io.ascii for convenience. + class Tab(astropy.io.ascii.Basic): + def __init__(self): + astropy.io.ascii.Basic.__init__(self) + self.header.splitter.delimiter = '\t' + self.data.splitter.delimiter = '\t' + # Don't strip line whitespace since that includes tabs + self.header.splitter.process_line = None + self.data.splitter.process_line = None + # Don't strip data value spaces since that is significant in TSV tables + self.data.splitter.process_val = None + self.data.splitter.skipinitialspace = False + + class Rdb(astropy.io.ascii.Tab): + def __init__(self): + astropy.io.ascii.Tab.__init__(self) + self.data.start_line = 2 + + class Csv(astropy.io.ascii.Basic): + def __init__(self): + astropy.io.ascii.Basic.__init__(self) + self.data.splitter.delimiter = ',' + self.header.splitter.delimiter = ',' + self.header.start_line = 0 + self.data.start_line = 1 + +**Create a custom splitter.process_val function** +:: + + # The default process_val() normally just strips whitespace. + # In addition have it replace empty fields with -999.
+ def process_val(x): + """Custom splitter process_val function: Remove whitespace at the beginning + or end of value and substitute -999 for any blank entries.""" + x = x.strip() + if x == '': + x = '-999' + return x + + # Create an RDB reader and override the splitter.process_val function + rdb_reader = astropy.io.ascii.get_reader(Reader=astropy.io.ascii.Rdb) + rdb_reader.data.splitter.process_val = process_val diff --git a/docs/io/ascii/references.txt b/docs/io/ascii/references.txt new file mode 100644 index 0000000..1d2ab29 --- /dev/null +++ b/docs/io/ascii/references.txt @@ -0,0 +1,4 @@ +.. |read| replace:: :func:`~astropy.io.ascii.read` +.. |write| replace:: :func:`~astropy.io.ascii.write` +.. |Table| replace:: :class:`~astropy.table.Table` +.. _structured array: http://docs.scipy.org/doc/numpy/user/basics.rec.html diff --git a/docs/io/ascii/toc.txt b/docs/io/ascii/toc.txt new file mode 100644 index 0000000..02df2d5 --- /dev/null +++ b/docs/io/ascii/toc.txt @@ -0,0 +1,8 @@ +.. toctree:: + :maxdepth: 2 + + read + write + base_classes + fixed_width_gallery + ascii_api diff --git a/docs/io/ascii/write.rst b/docs/io/ascii/write.rst new file mode 100644 index 0000000..c3d9437 --- /dev/null +++ b/docs/io/ascii/write.rst @@ -0,0 +1,265 @@ +.. include:: references.txt + +.. _astropy.io.ascii_write: + +Writing tables +-------------- + +:mod:`astropy.io.ascii` is able to write ASCII tables out to a file or file-like +object using the same class structure and basic user interface as for reading +tables. + +The |write| function provides a way to write a data table as a +formatted ASCII table. For example:: + + >>> import numpy as np + >>> from astropy.io import ascii + >>> x = np.array([1, 2, 3]) + >>> y = x ** 2 + >>> ascii.write([x, y], 'values.dat', names=['x', 'y']) + +The ``values.dat`` file will then contain:: + + x y + 1 1 + 2 4 + 3 9 + +Most of the input table :ref:`supported_formats` for +reading are also available for writing. 
This provides a great deal of +flexibility in the format for writing. The example below writes the data as a +LaTeX table, using the option to send the output to ``sys.stdout`` instead of a +file:: + + >>> ascii.write(data, format='latex') # doctest: +SKIP + \begin{table} + \begin{tabular}{cc} + x & y \\ + 1 & 1 \\ + 2 & 4 \\ + 3 & 9 \\ + \end{tabular} + \end{table} + +Input data format +^^^^^^^^^^^^^^^^^ + +The input ``table`` argument to |write| can be any value that is supported for +initializing a |Table| object. This is documented in detail in the +:ref:`construct_table` section and includes creating a table with a list of +columns, a dictionary of columns, or from `numpy` arrays (either structured or +homogeneous). The sections below show a few examples. + +Table or NumPy structured array +""""""""""""""""""""""""""""""" + +An AstroPy |Table| object or a NumPy `structured array`_ (or record array) can +serve as input to the |write| function. + +:: + + >>> from astropy.io import ascii + >>> from astropy.table import Table + + >>> data = Table({'a': [1, 2, 3], + ... 'b': [4.0, 5.0, 6.0]}, + ... names=['a', 'b']) + >>> ascii.write(data) + a b + 1 4.0 + 2 5.0 + 3 6.0 + + >>> data = np.array([(1, 2., 'Hello'), (2, 3., "World")], + ... dtype=('i4,f4,a10')) + >>> ascii.write(data) + f0 f1 f2 + 1 2.0 Hello + 2 3.0 World + +The output of :mod:`astropy.io.ascii.read` is a |Table| or NumPy array data +object that can be an input to the |write| function. + +:: + + >>> data = ascii.read('t/daophot.dat', format='daophot') # doctest: +SKIP + >>> ascii.write(data, 'space_delimited_table.dat') # doctest: +SKIP + +List of lists +""""""""""""" + +A list of Python lists (or any iterable object) can be used as input:: + + >>> x = [1, 2, 3] + >>> y = [4, 5.2, 6.1] + >>> z = ['hello', 'world', '!!!'] + >>> data = [x, y, z] + + >>> ascii.write(data) + col0 col1 col2 + 1 4.0 hello + 2 5.2 world + 3 6.1 !!! 
+ +The ``data`` object does not contain information about the column names so +|Table| has chosen them automatically. To specify the names, provide the +``names`` keyword argument. This example also shows excluding one of the columns +from the output:: + + >>> ascii.write(data, names=['x', 'y', 'z'], exclude_names=['y']) + x z + 1 hello + 2 world + 3 !!! + + +Dict of lists +""""""""""""" + +A dictionary containing iterable objects can serve as input to |write|. Each +dict key is taken as the column name while the value must be an iterable object +containing the corresponding column values. + +Since a Python dictionary is not ordered the output column order will be +unpredictable unless the ``names`` argument is provided. + +:: + + >>> data = {'x': [1, 2, 3], + ... 'y': [4, 5.2, 6.1], + ... 'z': ['hello', 'world', '!!!']} + >>> ascii.write(data, names=['x', 'y', 'z']) + x y z + 1 4.0 hello + 2 5.2 world + 3 6.1 !!! + + +.. _io_ascii_write_parameters: + +Parameters for ``write()`` +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The |write| function accepts a number of parameters that specify the detailed output table +format. Each of the :ref:`supported_formats` is handled by a corresponding Writer class that +can define different defaults, so the descriptions below sometimes mention "typical" +default values. This refers to the :class:`~astropy.io.ascii.Basic` writer and other +similar Writer classes. + +Some output format Writer classes, e.g. :class:`~astropy.io.ascii.Latex` or +:class:`~astropy.io.ascii.AASTex` accept additional keywords, that can +customize the output further. See the documentation of these classes for +details. + +**output** : output specifier + There are two ways to specify the output for the write operation: + + - Name of a file (string) + - File-like object (from open(), StringIO, etc) + +**table** : input table + Any value that is supported for initializing a |Table| object (see :ref:`construct_table`). 
+ +**format** : output format (default='basic') + This specifies the format of the ASCII table to be written, for + example if it is a basic character delimited table, fixed format table, or a + CDS-compatible table, etc. The value of this parameter must + be one of the :ref:`supported_formats`. + +**delimiter** : column delimiter string + A one-character string used to separate fields which typically defaults to the space character. + Other common values might be "," or "|" or "\\t". + +**comment** : string defining a comment line in table + For the :class:`~astropy.io.ascii.Basic` Writer this defaults to "#". + Which and how comments are written depends on the format chosen (e.g. + :class:`~astropy.io.ascii.CommentedHeader` puts the comment symbol in the line + with the column names). + +**formats**: dict of data type converters + For each key (column name) use the given value to convert the column data to a string. + If the format value is string-like then it is used as a Python format statement, + e.g. '%0.2f' % value. If it is a callable function then that function + is called with a single argument containing the column value to be converted. + Example:: + + astropy.io.ascii.write(table, sys.stdout, formats={'XCENTER': '%12.1f', + 'YCENTER': lambda x: round(x, 1)}, + +**names**: list of names corresponding to each data column + Define the complete list of names for each data column. This will override + names determined from the data table (if available). If not supplied then + use names from the data table or auto-generated names. + +**include_names**: list of names to include in output + From the list of column names found from the data table or the ``names`` + parameter, select for output only columns within this list. If not supplied + then include all names. + +**exclude_names**: list of names to exclude from output + Exclude these names from the list of output columns. This is applied *after* + the ``include_names`` filtering. 
If not specified then no columns are excluded. + +**fill_values**: fill value specifier of lists + This can be used to fill missing values in the table or replace values with special meaning. + + See the :ref:`replace_bad_or_missing_values` section for more information on the syntax. + The syntax is almost the same as when reading a table. + There is a special value ``astropy.io.ascii.masked`` that is used to say "output this string + for all masked values in a masked table" (the default is to use ``'--'``):: + + >>> import sys + >>> from astropy.table import Table, Column, MaskedColumn + >>> from astropy.io import ascii + >>> t = Table([(1, 2), (3, 4)], names=('a', 'b'), masked=True) + >>> t['a'].mask = [True, False] + >>> ascii.write(t, sys.stdout) + a b + -- 3 + 2 4 + >>> ascii.write(t, sys.stdout, fill_values=[(ascii.masked, 'N/A')]) + a b + N/A 3 + 2 4 + + If no ``fill_values`` is applied for masked values in ``astropy.io.ascii``, the default set + with ``numpy.ma.masked_print_option.set_display`` applies (usually that is also ``'--'``):: + + >>> ascii.write(t, sys.stdout, fill_values=[]) + a b + -- 3 + 2 4 + + Note that when writing a table all values are converted to strings, before + any value is replaced. Because ``fill_values`` only replaces cells that + are an exact match to the specification, you need to provide the string + representation (stripped of whitespace) for each value. For example, in + the following commands ``-99`` is formatted with two digits after the + comma, so we need to replace ``-99.00`` and not ``-99``:: + + >>> t = Table([(-99, 2), (3, 4)], names=('a', 'b')) + >>> ascii.write(t, sys.stdout, fill_values = [('-99.00', 'no data')], + ... formats={'a': '%4.2f'}) + a b + "no data" 3 + 2.00 4 + + Similarly, if you replace a value in a column that has a fixed length format, + e.g. ``'f4.2'``, then the string you want to replace must have the same + number of characters, in the example above ``fill_values=[(' nan',' N/A')]`` + would work. 
+ +**fill_include_names**: list of column names, which are affected by ``fill_values``. + If not supplied, then ``fill_values`` can affect all columns. + +**fill_exclude_names**: list of column names, which are not affected by ``fill_values``. + If not supplied, then ``fill_values`` can affect all columns. + +**Writer** : Writer class (*deprecated* in favor of ``format``) + This specifies the top-level format of the ASCII table to be written, for + example if it is a basic character delimited table, fixed format table, or a + CDS-compatible table, etc. The value of this parameter must be a Writer + class. For basic usage this means one of the built-in :ref:`extension_reader_classes`. + Note: Reader classes and Writer classes are synonymous, in other + words Reader classes can also write, but for historical reasons they are + often called Reader classes. diff --git a/docs/io/fits/api/cards.rst b/docs/io/fits/api/cards.rst new file mode 100644 index 0000000..af22cc3 --- /dev/null +++ b/docs/io/fits/api/cards.rst @@ -0,0 +1,31 @@ +.. currentmodule:: astropy.io.fits + +Cards +----- + +:class:`Card` +^^^^^^^^^^^^^ + +.. autoclass:: Card + :members: + :inherited-members: + :undoc-members: + :show-inheritance: + +Deprecated Interfaces +^^^^^^^^^^^^^^^^^^^^^ + +The following classes and functions are deprecated as of the PyFITS 3.1 header +refactoring, though they are currently still available for backwards-compatibility. + +.. autoclass:: CardList + :members: + :undoc-members: + :show-inheritance: + +.. autofunction:: create_card + +.. autofunction:: create_card_from_string + +.. autofunction:: upper_key + diff --git a/docs/io/fits/api/diff.rst b/docs/io/fits/api/diff.rst new file mode 100644 index 0000000..1e14069 --- /dev/null +++ b/docs/io/fits/api/diff.rst @@ -0,0 +1,47 @@ +Differs +------- + +.. automodule:: astropy.io.fits.diff +.. currentmodule:: astropy.io.fits + +:class:`FITSDiff` +^^^^^^^^^^^^^^^^^ +.. 
autoclass:: FITSDiff + :members: + :inherited-members: + :show-inheritance: + +:class:`HDUDiff` +^^^^^^^^^^^^^^^^ +.. autoclass:: HDUDiff + :members: + :inherited-members: + :show-inheritance: + +:class:`HeaderDiff` +^^^^^^^^^^^^^^^^^^^ +.. autoclass:: HeaderDiff + :members: + :inherited-members: + :show-inheritance: + +:class:`ImageDataDiff` +^^^^^^^^^^^^^^^^^^^^^^ +.. autoclass:: ImageDataDiff + :members: + :inherited-members: + :show-inheritance: + +:class:`RawDataDiff` +^^^^^^^^^^^^^^^^^^^^ +.. autoclass:: RawDataDiff + :members: + :inherited-members: + :show-inheritance: + +:class:`TableDataDiff` +^^^^^^^^^^^^^^^^^^^^^^ +.. autoclass:: TableDataDiff + :members: + :inherited-members: + :show-inheritance: diff --git a/docs/io/fits/api/files.rst b/docs/io/fits/api/files.rst new file mode 100644 index 0000000..15af19d --- /dev/null +++ b/docs/io/fits/api/files.rst @@ -0,0 +1,44 @@ +.. currentmodule:: astropy.io.fits + +File Handling and Convenience Functions +--------------------------------------- + +:func:`open` +^^^^^^^^^^^^ +.. autofunction:: open + +:func:`writeto` +^^^^^^^^^^^^^^^ +.. autofunction:: writeto + +:func:`info` +^^^^^^^^^^^^ +.. autofunction:: info + +:func:`append` +^^^^^^^^^^^^^^ +.. autofunction:: append + +:func:`update` +^^^^^^^^^^^^^^ +.. autofunction:: update + +:func:`getdata` +^^^^^^^^^^^^^^^ +.. autofunction:: getdata + +:func:`getheader` +^^^^^^^^^^^^^^^^^ +.. autofunction:: getheader + +:func:`getval` +^^^^^^^^^^^^^^ +.. autofunction:: getval + +:func:`setval` +^^^^^^^^^^^^^^ +.. autofunction:: setval + +:func:`delval` +^^^^^^^^^^^^^^ +.. autofunction:: delval diff --git a/docs/io/fits/api/hdulists.rst b/docs/io/fits/api/hdulists.rst new file mode 100644 index 0000000..6df0423 --- /dev/null +++ b/docs/io/fits/api/hdulists.rst @@ -0,0 +1,14 @@ +.. currentmodule:: astropy.io.fits + +HDU Lists +--------- + +.. inheritance-diagram:: HDUList + +:class:`HDUList` +^^^^^^^^^^^^^^^^ + +.. 
autoclass:: HDUList + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/io/fits/api/hdus.rst b/docs/io/fits/api/hdus.rst new file mode 100644 index 0000000..47f17ba --- /dev/null +++ b/docs/io/fits/api/hdus.rst @@ -0,0 +1,43 @@ +.. currentmodule:: astropy.io.fits + +Header Data Units +----------------- + +The :class:`ImageHDU` and :class:`CompImageHDU` classes are discussed in the +section on :ref:`Images`. + +The :class:`TableHDU` and :class:`BinTableHDU` classes are discussed in the +section on :ref:`Tables`. + +:class:`PrimaryHDU` +^^^^^^^^^^^^^^^^^^^ +.. autoclass:: PrimaryHDU + :members: + :inherited-members: + :show-inheritance: + +:class:`GroupsHDU` +^^^^^^^^^^^^^^^^^^ +.. autoclass:: GroupsHDU + :members: + :inherited-members: + :show-inheritance: + +:class:`GroupData` +^^^^^^^^^^^^^^^^^^ +.. autoclass:: GroupData + :members: + :show-inheritance: + +:class:`Group` +============== +.. autoclass:: Group + :members: + :show-inheritance: + +:class:`StreamingHDU` +^^^^^^^^^^^^^^^^^^^^^ +.. autoclass:: StreamingHDU + :members: + :inherited-members: + :show-inheritance: diff --git a/docs/io/fits/api/headers.rst b/docs/io/fits/api/headers.rst new file mode 100644 index 0000000..d8eb187 --- /dev/null +++ b/docs/io/fits/api/headers.rst @@ -0,0 +1,13 @@ +.. currentmodule:: astropy.io.fits + +Headers +------- + +:class:`Header` +^^^^^^^^^^^^^^^ + +.. autoclass:: Header + :members: + :inherited-members: + :undoc-members: + :show-inheritance: diff --git a/docs/io/fits/api/images.rst b/docs/io/fits/api/images.rst new file mode 100644 index 0000000..6024e04 --- /dev/null +++ b/docs/io/fits/api/images.rst @@ -0,0 +1,30 @@ +.. currentmodule:: astropy.io.fits + +.. _images: + +Images +------ + +`ImageHDU` +^^^^^^^^^^ + +.. autoclass:: ImageHDU + :members: + :inherited-members: + :show-inheritance: + +`CompImageHDU` +^^^^^^^^^^^^^^ + +.. autoclass:: CompImageHDU + :members: + :inherited-members: + :show-inheritance: + +`Section` +========= + +.. 
autoclass:: Section + :members: + :inherited-members: + :show-inheritance: diff --git a/docs/io/fits/api/tables.rst b/docs/io/fits/api/tables.rst new file mode 100644 index 0000000..1c9138e --- /dev/null +++ b/docs/io/fits/api/tables.rst @@ -0,0 +1,63 @@ +.. currentmodule:: astropy.io.fits + +.. _tables: + +Tables +------ + +:class:`BinTableHDU` +^^^^^^^^^^^^^^^^^^^^ +.. autoclass:: BinTableHDU + :members: + :inherited-members: + :show-inheritance: + +:class:`TableHDU` +^^^^^^^^^^^^^^^^^ +.. autoclass:: TableHDU + :members: + :inherited-members: + :show-inheritance: + +:class:`Column` +^^^^^^^^^^^^^^^ +.. autoclass:: Column + :members: + :inherited-members: + :show-inheritance: + +:class:`ColDefs` +^^^^^^^^^^^^^^^^ +.. autoclass:: ColDefs + :members: + :inherited-members: + :show-inheritance: + +:class:`FITS_rec` +^^^^^^^^^^^^^^^^^ +.. autoclass:: FITS_rec + :members: + :show-inheritance: + +:class:`FITS_record` +^^^^^^^^^^^^^^^^^^^^ +.. autoclass:: FITS_record + :members: + :inherited-members: + :show-inheritance: + + +Table Functions +^^^^^^^^^^^^^^^ + +:func:`new_table` +""""""""""""""""" +.. autofunction:: new_table + +:func:`tabledump` +""""""""""""""""" +.. autofunction:: tabledump + +:func:`tableload` +""""""""""""""""" +.. autofunction:: tableload diff --git a/docs/io/fits/api/verification.rst b/docs/io/fits/api/verification.rst new file mode 100644 index 0000000..efca766 --- /dev/null +++ b/docs/io/fits/api/verification.rst @@ -0,0 +1,72 @@ +.. currentmodule:: astropy.io.fits + +.. _verify: + +Verification options +-------------------- + +There are 5 options for the ``output_verify`` argument of the following methods +of :class:`HDUList`: :meth:`~HDUList.close`, :meth:`~HDUList.writeto`, and +:meth:`~HDUList.flush`, or the :meth:``~_BaseHDU.writeto`` method on any HDU +object. In these cases, the verification option is passed to a :meth:``verify`` +call within these methods. 
+ +exception +^^^^^^^^^ + +This option will raise an exception if any FITS standard is violated. This is +the default option for output (i.e. when :meth:`~HDUList.writeto`, +:meth:`~HDUList.close`, or :meth:`~HDUList.flush` is called. If a user wants to +overwrite this default on output, the other options listed below can be used. + +ignore +^^^^^^ + +This option will ignore any FITS standard violation. On output, it will write +the HDU List content to the output FITS file, whether or not it is conforming +to FITS standard. + +The ``ignore`` option is useful in these situations, for example: + + 1. An input FITS file with non-standard is read and the user wants to copy or + write out after some modification to an output file. The non-standard will + be preserved in such output file. + + 2. A user wants to create a non-standard FITS file on purpose, possibly for + testing purpose. + +No warning message will be printed out. This is like a silent warn (see below) +option. + +fix +^^^ + +This option will try to fix any FITS standard violations. It is not always +possible to fix such violations. In general, there are two kinds of FITS +standard violation: fixable and not fixable. For example, if a keyword has a +floating number with an exponential notation in lower case 'e' (e.g. 1.23e11) +instead of the upper case 'E' as required by the FITS standard, it's a fixable +violation. On the other hand, a keyword name like ``P.I.`` is not fixable, +since it will not know what to use to replace the disallowed periods. If a +violation is fixable, this option will print out a message noting it is fixed. +If it is not fixable, it will throw an exception. + +The principle behind the fixing is do no harm. For example, it is plausible to +'fix' a :class:`Card` with a keyword name like ``P.I.`` by deleting it, but +Astropy will not take such action to hurt the integrity of the data. 
+ +Not all fixes may be the "correct" fix, but at least Astropy will try to make +the fix in such a way that it will not throw off other FITS readers. + +silentfix +^^^^^^^^^ + +Same as fix, but will not print out informative messages. This may be useful in +a large script where the user does not want excessive harmless messages. If the +violation is not fixable, it will still throw an exception. + +warn +^^^^ + +This option is the same as the ignore option but will send warning messages. It +will not try to fix any FITS standard violations whether fixable or not. diff --git a/docs/io/fits/appendix/faq.rst b/docs/io/fits/appendix/faq.rst new file mode 100644 index 0000000..b71d634 --- /dev/null +++ b/docs/io/fits/appendix/faq.rst @@ -0,0 +1,800 @@ +.. doctest-skip-all + +.. _io-fits-faq: + +astropy.io.fits FAQ +------------------- + +.. contents:: + +General Questions +^^^^^^^^^^^^^^^^^ + +What is PyFITS and how does it relate to Astropy? +""""""""""""""""""""""""""""""""""""""""""""""""" + +PyFITS_ is a library written in, and for use with the Python_ programming +language for reading, writing, and manipulating FITS_ formatted files. It +includes a high-level interface to FITS headers with the ability for high and +low-level manipulation of headers, and it supports reading image and table +data as Numpy_ arrays. It also supports more obscure and non-standard formats +found in some FITS files. + +The `astropy.io.fits` module is identical to PyFITS but with the names changed. +When development began on Astropy it was clear that one of the core +requirements would be a FITS reader. Rather than starting from scratch, +PyFITS--being the most flexible FITS reader available for Python--was ported +into Astropy. There are plans to gradually phase out PyFITS as a stand-alone +module and deprecate it in favor of `astropy.io.fits`. See more about that in +the next question. 
+ +Although PyFITS is written mostly in Python, it includes an optional module +written in C that's required to read/write compressed image data. However, +the rest of PyFITS functions without this extension module. + +.. _PyFITS: http://www.stsci.edu/institute/software_hardware/pyfits +.. _Python: http://www.python.org +.. _FITS: http://fits.gsfc.nasa.gov/ +.. _Numpy: http://numpy.scipy.org/ + + +What is the development status of PyFITS? +""""""""""""""""""""""""""""""""""""""""" + +PyFITS is written and maintained by the Science Software Branch at the `Space +Telescope Science Institute`_, and is licensed by AURA_ under a `3-clause BSD +license`_ (see `LICENSE.txt`_ in the PyFITS source code). + +It is now primarily developed as a component of Astropy +(`astropy.io.fits`) rather than as a stand-alone module. There are a few +reasons for this: The first is simply to reduce development effort; the +overhead of maintaining both PyFITS *and* `astropy.io.fits` in separate code +bases is non-trivial. The second is that there are many features of Astropy +(units, tables, etc.) from which the `astropy.io.fits` module can benefit +greatly. Since PyFITS is already integrated into Astropy, it makes more sense +to continue development there rather than make Astropy a dependency of PyFITS. + +PyFITS' current primary developer and active maintainer is `Erik Bray`_, though +patch submissions are welcome from anyone. PyFITS is now primarily developed +in a Git repository for ease of merging to and from Astropy. Patches and issue +reports can be posted to the `GitHub project`_ for PyFITS, or for Astropy. +There is also a legacy `Trac site`_ with some older issue reports still open, +but new issues should be submitted via GitHub if possible. An `SVN mirror`_ of +the repository is still maintained as well. + +The current stable release series is 3.3.x. 
Each 3.3.x release tries to +contain only bug fixes, and to not introduce any significant behavioral or API +changes (though this isn't guaranteed to be perfect). Patch releases for older +release series may be released upon request. Older versions of PyFITS (2.4 and +earlier) are no longer actively supported. + +.. _Space Telescope Science Institute: http://www.stsci.edu/ +.. _AURA: http://www.aura-astronomy.org/ +.. _3-clause BSD license: http://en.wikipedia.org/wiki/BSD_licenses#3-clause_license_.28.22New_BSD_License.22_or_.22Modified_BSD_License.22.29 +.. _LICENSE.txt: https://aeon.stsci.edu/ssb/trac/pyfits/browser/trunk/LICENSE.txt +.. _Erik Bray: mailto:embray@stsci.edu +.. _Trac site: https://aeon.stsci.edu/ssb/trac/pyfits/ +.. _SVN mirror: https://aeon.stsci.edu/ssb/svn/pyfits/ +.. _GitHub project: https://github.com/spacetelescope/PyFITS + + +Usage Questions +^^^^^^^^^^^^^^^ + +Something didn't work as I expected. Did I do something wrong? +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Possibly. But if you followed the documentation and things still did not work +as expected, it is entirely possible that there is a mistake in the +documentation, a bug in the code, or both. So feel free to report it as a bug. +There are also many, many corner cases in FITS files, with new ones discovered +almost every week. `astropy.io.fits` is always improving, but does not support +all cases perfectly. There are some features of the FITS format (scaled data, +for example) that are difficult to support correctly and can sometimes cause +unexpected behavior. + +For the most common cases, however, such as reading and updating FITS headers, +images, and tables, `astropy.io.fits`. is very stable and well-tested. Before +every Astropy/PyFITS release it is ensured that all its tests pass on a variety +of platforms, and those tests cover the majority of use-cases (until new corner +cases are discovered). + + +Astropy crashed and output a long string of code. 
What do I do? +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +This listing of code is what is known as a `stack trace`_ (or in Python +parlance a "traceback"). When an unhandled exception occurs in the code, +causing the program to end, this is a way of displaying where the exception +occurred and the path through the code that led to it. + +As Astropy is meant to be used as a piece in other software projects, some +exceptions raised by Astropy are by design. For example, one of the most +common exceptions is a `~.exceptions.KeyError` when an attempt is made to read +the value of a non-existent keyword in a header:: + + >>> from astropy.io import fits + >>> h = fits.Header() + >>> h['NAXIS'] + Traceback (most recent call last): + File "", line 1, in + File "/path/to/astropy/io/fits/header.py", line 125, in __getitem__ + return self._cards[self._cardindex(key)].value + File "/path/to/astropy/io/fits/header.py", line 1535, in _cardindex + raise KeyError("Keyword %r not found." % keyword) + KeyError: "Keyword 'NAXIS' not found." + +This indicates that something was looking for a keyword called "NAXIS" that +does not exist. If an error like this occurs in some other software that uses +Astropy, it may indicate a bug in that software, in that it expected to find a +keyword that didn't exist in a file. + +Most "expected" exceptions will output a message at the end of the traceback +giving some idea of why the exception occurred and what to do about it. The +more vague and mysterious the error message in an exception appears, the more +likely that it was caused by a bug in Astropy. So if you're getting an +exception and you really don't know why or what to do about it, feel free to +report it as a bug. + +.. _stack trace: http://en.wikipedia.org/wiki/Stack_trace + + +Why does opening a file work in CFITSIO, ds9, etc. but not in Astropy? 
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +As mentioned elsewhere in this FAQ, there are many unusual corner cases when +dealing with FITS files. It's possible that a file should work, but isn't +supported due to a bug. Sometimes it's even possible for a file to work in an +older version of Astropy or PyFITS, but not a newer version due to a regression +that isn't tested for yet. + +Another problem with the FITS format is that, as old as it is, there are many +conventions that appear in files from certain sources that do not meet the FITS +standard. And yet they are so common-place that it is necessary to support +them in any FITS readers. CONTINUE cards are one such example. There are +non-standard conventions supported by Astropy/PyFITS that are not supported by +CFITSIO and possibly vice-versa. You may have hit one of those cases. + +If Astropy is having trouble opening a file, a good way to rule out whether or not +the problem is with Astropy is to run the file through the `fitsverify`_ +program. For smaller files you can even use the `online FITS verifier`_. +These use CFITSIO under the hood, and should give a good indication of whether +or not there is something erroneous about the file. If the file is +malformatted, fitsverify will output errors and warnings. + +If fitsverify confirms no problems with a file, and Astropy is still having +trouble opening it (especially if it produces a traceback) then it's possible +there is a bug in Astropy. + +.. _fitsverify: http://heasarc.gsfc.nasa.gov/docs/software/ftools/fitsverify/ +.. _online FITS verifier: http://fits.gsfc.nasa.gov/fits_verify.html + + +How do I turn off the warning messages Astropy keeps outputting to my console? +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Astropy uses Python's built-in `warnings`_ subsystem for informing about +exceptional conditions in the code that are recoverable, but that the user may +want to be informed of. 
One of the most common warnings in `astropy.io.fits` +occurs when updating a header value in such a way that the comment must be +truncated to preserve space:: + + Card is too long, comment is truncated. + +Any console output generated by Astropy can be assumed to be from the warnings +subsystem. See Astropy's documentation on the :ref:`python-warnings` for more +information on how to control and quiet warnings. + +.. _warnings: http://docs.python.org/library/warnings.html + + +What convention does Astropy use for indexing, such as of image coordinates? +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +All arrays and sequences in Astropy use a zero-based indexing scheme. For +example, the first keyword in a header is ``header[0]``, not ``header[1]``. +This is in accordance with Python itself, as well as C, on which Python is +based. + +This may come as a surprise to veteran FITS users coming from IRAF, where +1-based indexing is typically used, due to its origins in FORTRAN. + +Likewise, the top-left pixel in an N x N array is ``data[0,0]``. The indices +for 2-dimensional arrays are row-major order, in that the first index is the +row number, and the second index is the column number. Or put in terms of +axes, the first axis is the y-axis, and the second axis is the x-axis. This is +the opposite of column-major order, which is used by FORTRAN and hence FITS. +For example, the second index refers to the axis specified by NAXIS1 in the +FITS header. + +In general, for N-dimensional arrays, row-major orders means that the +right-most axis is the one that varies the fastest while moving over the +array data linearly. For example, the 3-dimensional array:: + + [[[1, 2], + [3, 4]], + [[5, 6], + [7, 8]]] + +is represented linearly in row-major order as:: + + [1, 2, 3, 4, 5, 6, 7, 8] + +Since 2 immediately follows 1, you can see that the right-most (or inner-most) +axis is the one that varies the fastest. 
+ +The discrepancy in axis-ordering may take some getting used to, but it is a +necessary evil. Since most other Python and C software assumes row-major +ordering, trying to enforce column-major ordering in arrays returned by Astropy +is likely to cause more difficulties than it's worth. + + +How do I open a very large image that won't fit in memory? +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +In PyFITS, prior to version 3.1, when the data portion of an HDU is accessed, +the data is read into memory in its entirety. For example:: + + >>> hdul = pyfits.open('myimage.fits') + >>> hdul[0].data + ... + +reads the entire image array from disk into memory. For very large images or +tables this is clearly undesirable, if not impossible given the available +resources. + +However, `astropy.io.fits.open` has an option to access the data portion of an +HDU by memory mapping using `mmap`_. In both Astropy and newer versions of +PyFITS this is used by *default*. + +What this means is that accessing the data as in the example above only reads +portions of the data into memory on demand. For example, if I request just a +slice of the image, such as ``hdul[0].data[100:200]``, then just rows 100-200 +will be read into memory. This happens transparently, as though the entire +image were already in memory. This works the same way for tables. For most +cases this is your best bet for working with large files. + +To ensure use of memory mapping, just add the ``memmap=True`` argument to +`fits.open `. Likewise, using ``memmap=False`` will +force data to be read entirely into memory. + + +The default can also be controlled through a configuration option called +``USE_MEMMAP``. Setting this to ``0`` will disable mmap by default. + +Unfortunately, memory mapping does not currently work as well with scaled +image data, where BSCALE and BZERO factors need to be applied to the data to +yield physical values. 
Currently this requires enough memory to hold the +entire array, though this is an area that will see improvement in the future. + +An alternative, which currently only works for image data (that is, non-tables) +is the sections interface. It is largely replaced by the better support for +mmap, but may still be useful on systems with more limited virtual-memory +space, such as on 32-bit systems. Support for scaled image data is flakey with +sections too, though that will be fixed. See the documentation on :ref:`image +sections ` for more details on using this interface. + +.. _mmap: http://en.wikipedia.org/wiki/Mmap + + +How can I create a very large FITS file from scratch? +""""""""""""""""""""""""""""""""""""""""""""""""""""" + +This is a very common issue, but unfortunately Astropy does not come with any +built-in facilities for creating large files (larger than will fit in memory) +from scratch (though it may in the future). + +Normally to create a single image FITS file one would do something like:: + + >>> import numpy + >>> from astropy.io import fits + >> data = numpy.zeros((40000, 40000), dtype=numpy.float64) + >> hdu = fits.PrimaryHDU(data=data) + >> hdu.writeto('large.fits') + +However, a 40000 x 40000 array of doubles is nearly twelve gigabytes! Most +systems won't be able to create that in memory just to write out to disk. In +order to create such a large file efficiently requires a little extra work, +and a few assumptions. + +First, it is helpful to anticipate about how large (as in, how many keywords) +the header will have in it. FITS headers must be written in 2880 byte +blocks--large enough for 36 keywords per block (including the END keyword in +the final block). Typical headers have somewhere between 1 and 4 blocks, +though sometimes more. + +Since the first thing we write to a FITS file is the header, we want to write +enough header blocks so that there is plenty of padding in which to add new +keywords without having to resize the whole file. 
Say you want the header to +use 4 blocks by default. Then, excluding the END card which Astropy will add +automatically, create the header and pad it out to 36 * 4 cards like so:: + + >>> data = numpy.zeros((100, 100), dtype=numpy.float64) + # This is a stub array that we'll be using the initialize the HDU; its + # exact size is irrelevant, as long as it has the desired number of + # dimensions + >>> hdu = fits.PrimaryHDU(data=data) + >>> header = hdu.header + >>> while len(header) < (36 * 4 - 1): + ... header.append() # Adds a blank card to the end + +Now adjust the NAXISn keywords to the desired size of the array, and write +*only* the header out to a file. Using the ``hdu.writeto()`` method will +cause Astropy to "helpfully" reset the NAXISn keywords to match the size of the +dummy array. That is because it works hard to ensure that only valid FITS +files are written. Instead, we can write *just* the header to a file using +the `Header.tofile ` method:: + + >>> header['NAXIS1'] = 40000 + >>> header['NAXIS2'] = 40000 + >>> header.tofile('large.fits') + +Finally, we need to grow out the end of the file to match the length of the +data (plus the length of the header). This can be done very efficiently on +most systems by seeking past the end of the file and writing a single byte, +like so:: + + >>> with open('large.fits', 'rb+') as fobj: + ... # Seek past the length of the header, plus the length of the + ... # Data we want to write. + ... # The -1 is to account for the final byte taht we are about to + ... # write: + ... fobj.seek(len(header.tostring()) + (40000 * 40000 * 8) - 1) + ... fobj.write('\0') + +On modern operating systems this will cause the file (past the header) to be +filled with zeros out to the ~12GB needed to hold a 40000 x 40000 image. On +filesystems that support sparse file creation (most Linux filesystems, but not +the HFS+ filesystem used by most Macs) this is a very fast, efficient +operation. On other systems your mileage may vary. 
+ +This isn't the only way to build up a large file, but probably one of the +safest. This method can also be used to create large multi-extension FITS +files, with a little care. + +For creating very large tables, this method may also be used. Though it can be +difficult to determine ahead of time how many rows a table will need. In +general, use of the `astropy.io.fits` module is currently discouraged for the +creation and manipulation of large tables. The FITS format itself is not +designed for efficient on-disk or in-memory manipulation of table structures. +For large, heavy-duty table data it might be better too look into using `HDF5`_ +through the `PyTables`_ library. The :ref:`Astropy Table ` +interface can provide an abstraction layer between different on-disk table +formats as well (for example for converting a table between FITS and HDF5). + +PyTables makes use of Numpy under the hood, and can be used to write binary +table data to disk in the same format required by FITS. It is then possible +to serialize your table to the FITS format for distribution. At some point +this FAQ might provide an example of how to do this. + +.. _HDF5: http://www.hdfgroup.org/HDF5/ +.. _PyTables: http://www.pytables.org/moin + + +How do I create a multi-extension FITS file from scratch? +""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +When you open a FITS file with `astropy.io.fits.open`, an +`~astropy.io.fits.HDUList` object is returned, which holds all the HDUs in the +file. 
This ``HDUList`` class is a subclass of Python's builtin `list`, and can +be created from scratch and used as such:: + + >>> from astropy.io import fits + >>> new_hdul = fits.HDUList() + >>> new_hdul.append(fits.ImageHDU()) + >>> new_hdul.append(fits.ImageHDU()) + >>> new_hdul.writeto('test.fits') + +Or the HDU instances can be created first (or read from an existing FITS file) +and the HDUList instantiated like so:: + + >>> hdu1 = fits.PrimaryHDU() + >>> hdu2 = fits.ImageHDU() + >>> new_hdul = fits.HDUList([hdu1, hdu2]) + >>> new_hdul.writeto('test.fits') + +That will create a new multi-extension FITS file with two empty IMAGE +extensions (a default PRIMARY HDU is prepended automatically if one was not +provided manually). + + +Why is an image containing integer data being converted unexpectedly to floats? +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +If the header for your image contains non-trivial values for the optional +BSCALE and/or BZERO keywords (that is, BSCALE != 1 and/or BZERO != 0), then +the raw data in the file must be rescaled to its physical values according to +the formula:: + + physical_value = BZERO + BSCALE * array_value + +As BZERO and BSCALE are floating point values, the resulting value must be a +float as well. If the original values were 16-bit integers, the resulting +values are single-precision (32-bit) floats. If the original values were +32-bit integers the resulting values are double-precision (64-bit floats). + +This automatic scaling can easily catch you of guard if you're not expecting +it, because it doesn't happen until the data portion of the HDU is accessed +(to allow things like updating the header without rescaling the data). 
For +example:: + + >>> hdul = fits.open('scaled.fits') + >>> image = hdul['SCI', 1] + >>> image.header['BITPIX'] + 32 + >>> image.header['BSCALE'] + 2.0 + >>> data = image.data # Read the data into memory + >>> data.dtype + dtype('float64') # Got float64 despite BITPIX = 32 (32-bit int) + >>> image.header['BITPIX'] # The BITPIX will automatically update too + -64 + >>> 'BSCALE' in image.header # And the BSCALE keyword removed + False + +The reason for this is that once a user accesses the data they may also +manipulate it and perform calculations on it. If the data were forced to +remain as integers, a great deal of precision is lost. So it is best to err +on the side of not losing data, at the cost of causing some confusion at +first. + +If the data must be returned to integers before saving, use the `ImageHDU.scale +` method:: + + >>> image.scale('int32') + >>> image.header['BITPIX'] + 32 + +Alternatively, if a file is opened with ``mode='update'`` along with the +``scale_back=True`` argument, the original BSCALE and BZERO scaling will +be automatically re-applied to the data before saving. Usually this is +not desireable, especially when converting from floating point back to +unsigned integer values. But this may be useful in cases where the raw +data needs to be modified corresponding to changes in the physical values. + +To prevent rescaling from occurring at all (good for updating headers--even if +you don't intend for the code to access the data, it's good to err on the side +of caution here), use the ``do_not_scale_image_data`` argument when opening +the file:: + + >>> hdul = fits.open('scaled.fits', do_not_scale_image_data=True) + >>> image = hdul['SCI', 1] + >>> image.data.dtype + dtype('int32') + + +Why am I losing precision when I assign floating point values in the header? +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +The FITS standard allows two formats for storing floating-point numbers in a +header value. 
The "fixed" format requires the ASCII representation of the +number to be in bytes 11 through 30 of the header card, and to be +right-justified. This leaves a standard number of characters for any comment +string. + +The fixed format is not wide enough to represent the full range of values that +can be stored in a 64-bit float with full precision. So FITS also supports a +"free" format in which the ASCII representation can be stored anywhere, using +the full 70 bytes of the card (after the keyword). + +Currently Astropy/PyFITS only supports writing fixed format (it can read both +formats), so all floating point values assigned to a header are stored in the +fixed format. There are plans to add support for more flexible formatting. + +In the meantime it is possible to add or update cards by manually formatting +the card image from a string, as it should appear in the FITS file:: + + >>> c = fits.Card.fromstring('FOO = 1234567890.123456789') + >>> h = fits.Header() + >>> h.append(c) + >>> h + FOO = 1234567890.123456789 + +As long as you don't assign new values to 'FOO' via ``h['FOO'] = 123``, will +maintain the header value exactly as you formatted it (as long as it is valid +according to the FITS standard). + + +Why is reading rows out of a FITS table so slow? +"""""""""""""""""""""""""""""""""""""""""""""""" + +Underlying every table data array returned by `astropy.io.fits` is a Numpy +`~numpy.recarray` which is a Numpy array type specifically for representing +structured array data (i.e. a table). As with normal image arrays, Astropy +accesses the underlying binary data from the FITS file via mmap (see the +question "`What performance differences are there between astropy.io.fits and +fitsio?`_" for a deeper explanation fo this). The underlying mmap is then +exposed as a `~numpy.recarray` and in general this is a very efficient way to +read the data. + +However, for many (if not most) FITS tables it isn't all that simple. 
For +many columns there are conversions that have to take place between the actual +data that's "on disk" (in the FITS file) and the data values that are returned +to the user. For example FITS binary tables represent boolean values +differently from how Numpy expects them to be represented, "Logical" columns +need to be converted on the fly to a format Numpy (and hence the user) can +understand. This issue also applies to data that is linearly scaled via the +``TSCALn`` and ``TZEROn`` header keywords. + +Supporting all of these "FITS-isms" introduces a lot of overhead that might +not be necessary for all tables, but are still common nonetheless. That's +not to say it can't be faster even while supporting the peculiarities of +FITS--CFITSIO for example supports all the same features but is orders of +magnitude faster. Astropy could do much better here too, and there are many +known issues causing slowdown. There are plenty of opportunities for speedups, +and patches are welcome. In the meantime for high-performance applications +with FITS tables some users might find the ``fitsio`` library more to their +liking. + + +Comparison with Other FITS Readers +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +What is the difference between astropy.io.fits and fitsio? +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +The `astropy.io.fits` module (originally PyFITS) is a "pure Python" FITS +reader in that all the code for parsing the FITS file format is in Python, +though Numpy is used to provide access to the FITS data via the +`~numpy.ndarray` interface. `astropy.io.fits` currently also accesses the +`CFITSIO `_ to support the +FITS Tile Compression convention, but this feature is optional. It does not +use CFITSIO outside of reading compressed images. + +`fitsio `_, on the other hand, is a Python +wrapper for the CFITSIO library. 
All the heavy lifting of reading the FITS +format is handled by CFITSIO, while ``fitsio`` provides an easier to use +object-oriented API including providing a Numpy interface to FITS files read +from CFITSIO. Much of it is written in C (to provide the interface between +Python and CFITSIO), and the rest is in Python. The Python end mostly +provides the documentation and user-level API. + +Because ``fitsio`` wraps CFITSIO it inherits most of its strengths and +weaknesses, though it has an added strength of providing an easier to use +API than if one were to use CFITSIO directly. + + +Why did Astropy adopt PyFITS as its FITS reader instead of fitsio? +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +When the Astropy project was first started it was clear from the start that +one of its core components should be a submodule for reading and writing FITS +files, as many other components would be likely to depend on this +functionality. At the time, the ``fitsio`` package was in its infancy (it +goes back to roughly 2011) while PyFITS had already been established going +back to before the year 2000). It was already a mature package with support +for the vast majority of FITS files found in the wild, including outdated +formats such as "Random Groups" FITS files still used extensively in the +radio astronomy community. + +Although many aspects of PyFITS' interface have evolved over the years, much +of it has also remained the same, and is already familiar to astronomers +working with FITS files in Python. Most of not all existing training +materials were also based around PyFITS. PyFITS was developed at STScI, which +also put forward significant resources to develop Astropy, with an eye toward +integrating Astropy into STScI's own software stacks. As most of the Python +software at STScI uses PyFITS it was the only practical choice for making that +transition. 
+ +Finally, although CFITSIO (and by extension ``fitsio``) can read any FITS files +that conform to the FITS standard, it does not support all of the non-standard +conventions that have been added to FITS files in the wild. It does have some +support for some of these conventions (such as CONTINUE cards and, to a limited +extent, HIERARCH cards), it is not easy to add support for other conventions +to a large and complex C codebase. + +PyFITS' object-oriented design makes supporting non-standard conventions +somewhat easier in most cases, and as such PyFITS can be more flexible in the +types of FITS files it can read and return *useful* data from. This includes +better support for files that fail to meet the FITS standard, but still contain +useful data that should still be readable at least well-enough to correct any +violations of the FITS standard. For example, a common error in non-English- +speaking regions is to insert non-ASCII characters into FITS headers. This +is not a valid FITS file, but still should be readable in some sense. +Supporting structural errors such as this is more difficult in CFITSIO which +assumes a more rigid structure. + + +What performance differences are there between astropy.io.fits and fitsio? +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +There are two main performance areas to look at: reading/parsing FITS headers +and reading FITS data (image-like arrays as well as tables). + +In the area of headers ``fitsio`` is significantly faster in most cases. This +is due in large part to the (almost) pure C implementation (due to the use of +CFITSIO), but also due to fact that it is more rigid and does not support as +many local conventions and other special cases as `astropy.io.fits` tries to +support in its pure Python implementation. 
+ +That said the difference is small, and only likely to be a bottleneck either +when opening files containing thousands of HDUs, or reading the headers out +of thousands of FITS files in succession (in either case the difference is +not even an order of magnitude). + +Where data is concerned the situation is a little more complicated, and +requires some understanding of how PyFITS is implemented versus CFITSIO and +``fitsio``. First it's important to understand how they differ in terms of +memory management. + +`astropy.io.fits`/PyFITS uses mmap, by default, to provide access to the raw +binary data in FITS files. Mmap is a system call (or in most cases these days +a wrapper in your libc for a lower-level system call) which allows user-space +applications to essentially do the same thing your OS is doing when it uses a +pagefile (swap space) for virtual memory: It allows data in a file on disk to +be paged into physical memory one page (or in practice usually several pages) +at a time on an as-needed basis. These cached pages of the file are also +accessible from all processes on the system, so multiple processes can read +from the same file with little additional overhead. In the case of reading +over all the data in the file the performance difference between using mmap +versus reading the entire data into physical memory at once can vary widely +between systems, hardware, and depending on what else is happening on the +system at the moment, but mmap almost always going to be better. + +In principle it requires more overhead since accessing each page will result in +a page fault, and the system requires more requests to the disk. But in +practice the OS will optimize this pretty aggressively, especially for the most +common case of sequential access--also in reality reading the entire thing into +memory is still going to result in a whole lot of page faults too. 
For random +access having all the data in physical memory is always going to be best, +though with mmap it's usually going to be pretty good too (one doesn't normally +access all the data in a file in totally random order--usually a few sections +of it will be accessed most frequently, the OS will keep those pages in +physical memory as best it can). So for the most general case of reading FITS +files (or most large data on disk) this is the best choice, especially for +casual users, and is hence enabled by default. + +CFITSIO/``fitsio``, on the other hand, doesn't assume the existence of +technologies like mmap and page caching. Thus it implements its own LRU cache +of I/O buffers that store sections of FITS files read from disk in memory in +FITS' famous 2880 byte chunk size. The I/O buffers are used heavily in +particular for keeping the headers in memory. Though for large data reads (for +example reading an entire image from a file) it *does* bypass the cache and +instead does a read directly from disk into a user-provided memory buffer. + +However, even when CFITSIO reads direct from the file, this is still largely +less efficient than using mmap: Normally when your OS reads a file from disk, +it caches as much of that read as it can in physical memory (in its page cache) +so that subsequent access to those same pages does not require a subsequent +expensive disk read. This happens when using mmap too, since the data has to +be copied from disk into RAM at some point. The difference is that when using +mmap to access the data, the program is able to read that data *directly* out +of the OS's page cache (so long as it's only being read). On the other hand +when reading data from a file into a local buffer such as with fread(), the +data is first read into the page cache (if not already present) and then copied +from the page cache into the local buffer. 
So every read performs at least one +additional memory copy per page read (requiring twice as much physical memory, +and possibly lots of paging if the file is large and pages need to dropped from +the cache). + +The user API for CFITSIO usually works by having the user allocate a memory +buffer large enough to hold the image/table they want to read (or at least the +section they're interested in). There are some helper functions for +determining the appropriate amount of space to allocate. Then you just pass it +a pointer to your buffer and CFITSIO handles all the reading (usually using the +process described above), and copies the results into your user buffer. For +large reads it reads directly from the file into your buffer. Though if the +data needs to be scaled it makes a stop in CFITSIO's own buffer first, then +writes the rescaled values out to the user buffer (if rescaling has been +requested). Regardless, this means that if your program wishes to hold an +entire image in memory at once it will use as much RAM as the size of the +data. For most applications it's better (and sufficient) to write it work on +smaller sections of the data, but this requires extra complexity. Using mmap +on the other hand makes managing this complexity simpler and more efficient. + +A very simple and informal test demonstrates this difference. This test was +performed on four simple FITS images (one of which is a cube) of dimensions +256x256, 1024x1024, 4096x4096, and 256x1024x1024. Each image was generated +before the test and filled with randomized 64-bit floating point values. A +similar test was performed using both `astropy.io.fits` and ``fitsio``: A +handle to the FITS file is opened using each library's basic semantics, and +then the entire data array of the files is copied into a temporary array in +memory (for example if we were blitting the image to a video buffer). For +Astropy the test is written: + +.. 
code:: python + + def read_test_pyfits(filename): + with fits.open(filename, memmap=True) as hdul: + data = hdul[0].data + c = data.copy() + +The test was timed in IPython on a Linux system with kernel version 2.6.32, a +6-core Intel Xeon X5650 CPU clocked at 2.67 GHz per core, and 11.6 GB of RAM +using: + +.. code:: python + + for filename in filenames: + print(filename) + %timeit read_test_pyfits(filename) + +where ``filenames`` is just a list of the aforementioned generated sample +files. The results were:: + + 256x256.fits + 1000 loops, best of 3: 1.28 ms per loop + 1024x1024.fits + 100 loops, best of 3: 4.24 ms per loop + 4096x4096.fits + 10 loops, best of 3: 60.6 ms per loop + 256x1024x1024.fits + 1 loops, best of 3: 1.15 s per loop + +For ``fitsio`` the test was: + +.. code:: python + + def read_test_fitsio(filename): + with fitsio.FITS(filename) as f: + data = f[0].read() + c = data.copy() + +This was also run in a loop over all the sample files, producing the results:: + + 256x256.fits + 1000 loops, best of 3: 476 µs per loop + 1024x1024.fits + 100 loops, best of 3: 12.2 ms per loop + 4096x4096.fits + 10 loops, best of 3: 136 ms per loop + 256x1024x1024.fits + 1 loops, best of 3: 3.65 s per loop + +It should be made clear that the sample files were rewritten with new random +data between the Astropy test and the fitsio test, so they were not reading +the same data from the OS's page cache. Fitsio was much faster on the small +(256x256) image because in that case the time is dominated by parsing the +headers. As already explained this is much faster in CFITSIO. However, as +the data size goes up and the header parsing no longer dominates the time, +`astropy.io.fits` using mmap is roughly twice as fast. This discrepancy would +be almost entirely due to it requiring roughly half as many in-memory copies +to read the data, as explained earlier. That said, more extensive benchmarking +could be very interesting. 
+ +This is also not to say that `astropy.io.fits` does better in all cases. There +are some cases where it is currently blown away by fitsio. See the subsequent +question. + + +Why is fitsio so much faster than Astropy at reading tables? +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +In many cases it isn't--there is either no difference, or it may be a little +faster in Astropy depending on what you're trying to do with the table and +what types of columns or how many columns the table has. There are some +cases, however, where ``fitsio`` can be radically faster, mostly for reasons +explained above in "`Why is reading rows out of a FITS table so slow?`_" + +In principle a table is no different from, say, an array of pixels. But +instead of pixels each element of the array is some kind of record structure +(for example two floats, a boolean, and a 20 character string field). Just as +a 64-bit float is an 8 byte record in an array, a row in such a table can be +thought of as a 37 byte (in the case of the previous example) record in a 1-D +array of rows. So in principle everything that was explained in the answer to +the question "`What performance differences are there between astropy.io.fits +and fitsio?`_" applies just as well to tables as it does to any other array. + +However, FITS tables have many additional complexities that sometimes preclude +streaming the data directly from disk, and instead require transformation from +the on-disk FITS format to a format more immediately useful to the user. A +common example is how FITS represents boolean values in binary tables. +Another, significantly more complicated example, is variable length arrays. + +As explained in "`Why is reading rows out of a FITS table so slow?`_", +`astropy.io.fits`/PyFITS does not currently handle some of these cases as +efficiently as it could, in particular in cases where a user only wishes to +read a few rows out of a table. 
Fitsio, on the other hand, has a better +interface for copying one row at a time out of a table and performing the +necessary transformations on that row *only*, rather than on the entire column +or columns that the row is taken from. As such, for many cases ``fitsio`` gets +much better performance and should be preferred for many performance-critical +table operations. + +Fitsio also exposes a microlanguage (implemented in CFITSIO) for making +efficient SQL-like queries of tables (single tables only though--no joins or +anything like that). This format, described in the `CFITSIO documentation +`_ can +in some cases perform more efficient selections of rows than might be possible +with Numpy alone, which requires creating an intermediate mask array in order +to perform row selection. diff --git a/docs/io/fits/appendix/header_transition.rst b/docs/io/fits/appendix/header_transition.rst new file mode 100644 index 0000000..178298d --- /dev/null +++ b/docs/io/fits/appendix/header_transition.rst @@ -0,0 +1,427 @@ +.. currentmodule:: astropy.io.fits +.. doctest-skip-all + +.. _header-transition-guide: + +********************************* +Header Interface Transition Guide +********************************* + +.. note:: + + This guide was originally included with the release of PyFITS 3.1, and + still references PyFITS in many places, though the examples have been + updated for ``astropy.io.fits``. It is still useful here for informational + purposes, though Astropy has always used the PyFITS 3.1 Header interface. + +PyFITS v3.1 included an almost complete rewrite of the :class:`Header` +interface. Although the new interface is largely compatible with the old +interace (whether due to similarities in the design, or backwards-compatibility +support), there are enough differences that a full explanation of the new +interface is merited. + +The Trac ticket discussing the initial motivation and changes to be made to the +:class:`Header` class is `#64`_. 
It may be worth reading for some of the +background to this work, though this document contains a more complete +description of the "final" product (which will continue to evolve). + +.. _#64: https://aeon.stsci.edu/ssb/trac/pyfits/ticket/64 + + +Background +========== + +Prior to 3.1, PyFITS users interacted with FITS headers by way of three +different classes: :class:`Card`, :class:`CardList`, and :class:`Header`. + +The Card class represents a single header card with a keyword, value, and +comment. It also contains all the machinery for parsing FITS header cards, +given the 80 character string, or "card image" read from the header. + +The CardList class is actually a subclass of Python's `list` built-in. It was +meant to represent the actual list of cards that make up a header. That is, it +represents an ordered list of cards in the physical order that they appear in +the header. It supports the usual list methods for inserting and appending new +cards into the list. It also supports `dict`-like keyword access, where +``cardlist['KEYWORD']`` would return the first card in the list with the given +keyword. + +A lot of the functionality for manipulating headers was actually buried in the +CardList class. The Header class was more of a wrapper around CardList that +added a little bit of abstraction. It also implemented a partial dict-like +interface, though for Headers a keyword lookup returned the header value +associated with that keyword, and not the Card object. Though almost every +method on the Header class was just performing some operations on the +underlying CardList. + +The problem is that there were certain things one could *only* do by directly +accessing the CardList, such as look up the comments on a card, or access cards +that have duplicate keywords, such as HISTORY. Another long-standing +misfeature that slicing a Header object actually returned a CardList object, +rather than a new Header. 
For all but the most simple use cases, working with +CardList objects was largely unavoidable. + +But it was realized that CardList is really an implementation detail +not representing any element of a FITS file distinct from the header itself. +Users familiar with the FITS format know what a header is, but it's not clear +how a "card list" is distinct from that, or why operations go through the +Header object, while some have to be performed through the CardList. + +So the primary goal of this redesign was eliminate the :class:`CardList` class +altogether, and make it possible for users to perform all header manipulations +directly through :class:`Header` objects. It also tries to present headers as +similar as possible to more a more familiar data structure--an ordered mapping +(or :class:`~collections.OrderedDict` in Python) for ease of use by new users +less familiar with the FITS format. Though there are still many added +complexities for dealing with the idiosyncracies of the FITS format. + + +Deprecation Warnings +==================== + +A few old methods on the :class:`Header` class have been marked as deprecated, +either because they have been renamed to a more `PEP 8`_-compliant name, or +because have become redundant due to new features. To check if your code is +using any deprecated methods or features, run your code with ``python -Wd``. +This will output any deprecation warnings to the console. + +Two of the most common deprecation warnings related to Headers are for: + +- :meth:``Header.has_key``--this has actually been deprecated since PyFITS 3.0, + just as Python's `dict.has_key` is deprecated. For checking a key's presence + in a mapping object like `dict` or :class:`Header`, use the ``key in d`` + syntax. This has long been the preference in Python. + +- :meth:``Header.ascardlist`` and :attr:`Header.ascard`--these were used to + access the :class:`CardList` object underlying a header. 
They should still + work, and return a skeleton CardList implementation that should support most + of the old CardList functionality. But try removing as much of this as + possible. If direct access to the :class:`Card` objects making up a header + is necessary, use :attr:`Header.cards`, which returns an iterator over the + cards. More on that below. + +.. _PEP 8: http://www.python.org/dev/peps/pep-0008/ + +New Header Design +================= + +The new :class:`Header` class is designed to work as a drop-in replacement for +a `dict` via `duck typing`_. That is, although it is not a subclass of `dict`, +it implements all the same methods and interfaces. In particular, it is +similar to an :class:`~collections.OrderedDict` in that the order of insertions +is preserved. However, Header also supports many additional features and +behaviors specific to the FITS format. It should also be noted that while the +old Header implementation also had a dict-like interface, it did not implement +the *entire* dict interface as the new Header does. + +Although the new Header is used like a dict/mapping in most cases, it also +supports a `list` interface. The list-like interface is a bit idiosyncratic in +that in some contexts the Header acts like a list of values, in some like a +list of keywords, and in a few contexts like a list of :class:`Card` objects. This +may be the most difficult aspect of the new design, but there is logic to it. + +As with the old Header implementation, integer index access is supported: +``header[0]`` returns the value of the first keyword. However, the +:meth:`Header.index` method treats the header as though it's a list of +keywords, and returns the index of a given keyword. 
For example:: + + >>> header.index('BITPIX') + 2 + +:meth:`Header.count` is similar to `list.count`, and also takes a keyword as +its argument:: + + >>> header.count('HISTORY') + 20 + +A good rule of thumb is that any item access using square brackets ``[]`` returns +*value* in the header, whether using keyword or index lookup. Methods like +:meth:`~Header.index` and :meth:`~Header.count` that deal with the order and +quantity of items in the Header generally work on keywords. Finally, methods +like :meth:`~Header.insert` and :meth:`~Header.append` that add new items to +the header work on cards. + +Aside from the list-like methods, the new Header class works very similarly to +the old implementation for most basic use cases and should not present too many +surprises. There are differences, however: + +- As before, the Header() initializer can take a list of :class:`Card` objects + with which to fill the header. However, now any iterable may be used. It is + also important to note that *any* Header method that accepts :class:`Card` + objects can also accept 2-tuples or 3-tuples in place of Cards. That is, + either a ``(keyword, value, comment)`` tuple or a ``(keyword, value)`` tuple + (comment is assumed blank) may be used anywhere in place of a Card object. + This is even preferred, as it simply involves less typing. For example:: + + >>> from astropy.io import fits + >>> header = fits.Header([('A', 1), ('B', 2), ('C', 3, 'A comment')]) + >>> header + A = 1 + B = 2 + C = 3 / A comment + +- As demonstrated in the previous example, the ``repr()`` for a Header, that is + the text that is displayed when entering a Header object in the Python + console as an expression, shows the header as it would appear in a FITS file. + This inserts newlines after each card so that it is easily readable + regardless of terminal width. It is *not* necessary to use ``print header`` + to view this. 
Simply entering ``header`` displays the header contents as it + would appear in the file (sans the END card). + +- ``len(header)`` is now supported (previously it was necessary to do + ``len(header.ascard)``). This returns the total number of cards in the + header, including blank cards, but excluding the END card. + +- FITS supports having duplicate keywords, although they are generally in error + except for commentary keywords like COMMENT and HISTORY. PyFITS now supports + reading, updating, and deleting duplicate keywords: Instead of using the + keyword by itself, use a ``(keyword, index)`` tuple. For example + ``('HISTORY', 0)`` represents the first HISTORY card, ``('HISTORY', 1)`` + represents the second HISTORY card, and so on. In fact, when a keyword is + used by itself, it's really just shorthand for ``(keyword, 0)``. It is now + possible to delete an accidental duplicate like so:: + + >>> del header[('NAXIS', 1)] + + This will remove an accidental duplicate NAXIS card from the header. + +- Even if there are duplicate keywords, keyword lookups like + ``header['NAXIS']`` will always return the value associated with the first + copy of that keyword, with one exception: Commentary keywords like COMMENT + and HISTORY are expected to have duplicates. So ``header['HISTORY']``, for + example, returns the whole sequence of HISTORY values in the correct order. + This list of values can be sliced arbitrarily. For example, to view the last + 3 history entries in a header:: + + >>> hdulist[0].header['HISTORY'][-3:] + reference table oref$laf13367o_pct.fits + reference table oref$laf13369o_apt.fits + Heliocentric correction = 16.225 km/s + +- Subscript assignment can now be used to add new keywords to the header. Just + as with a normal `dict`, ``header['NAXIS'] = 1`` will either update the NAXIS + keyword if it already exists, or add a new NAXIS keyword with a value of + ``1`` if it does not exist. 
In the old interface this would return a + `~.exceptions.KeyError` if NAXIS did not exist, and the only way to add a new + keyword was through the update() method. + + By default, new keywords added in this manner are added to the end of the + header, with a few FITS-specific exceptions: + + * If the header contains extra blank cards at the end, new keywords are added + before the blanks. + + * If the header ends with a list of commentary cards--for example a sequence + of HISTORY cards--those are kept at the end, and new keywords are inserted + before the commentary cards. + + * If the keyword is a commentary keyword like COMMENT or HISTORY (or an empty + string for blank keywords), a *new* commentary keyword is always added, and + appended to the last commentary keyword of the same type. For example, + HISTORY keywords are always placed after the last history keyword:: + + >>> header = fits.Header() + >>> header['COMMENT'] = 'Comment 1' + >>> header['HISTORY'] = 'History 1' + >>> header['COMMENT'] = 'Comment 2' + >>> header['HISTORY'] = 'History 2' + >>> header + COMMENT Comment 1 + COMMENT Comment 2 + HISTORY History 1 + HISTORY History 2 + + These behaviors represent a sensible default behavior for keyword assignment, + and represents the same behavior as :meth:`~Header.update` in the old Header + implementation. The default behaviors may still be bypassed through the use + of other assignment methods like :meth:`Header.set` and :meth:`Header.append` + described later. + +- It is now also possible to assign a value and a comment to a keyword + simultaneously using a tuple:: + + >>> header['NAXIS'] = (2, 'Number of axis') + + This will update the value and comment of an existing keyword, or add a new + keyword with the given value and comment. + +- There is a new :attr:`Header.comments` attribute which lists all the comments + associated with keywords in the header (not to be confused with COMMENT + cards). 
This allows viewing and updating the comments on specific cards:: + + >>> header.comments['NAXIS'] + Number of axis + >>> header.comments['NAXIS'] = 'Number of axes' + >>> header.comments['NAXIS'] + Number of axes + +- When deleting a keyword from a header, don't assume that the keyword already + exists. In the old Header implementation this would just silently do + nothing. For backwards-compatibility it is still okay to delete a + non-existent keyword, but a warning will be raised. In the future this + *will* be changed so that trying to delete a non-existent keyword raises a + `~.exceptions.KeyError`. This is for consistency with the behavior of Python dicts. So + unless you know for certain that a keyword exists before deleting it, it's + best to do something like:: + + >>> try: + ... del header['BITPIX'] + ... except KeyError: + ... pass + + Or if you prefer to look before you leap:: + + >>> if 'BITPIX' in header: + ... del header['BITPIX'] + +- ``del header`` now supports slices. For example, to delete the last three + keywords from a header:: + + >>> del header[-3:] + +- Two headers can now be compared for equality--previously no two Header + objects were the same. Now they compare as equal if they contain the exact + same content. That is, this requires strict equality. + +- Two headers can now be added with the '+' operator, which returns a copy of + the left header extended by the right header with :meth:`~Header.extend`. + Assignment addition is also possible. + +- The Header.update() method used commonly with the old Header API has been + renamed to :meth:`Header.set`. The primary reason for this change is very + simple: Header implements the `dict` interface, which already has a method + called update(), but that behaves differently from the old Header.update(). + + The details of the new update() can be read in the API docs, but it is very + similar to `dict.update`. 
It also supports backwards compatibility with the + old update() by analysis of the arguments passed to it, so existing code will + not break immediately. However, this *will* cause a deprecation warning to + be output if they're enabled. It is best, for starters, to replace all + update() calls with set(). Recall, also, that direct assignment is now + possible for adding new keywords to a header. So by and large the only + reason to prefer using :meth:`Header.set` is its capability of inserting or + moving a keyword to a specific location using the ``before`` or ``after`` + arguments. + +- Slicing a Header with a slice index returns a new Header containing only + those cards contained in the slice. As mentioned earlier, it used to be that + slicing a Header returned a card list--something of a misfeature. In + general, objects that support slicing ought to return an object of the same + type when you slice them. + + Likewise, wildcard keywords used to return a CardList object. Now they + return a new Header--similarly to a slice. For example:: + + >>> header['NAXIS*'] + + returns a new header containing only the NAXIS and NAXISn cards from the + original header. + +.. _duck typing: http://en.wikipedia.org/wiki/Duck_typing + + +Transition Tips +=============== + +The above may seem like a lot, but the majority of existing code using PyFITS +to manipulate headers should not need to be updated, at least not immediately. +The most common operations still work the same. + +As mentioned above, it would be helpful to run your code with ``python -Wd`` to +enable deprecation warnings--that should be a good idea of where to look to +update your code. + +If your code needs to be able to support older versions of PyFITS +simultaneously with PyFITS 3.1, things are slightly trickier, but not by +much--the deprecated interfaces will not be removed for several more versions +because of this. 
+ +- The first change worth making, which is supported by any PyFITS version in + the last several years, is remove any use of :meth:``Header.has_key`` and + replace it with ``keyword in header`` syntax. It's worth making this change + for any dict as well, since `dict.has_key` is deprecated. Running the + following regular expression over your code may help with most (but not all) + cases:: + + s/([^ ]+)\.has_key\(([^)]+)\)/\2 in \1/ + +- If possible, replace any calls to Header.update() with Header.set() (though + don't bother with this if you need to support older PyFITS versions). Also, + if you have any calls to Header.update() that can be replaced with simple + subscript assignments (eg. ``header['NAXIS'] = (2, 'Number of axes')``) do + that too, if possible. + +- Find any code that uses ``header.ascard`` or ``header.ascardlist()``. First + ascertain whether that code really needs to work directly on Card objects. + If that is definitely the case, go ahead and replace those with + ``header.cards``--that should work without too much fuss. If you do need to + support older versions, you may keep using ``header.ascard`` for now. + +- In the off chance that you have any code that slices a header, it's best to + take the result of that and create a new Header object from it. For + example:: + + >>> new_header = fits.Header(old_header[2:]) + + This avoids the problem that in PyFITS <= 3.0 slicing a Header returns a + CardList by using the result to initialize a new Header object. This will + work in both cases (in PyFITS 3.1, initializing a Header with an existing + Header just copies it, a la `list`). + +- As mentioned earlier, locate any code that deletes keywords with ``del``, and + make sure they either look before they leap (``if keyword in header:``) or + ask forgiveness (``try/except KeyError:``). + +Other Gotchas +------------- + +- As mentioned above it is not necessary to enter ``print header`` to display + a header in an interactive Python prompt. 
Simply entering ``>>> header`` + by itself is sufficient. Using ``print`` usually will *not* display the + header readably, because it does not include line-breaks between the header + cards. The reason is that Python has two types of string representations: + One is returned when one calls ``str(header)`` which happens automatically + when you ``print`` a variable. In the case of the Header class this actually + returns the string value of the header as it is written literally in the + FITS file, which includes no line breaks. + + The other type of string representation happens when one calls + ``repr(header)``. The `repr` of an object is just meant to be a useful + string "representation" of the object; in this case the contents of the + header but with linebreaks between the cards and with the END card and + trailing padding stripped off. This happens automatically when + one enters a variable at the Python prompt by itself without a ``print`` + call. + +- The current version of the FITS Standard (3.0) states in section 4.2.1 + that trailing spaces in string values in headers are not significant and + should be ignored. PyFITS < 3.1 *did* treat trailing spaces as + significant. For example if a header contained: + + KEYWORD1= 'Value ' + + then ``header['KEYWORD1']`` would return the string ``'Value '`` exactly, + with the trailing spaces intact. The new Header interface fixes this by + automatically stripping trailing spaces, so that ``header['KEYWORD1']`` would + return just ``'Value'``. + + There is, however, one convention used by the IRAF ccdmosaic task for + representing its `TNX World Coordinate System + `_ and `ZPX World + Coordinate System `_ + non-standard WCS' that uses a series of keywords in the form ``WATj_nnn`` + which store a text description of coefficients for a non-linear distortion + projection. 
It uses its own microformat for listing the coefficients as a + string, but the string is long, and thus broken up into several of these + ``WATj_nnn`` keywords. Correct recombination of these keywords requires + treating all whitespace literally. This convention either overlooked or + predated the prescribed treatment of whitespace in the FITS standard. + + To get around this issue a global variable ``fits.STRIP_HEADER_WHITESPACE`` + was introduced. Temporarily setting + ``fits.STRIP_HEADER_WHITESPACE.set(False)`` before reading keywords affected + by this issue will return their values with all trailing whitespace intact. + + A future version of PyFITS may be able to detect use of conventions like this + contextually and behave according to the convention, but in most cases the + default behavior of PyFITS is to behave according to the FITS Standard. diff --git a/docs/io/fits/appendix/history.rst b/docs/io/fits/appendix/history.rst new file mode 100644 index 0000000..9784567 --- /dev/null +++ b/docs/io/fits/appendix/history.rst @@ -0,0 +1,3248 @@ +.. doctest-skip-all + +astropy.io.fits History +======================= + +Prior to its inclusion in Astropy, the `astropy.io.fits` package was a stand- +alone package called `PyFITS`_. Though for the time being active development +is continuing on PyFITS, that development is also being merged into Astropy. +This page documents the release history of PyFITS prior to its merge into +Astropy. + +.. contents:: PyFITS Changelog + :depth: 2 + :local: + + +3.3.0 (unreleased) +------------------ + +New Features +^^^^^^^^^^^^ + +- Added new verification options ``fix+ignore``, ``fix+warn``, + ``fix+exception``, ``silentfix+ignore``, ``silentfix+warn``, and + ``silentfix+exception`` which give more control over how to report fixable + errors as opposed to unfixable errors. See the "Verification" section in + the PyFITS documentation for more details. 
+ +API Changes +^^^^^^^^^^^ + +- The ``pyfits.new_table`` function is now fully deprecated (though will not + be removed for a long time, considering how widely it is used). + + Instead please use the more explicit ``pyfits.BinTableHDU.from_columns`` to + create a new binary table HDU, and the similar + ``pyfits.TableHDU.from_columns`` to create a new ASCII table. These + otherwise accept the same arguments as ``pyfits.new_table`` which is now + just a wrapper for these. + +- The ``.fromstring`` classmethod of each HDU type has been simplified such + that, true to its namesake, it only initializes an HDU from a string + containing its header *and* data. (spacetelescope/PyFITS#64) + +- Fixed an issue where header wildcard matching (for example + ``header['DATE*']``) can be used to match *any* characters that might appear + in a keyword. Previously this only matched keywords containing characters + in the set ``[0-9A-Za-z_]``. Now this can also match a hyphen ``-`` and any + other characters, as some conventions like ``HIERARCH`` and record-valued + keyword cards allow a wider range of valid characters than standard FITS + keywords. + +- This will be the *last* release to support the following APIs that have been + marked deprecated since PyFITS v3.1: + + - The ``CardList`` class, which was part of the old header implementation. + + - The ``Card.key`` attribute. Use ``Card.keyword`` instead. + + - The ``Card.cardimage`` and ``Card.ascardimage`` attributes. Use simply + ``Card.image`` or ``str(card)`` instead. + + - The ``create_card`` factory function. Simply use the normal ``Card`` + constructor instead. + + - The ``create_card_from_string`` factory function. Use ``Card.fromstring`` + instead. + + - The ``upper_key`` function. Use ``Card.normalize_keyword`` method instead + (this is not unlikely to be used outside of PyFITS itself, but it was + technically public API). + + - The usage of ``Header.update`` with ``Header.update(keyword, value, + comment)`` arguments. 
``Header.update`` should only be used analogously + to ``dict.update``. Use ``Header.set`` instead. + + - The ``Header.ascard`` attribute. Use ``Header.cards`` instead for a list + of all the ``Card`` objects in the header. + + - The ``Header.rename_key`` method. Use ``Header.rename_keyword`` instead. + + - The ``Header.get_history`` method. Use ``header['HISTORY']`` instead + (normal keyword lookup). + + - The ``Header.get_comment`` method. Use ``header['COMMENT']`` instead. + + - The ``Header.toTxtFile`` method. Use ``header.totextfile`` instead. + + - The ``Header.fromTxtFile`` method. Use ``Header.fromtextfile`` instead. + + - The ``pyfits.tdump`` and ``tcreate`` functions. Use ``pyfits.tabledump`` + and ``pyfits.tableload`` respectively. + + - The ``BinTableHDU.tdump`` and ``tcreate`` methods. Use + ``BinTableHDU.dump`` and ``BinTableHDU.load`` respectively. + + - The ``txtfile`` argument to the ``Header`` constructor. Use + ``Header.fromfile`` instead. + + - The ``startColumn`` and ``endColumn`` arguments to the ``FITS_record`` + constructor. These are unlikely to be used by any user code. + + These deprecated interfaces will be removed from the development version of + PyFITS following the v3.3 release (they will still be available in any + v3.3.x bugfix releases, however). + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- PyFITS has switched to a unified code base which supports Python 2.5 through + 3.4 simultaneously without translation. This *shouldn't* have any + significant performance impacts, but please report if anything seems + noticeably slower. As a reminder, support for Python 2.5 will be ended + after PyFITS 3.3.x. + +- Warnings for deprecated APIs in PyFITS are now always displayed by default. 
+ This is in line with a similar change made recently to Astropy: + https://github.com/astropy/astropy/pull/1871 + To disable PyFITS deprecation warnings in scripts one may call + ``pyfits.ignore_deprecation_warnings()`` after importing PyFITS. + +- ``Card`` objects have a new ``is_blank`` attribute which returns ``True`` if + the card represents a blank card (no keyword, value, or comment) and + ``False`` otherwise. + +Bug Fixes +^^^^^^^^^ + +- Fixed a regression where it was not possible to save an empty "compressed" + image to a file (in this case there is nothing to compress, hence the + quotes, but trying to do so caused a crash). (spacetelescope/PyFITS#69) + +- Fixed a regression that may have been introduced in v3.2.1 with writing + compressed image HDUs, particularly compressed images using a non-empty + GZIP_COMPRESSED_DATA column. (spacetelescope/PyFITS#71) + + +3.2.4 (unreleased) +------------------ + +- Fixed a regression where multiple consecutive calls of the ``writeto`` + method on the same HDU but to different files could lead to corrupt data or + crashes on the subsequent calls after the first. (spacetelescope/PyFITS#40) + + +3.1.7 (unreleased) +------------------ + +- Nothing changed yet. + + +3.2.3 (2014-05-14) +------------------ + +- Nominal support for Python 3.4. + +- Fixed a bug with using the ``tabledump`` and ``tableload`` functions with + tables containing array columns (columns in which each element is an array + instead of a single scalar value). (spacetelescope/PyFITS#22) + +- Fixed an issue where PyFITS allowed newline characters in header values and + comments. (spacetelescope/PyFITS#51) + +- Fixed pickling of ``FITS_rec`` (table data) objects. + (spacetelescope/PyFITS#53) + +- Improved behavior when writing large compressed images on OSX by removing an + unnecessary check for platform architecture. 
(spacetelescope/PyFITS#57) + +- Allow reading FITS files from file-like objects that do not have a + ``.closed`` attribute (and as such may not even have an "open" vs. "closed" + concept). (spacetelescope/PyFITS#56) + +- Fixed duplicate insertion of commentary keywords on compressed image + headers. (spacetelescope/PyFITS#58) + +- Fixed minor issue with comparison of header commentary card values. + (spacetelescope/PyFITS#59) + + +3.1.6 (2014-05-14) +------------------ + +- Nominal support for Python 3.4. + +- Fixed a bug with using the ``tabledump`` and ``tableload`` functions with + tables containing array columns (columns in which each element is an array + instead of a single scalar value). (Backported from 3.2.3) + +- Fixed an issue where PyFITS allowed newline characters in header values and + comments. (Backported from 3.2.3) + +- Fixed pickling of ``FITS_rec`` (table data) objects. + (Backported from 3.2.3) + +- Improved behavior when writing large compressed images on OSX by removing an + unncessary check for platform architecture. (Backported from 3.2.3) + +- Allow reading FITS files from file-like objects that do not have a + ``.closed`` attribute (and as such may not even have an "open" vs. "closed" + concept). (Backported from 3.2.3) + +- Fixed minor issue with comparison of header commentary card values. + (Backported from 3.2.3) + + +3.2.2 (2014-03-25) +------------------ + +- Fixed a regression on deletion of record-valued keyword cards using + the Header wildcard syntax. This was intended to be fixed before the + v3.2.1 release. + + +3.1.5 (2014-03-25) +------------------ + +- Fixed a regression on deletion of record-valued keyword cards using + the Header wildcard syntax. This was intended to be fixed before the + v3.1.4 release. + + +3.2.1 (2014-03-04) +------------------ + +- Nominal support for the upcoming Python 3.4. 
+ +- Added missing features from the ``Header.insert()`` method that were + intended for inclusion in the original 3.1 release: In addition to + accepting an integer index as the first argument, it also supports supplying + a keyword name as the first argument for insertion relative to a specific + keyword. It also now supports an optional ``after`` argument. If + ``after=True`` the insertion is made below the insertion point instead + of above it. (spacetelescope/PyFITS#12) + +- Fixed support for broadcasting of values assigned to table columns. + (spacetelescope/PyFITS#48) + +- A grab bag of minor performance improvements in headers. + (spacetelescope/PyFITS#46) + +- Fix an unrelated error that occurred when instantiating a ``ColDefs`` object + with invalid input. + +- Fixed an issue where opening an image containing pseudo-unsigned integers + and immediately writing it to a new file using the ``writeto`` method would + drop the scale factors that identified the data as unsigned. + +- Fixed a bug where writing a file with ``checksum=True`` did not add the + checksum on new files. (spacetelescope/PyFITS#8) + +- Fixed an issue where validating an HDU's checksums removed the checksum from + that HDU's header entirely (even if it was valid.) + +- Fixed checksums on compressed images, so that the ``ZHECKSUM`` and + ``ZDATASUM`` contain a checksum of the original image HDU, while + ``CHECKSUM`` and ``DATASUM`` contain checksums of the compressed image HDU. + This feature was supposed to be supported in 3.2, but the support was buggy. + +- Fixed an issue where the size of the heap was sometimes not computed + properly when writing an existing table containing variable-length array + columns to a new FITS file. This could result in corruption in the new FITS + file. (spacetelescope/PyFITS#47) + +- Fixed issue with updates to the header of ``CompImageHDU`` objects not being + preserved on save. 
(spacetelescope/PyFITS#23) + +- Fixed a bug where a boolean value of ``True`` in a header could not be + replaced with the integer 1, and likewise for ``False`` and 0 and vice + versa. + +- Fixed an issue similar to the above one but for numeric values--now + replacing a header value with an equivalent numeric value will up/downcast + that value. For example replacing '0' with '0.0' will write '0.0' to the + header so that it is returned as a floating point value. Likewise a float + can be downcast to an integer. (spacetelescope/PyFITS#49) + +- A handful of Python 3 compatibility fixes, especially for compatibility + with the upcoming Python 3.4. + +- Fixed unrelated crash when a header contains an invalid END card (for + example "END = "). This resulted in a cryptic traceback. Now headers like + this will detect "clearly intended" END cards and produce a warning about + their invalidity and fix them. (#217) + +- Allowed a sequence of ``Column`` objects to be passed in as the main + argument to ``FITS_rec.from_columns`` as the documentation suggests should + be possible. + +- Fixed a display formatting issue with fitsdiff where sometimes it did not + show the difference between two floating point numbers if they were the same + up to some low number of digits. (spacetelescope/PyFITS#21) + +- Fixed an issue where Python 2 sometimes allowed non-ASCII strings to be + assigned as header values if they were assigned as old-style ``str`` objects + and not ``unicode`` objects. (spacetelescope/PyFITS#37) + + +3.1.4 (2014-03-04) +------------------ + +- Added missing features from the ``Header.insert()`` method that were + intended for inclusion in the original 3.1 release: In addition to + accepting an integer index as the first argument, it also supports supplying + a keyword name as the first argument for insertion relative to a specific + keyword. It also now supports an optional ``after`` argument. 
If + ``after=True`` the the insertion is made below the insertion point instead + of above it. (Backported from 3.2.1) + +- A grab bag of minor performance improvements in headers. + (Backported from 3.2.1) + +- Fixed an issue where opening an image containing pseudo-unsigned integers + and immediately writing it to a new file using the ``writeto`` method would + drop the scale factors that identified the data as unsigned. + (Backported from 3.2.1) + +- Fixed a bug where writing a file with ``checksum=True`` did not add the + checksum on new files. (Backported from 3.2.1) + +- Fixed an issue where validating an HDU's checksums removed the checksum from + that HDU's header entirely (even if it was valid.) + (Backported from 3.2.1) + +- Fixed an issue where the size of the heap was sometimes not computed + properly when writing an existing table containing variable-length array + columns to a new FITS file. This could result in corruption in the new FITS + file. (Backported from 3.2.1) + +- Fixed a bug where a boolean value of ``True`` in a header could not be + replaced with the integer 1, and likewise for ``False`` and 0 and vice + versa. (Backported from 3.2.1) + +- Fixed an issue similar to the above one but for numeric values--now + replacing a header value with an equivalent numeric value will up/downcast + that value. For example replacing '0' with '0.0' will write '0.0' to the + header so that it is returned as a floating point value. Likewise a float + can be downcast to an integer. (Backported from 3.2.1) + +- Fixed unrelated crash when a header contains an invalid END card (for + example "END = "). This resulted in a cryptic traceback. Now headers like + this will detect "clearly intended" END cards and produce a warning about + their invalidity and fix them. 
(Backported from 3.2.1) + +- Fixed a display formatting issue with fitsdiff where sometimes it did not + show the difference between two floating point numbers if they were the same + up to some low number of digits. (Backported from 3.2.1) + +- Fixed an issue where Python 2 sometimes allowed non-ASCII strings to be + assigned as header values if they were assigned as old-style ``str`` objects + and not ``unicode`` objects. (Backported from 3.2.1) + + +3.0.13 (2014-03-04) +------------------- + +- Fixed a bug where writing a file with ``checksum=True`` did not add the + checksum on new files. (Backported from 3.2.1) + +- Fixed an issue where validating an HDU's checksums removed the checksum from + that HDU's header entirely (even if it was valid.) + (Backported from 3.2.1) + + +3.2 (2013-11-26) +---------------- + +Highlights +^^^^^^^^^^ + +- Rewrote CFITSIO-based backend for handling tile compression of FITS files. + It now uses a standard CFITSIO instead of heavily modified pieces of CFITSIO + as before. PyFITS ships with its own copy of CFITSIO v3.35 which supports + the latest version of the Tiled Image Convention (v2.3), but system + packagers may choose instead to strip this out in favor of a + system-installed version of CFITSIO. Earlier versions may work, but nothing + earlier than 3.28 has been tested yet. (#169) + +- Added support for reading and writing tables using the Q format for columns. + The Q format is identical to the P format (variable-length arrays) except + that it uses 64-bit integers for the data descriptors, allowing more than + 4 GB of variable-length array data in a single table. (#160) + +- Added initial support for table columns containing pseudo-unsigned integers. + This is currently enabled by using the ``uint=True`` option when opening + files; any table columns with the correct BZERO value will be interpreted + and returned as arrays of unsigned integers. 
+ +- Some refactoring of the table and ``FITS_rec`` modules in order to better + separate the details of the FITS binary and ASCII table data structures from + the HDU data structures that encapsulate them. Most of these changes should + not be apparent to users (but see API Changes below). + + +API Changes +^^^^^^^^^^^ + +- Assigning to values in ``ColDefs.names``, ``ColDefs.formats``, + ``ColDefs.nulls`` and other attributes of ``ColDefs`` instances that return + lists of column properties is no longer supported.  Assigning to those lists + will no longer update the corresponding columns.  Instead, please just + modify the ``Column`` instances directly (``Column.name``, ``Column.null``, + etc.) + +- The ``pyfits.new_table`` function is marked "pending deprecation".  This + does not mean it will be removed outright or that its functionality has + changed.  It will likely be replaced in the future for a function with + similar, if not subtly different functionality.  A better, if not slightly + more verbose approach is to use ``pyfits.FITS_rec.from_columns`` to create + a new ``FITS_rec`` table--this has the same interface as + ``pyfits.new_table``.  The difference is that it returns a plain ``FITS_rec`` + array, and not an HDU instance.  This ``FITS_rec`` object can then be used + as the data argument in the constructors for ``BinTableHDU`` (for binary + tables) or ``TableHDU`` (for ASCII tables).  This is analogous to creating + an ``ImageHDU`` by passing in an image array. + ``pyfits.FITS_rec.from_columns`` is just a simpler way of creating a + FITS-compatible recarray from a FITS column specification. + +- The ``updateHeader``, ``updateHeaderData``, and ``updateCompressedData`` + methods of the ``CompDataHDU`` class are pending deprecation and moved to + internal methods. 
The operation of these methods depended too much on + internal state to be used safely by users; instead they are invoked + automatically in the appropriate places when reading/writing compressed image + HDUs. + +- The ``CompDataHDU.compData`` attribute is pending deprecation in favor of + the clearer and more PEP-8 compatible ``CompDataHDU.compressed_data``. + +- The constructor for ``CompDataHDU`` has been changed to accept new keyword + arguments. The new keyword arguments are essentially the same, but are in + underscore_separated format rather than camelCase format. The old arguments + are still pending deprecation. + +- The internal attributes of HDU classes ``_hdrLoc``, ``_datLoc``, and + ``_datSpan`` have been replaced with ``_header_offset``, ``_data_offset``, + and ``_data_size`` respectively. The old attribute names are still pending + deprecation. This should only be of interest to advanced users who have + created their own HDU subclasses. + +- The following previously deprecated functions and methods have been removed + entirely: ``createCard``, ``createCardFromString``, ``upperKey``, + ``ColDefs.data``, ``setExtensionNameCaseSensitive``, ``_File.getfile``, + ``_TableBaseHDU.get_coldefs``, ``Header.has_key``, ``Header.ascardlist``. + + If you run your code with a previous version of PyFITS (>= 3.0, < 3.2) with + the ``python -Wd`` argument, warnings for all deprecated interfaces still in + use will be displayed. + +- Interfaces that were pending deprecation are now fully deprecated. These + include: ``create_card``, ``create_card_from_string``, ``upper_key``, + ``Header.get_history``, and ``Header.get_comment``. + +- The ``.name`` attribute on HDUs is now directly tied to the HDU's header, so + that if ``.header['EXTNAME']`` changes so does ``.name`` and vice-versa. + +- The ``pyfits.file.PYTHON_MODES`` constant dict was renamed to + ``pyfits.file.PYFITS_MODES`` which better reflects its purpose. This is + rarely used by client code, however. 
Support for the old name will be + removed by PyFITS 3.4. + + +Other Changes and Additions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- The new compression code also adds support for the ZQUANTIZ and ZDITHER0 + keywords added in more recent versions of this FITS Tile Compression spec. + This includes support for lossless compression with GZIP. (#198) By default + no dithering is used, but the ``SUBTRACTIVE_DITHER_1`` and + ``SUBTRACTIVE_DITHER_2`` methods can be enabled by passing the correct + constants to the ``quantize_method`` argument to the ``CompImageHDU`` + constructor.  A seed can be manually specified, or automatically generated + using either the system clock or checksum-based methods via the + ``dither_seed`` argument.  See the documentation for ``CompImageHDU`` for + more details. (#198) (spacetelescope/PYFITS#32) + +- Images compressed with the Tile Compression standard can now be larger than + 4 GB through support of the Q format. (#159) + +- All HDUs now have ``.ver`` and ``.level`` attributes that return the value of + the EXTVER and EXTLEVEL keywords from that HDU's header, if they exist.  This + was added for consistency with the ``.name`` attribute which returns the + EXTNAME value from the header. + +- The ``Column`` and ``ColDefs`` classes have new ``.dtype`` attributes + which give the Numpy dtype for the column data in the first case, and the + full Numpy compound dtype for each table row in the latter case. + +- There was an issue where new tables created defaulted the values in all + string columns to '0.0'.  Now string columns are filled with empty strings + by default--this seems a less surprising default, but it may cause + differences with tables created with older versions of PyFITS. + +- Improved round-tripping and preservation of manually assigned column + attributes (``TNULLn``, ``TSCALn``, etc.) in table HDU headers. 
+ (astropy/astropy#996) + + +Bug Fixes +^^^^^^^^^ + +- Binary tables containing compressed images may, optionally, contain other + columns unrelated to the tile compression convention.  Although this is an + uncommon use case, it is permitted by the standard. (#159) + +- Reworked some of the file I/O routines to allow simpler, more consistent + mapping between OS-level file modes ('rb', 'wb', 'ab', etc.) and the more + "PyFITS-specific" modes used by PyFITS like "readonly" and "update". + That is, if reading a FITS file from an open file object, it doesn't matter + as much what "mode" it was opened in so long as it has the right + capabilities (read/write/etc.)  Also works around bugs in the Python io + module in 2.6+ with regard to file modes. (spacetelescope/PyFITS#33) + +- Fixed an obscure issue that can occur on systems that don't have flush to + memory-mapped files implemented (namely GNU Hurd). (astropy/astropy#968) + + +3.1.3 (2013-11-26) +------------------ + +- Disallowed assigning NaN and Inf floating point values as header values, + since the FITS standard does not define a way to represent them.  Because + this is undefined, the previous behavior did not make sense and produced + invalid FITS files. (spacetelescope/PyFITS#11) + +- Added a workaround for a bug in 64-bit OSX that could cause truncation when + writing files greater than 2^32 bytes in size. (spacetelescope/PyFITS#28) + +- Fixed a long-standing issue where writing binary tables did not correctly + write the TFORMn keywords for variable-length array columns (they omitted + the max array length parameter of the format).  This was thought fixed in + v3.1.2, but it was only fixed there for compressed image HDUs and not for + binary tables in general. + +- Fixed an obscure issue that can occur on systems that don't have flush to + memory-mapped files implemented (namely GNU Hurd). 
(Backported from 3.2) + + +3.0.12 (2013-11-26) +------------------- + +- Disallowed assigning NaN and Inf floating point values as header values, + since the FITS standard does not define a way to represent them in. Because + this is undefined, the previous behavior did not make sense and produced + invalid FITS files. (Backported from 3.1.3) + +- Added a workaround for a bug in 64-bit OSX that could cause truncation when + writing files greater than 2^32 bytes in size. (Backported from 3.1.3) + +- Fixed a long-standing issue where writing binary tables did not correctly + write the TFORMn keywords for variable-length array columns (they ommitted + the max array length parameter of the format). This was thought fixed in + v3.1.2, but it was only fixed there for compressed image HDUs and not for + binary tables in general. (Backported from 3.1.3) + +- Fixed an obscure issue that can occur on systems that don't have flush to + memory-mapped files implemented (namely GNU Hurd). (Backported from 3.2) + + +3.1.3 (unreleased) +------------------ + +- Disallowed assigning NaN and Inf floating point values as header values, + since the FITS standard does not define a way to represent them in. Because + this is undefined, the previous behavior did not make sense and produced + invalid FITS files. (spacetelescope/PyFITS#11) + + +3.0.12 (unreleased) +------------------- + +- Disallowed assigning NaN and Inf floating point values as header values, + since the FITS standard does not define a way to represent them in. Because + this is undefined, the previous behavior did not make sense and produced + invalid FITS files. (Backported from 3.1.3) + +- Added a workaround for a bug in 64-bit OSX that could cause truncation when + writing files greater than 2^32 bytes in size. (Backported from 3.1.3) + + +3.1.2 (2013-04-22) +------------------ + +- When an error occurs opening a file in fitsdiff the exception message will + now at least mention which file had the error. 
(#168) + +- Fixed support for opening gzipped FITS files by filename in a writeable mode + (PyFITS has supported writing to gzip files for some time now, but only + enabled it when GzipFile objects were passed to ``pyfits.open()`` due to + some legacy code preventing full gzip support. (#195) + +- Added a more helpful error message in the case of malformatted FITS files + that contain non-float NULL values in an ASCII table but are missing the + required TNULLn keywords in the header. (#197) + +- Fixed an (apparently long-standing) issue where writing compressed images + did not correctly write the TFORMn keywords for variable-length array + columns (they omitted the max array length parameter of the format). (#199) + +- Slightly refactored how tables containing variable-length array columns are + handled to add two improvements: Fixes an issue where accessing the data + after a call to the ``pyfits.getdata`` convenience function caused an + exception, and allows the VLA data to be read from an existing mmap of the + FITS file. (#200) + +- Fixed a bug that could occur when opening a table containing + multi-dimensional columns (i.e. via the TDIMn keyword) and then writing it + out to a new file. (#201) + +- Added use of the console_scripts entry point to install the fitsdiff and + fitscheck scripts, which if nothing else provides better Windows support. + The generated scripts now override the ones explicitly defined in the + scripts/ directory (which were just trivial stubs to begin with). (#202) + +- Fixed a bug on Python 3 where attempting to open a non-existent file on + Python 3 caused a seemingly unrelated traceback. (#203) + +- Fixed a bug in fitsdiff that reported two header keywords containing NaN + as value as different. (#204) + +- Fixed an issue in the tests that caused some tests to fail if pyfits is + installed with read-only permissions. 
(#208) + +- Fixed a bug where instantiating a ``BinTableHDU`` from a numpy array + containing boolean fields converted all the values to ``False``. (#215) + +- Fixed an issue where passing an array of integers into the constructor of + ``Column()`` when the column type is floats of the same byte width caused the + column array to become garbled. (#218) + +- Fixed inconsistent behavior in creating CONTINUE cards from byte strings + versus Unicode strings in Python 2--CONTINUE cards can now be created + properly from Unicode strings (so long as they are convertible to ASCII). + (spacetelescope/PyFITS#1) + +- Fixed a couple cases where creating a new table using TDIMn in some of the + columns could caused a crash. (spacetelescope/PyFITS#3) + +- Fixed a bug in parsing HIERARCH keywords that do not have a space after + the first equals sign (before the value). (spacetelescope/PyFITS#5) + +- Prevented extra leading whitespace on HIERARCH keywords from being treated + as part of the keyword. (spacetelescope/PyFITS#6) + +- Fixed a bug where HIERARCH keywords containing lower-case letters was + mistakenly marked as invalid during header validation. + (spacetelescope/PyFITS#7) + +- Fixed an issue that was ancillary to (spacetelescope/PyFITS#7) where the + ``Header.index()`` method did not work correctly with HIERARCH keywords + containing lower-case letters. + + +3.0.11 (2013-04-17) +------------------- + +- Fixed support for opening gzipped FITS files by filename in a writeable mode + (PyFITS has supported writing to gzip files for some time now, but only + enabled it when GzipFile objects were passed to ``pyfits.open()`` due to + some legacy code preventing full gzip support. Backported from 3.1.2. (#195) + +- Added a more helpful error message in the case of malformatted FITS files + that contain non-float NULL values in an ASCII table but are missing the + required TNULLn keywords in the header. Backported from 3.1.2. 
(#197) + +- Fixed an (apparently long-standing) issue where writing compressed images did + not correctly write the TFORMn keywords for variable-length array columns + (they ommitted the max array length parameter of the format). Backported from + 3.1.2. (#199) + +- Slightly refactored how tables containing variable-length array columns are + handled to add two improvements: Fixes an issue where accessing the data + after a call to the ``pyfits.getdata`` convenience function caused an + exception, and allows the VLA data to be read from an existing mmap of the + FITS file. Backported from 3.1.2. (#200) + +- Fixed a bug that could occur when opening a table containing + multi-dimensional columns (i.e. via the TDIMn keyword) and then writing it + out to a new file. Backported from 3.1.2. (#201) + +- Fixed a bug on Python 3 where attempting to open a non-existent file on + Python 3 caused a seemingly unrelated traceback. Backported from 3.1.2. + (#203) + +- Fixed a bug in fitsdiff that reported two header keywords containing NaN + as value as different. Backported from 3.1.2. (#204) + +- Fixed an issue in the tests that caused some tests to fail if pyfits is + installed with read-only permissions. Backported from 3.1.2. (#208) + +- Fixed a bug where instantiating a ``BinTableHDU`` from a numpy array + containing boolean fields converted all the values to ``False``. Backported + from 3.1.2. (#215) + +- Fixed an issue where passing an array of integers into the constructor of + ``Column()`` when the column type is floats of the same byte width caused the + column array to become garbled. Backported from 3.1.2. (#218) + +- Fixed a couple cases where creating a new table using TDIMn in some of the + columns could caused a crash. Backported from 3.1.2. + (spacetelescope/PyFITS#3) + + +3.1.1 (2013-01-02) +------------------ + +This is a bug fix release for the 3.1.x series. 
+ +Bug Fixes +^^^^^^^^^ + +- Improved handling of scaled images and pseudo-unsigned integer images in + compressed image HDUs. They now work more transparently like normal image + HDUs with support for the ``do_not_scale_image_data`` and ``uint`` options, + as well as ``scale_back`` and ``save_backup``. The ``.scale()`` method + works better too. (#88) + +- Permits non-string values for the EXTNAME keyword when reading in a file, + rather than throwing an exception due to the malformatting. Added + verification for the format of the EXTNAME keyword when writing. (#96) + +- Added support for EXTNAME and EXTVER in PRIMARY HDUs. That is, if EXTNAME + is specified in the header, it will also be reflected in the ``.name`` + attribute and in ``pyfits.info()``. These keywords used to be verboten in + PRIMARY HDUs, but the latest version of the FITS standard allows them. + (#151) + +- HCOMPRESS can again be used to compress data cubes (and higher-dimensional + arrays) so long as the tile size is effectively 2-dimensional. In fact, + PyFITS will automatically use compatible tile sizes even if they're not + explicitly specified. (#171) + +- Added support for the optional ``endcard`` parameter in the + ``Header.fromtextfile()`` and ``Header.totextfile()`` methods. Although + ``endcard=False`` was a reasonable default assumption, there are still text + dumps of FITS headers that include the END card, so this should have been + more flexible. (#176) + +- Fixed a crash when running fitsdiff on two empty (that is, zero row) tables. + (#178) + +- Fixed an issue where opening files containing random groups HDUs in update + mode could cause an unnecessary rewrite of the file even if none of the + data is modified. (#179) + +- Fixed a bug that could caused a deadlock in the filesystem on OSX if PyFITS + is used with Numpy 1.7 in some cases. (#180) + +- Fixed a crash when generating diff reports from diffs using the + ``ignore_comments`` options. 
(#181) + +- Fixed some bugs with WCS Paper IV record-valued keyword cards: + + - Cards that looked kind of like RVKCs but were not intended to be were + over-permissively treated as such--commentary keywords like COMMENT and + HISTORY were particularly affected. (#183) + + - Looking up a card in a header by its standard FITS keyword only should + always return the raw value of that card. That way cards containing + values that happen to valid RVKCs but were not intended to be will still + be treated like normal cards. (#184) + + - Looking up a RVKC in a header with only part of the field-specifier (for + example "DP1.AXIS" instead of "DP1.AXIS.1") was implicitly treated as a + wildcard lookup. (#184) + +- Fixed a crash when diffing two FITS files where at least one contains a + compressed image HDU which was not recognized as an image instead of a + table. (#187) + +- Fixed bugs in the backwards compatibility layer for the ``CardList.index`` + and ``CardList.count`` methods. (#190) + +- Improved ``__repr__`` and text file representation of cards with long values + that are split into CONTINUE cards. (#193) + +- Fixed a crash when trying to assign a long (> 72 character) value to blank + ('') keywords. This also changed how blank keywords are represented--there + are still exactly 8 spaces before any commentary content can begin; this + *may* affect the exact display of header cards that assumed there could be + fewer spaces in a blank keyword card before the content begins. However, the + current approach is more in line with the requirements of the FITS standard. + (#194) + + +3.0.10 (2013-01-02) +------------------- + +- Improved handling of scaled images and pseudo-unsigned integer images in + compressed image HDUs. They now work more transparently like normal image + HDUs with support for the ``do_not_scale_image_data`` and ``uint`` options, + as well as ``scale_back`` and ``save_backup``. The ``.scale()`` method + works better too. Backported from 3.1.1. 
(#88) + +- Permits non-string values for the EXTNAME keyword when reading in a file, + rather than throwing an exception due to the malformatting.  Added + verification for the format of the EXTNAME keyword when writing.  Backported + from 3.1.1. (#96) + +- Added support for EXTNAME and EXTVER in PRIMARY HDUs.  That is, if EXTNAME + is specified in the header, it will also be reflected in the ``.name`` + attribute and in ``pyfits.info()``.  These keywords used to be verboten in + PRIMARY HDUs, but the latest version of the FITS standard allows them. + Backported from 3.1.1. (#151) + +- HCOMPRESS can again be used to compress data cubes (and higher-dimensional + arrays) so long as the tile size is effectively 2-dimensional.  In fact, + PyFITS will now automatically use compatible tile sizes even if they're not + explicitly specified.  Backported from 3.1.1. (#171) + +- Fixed a bug when writing out files containing zero-width table columns, + where the TFIELDS keyword would be updated incorrectly, leaving the table + largely unreadable.  Backported from 3.1.0. (#174) + +- Fixed an issue where opening files containing random groups HDUs in update + mode could cause an unnecessary rewrite of the file even if none of the + data is modified.  Backported from 3.1.1. (#179) + +- Fixed a bug that could cause a deadlock in the filesystem on OSX if PyFITS + is used with Numpy 1.7 in some cases.  Backported from 3.1.1. (#180) + + +3.1 (2012-08-08) +---------------- + +Highlights +^^^^^^^^^^ + +- The ``Header`` object has been significantly reworked, and ``CardList`` + objects are now deprecated (their functionality folded into the ``Header`` + class).  See API Changes below for more details. + +- Memory maps are now used by default to access HDU data.  See API Changes + below for more details. + +- Now includes a new version of the ``fitsdiff`` program for comparing two + FITS files, and a new FITS comparison API used by ``fitsdiff``.  See New + Features below. 
+ +API Changes +^^^^^^^^^^^ + +- The ``Header`` class has been rewritten, and the ``CardList`` class is + deprecated. Most of the basic details of working with FITS headers are + unchanged, and will not be noticed by most users. But there are differences + in some areas that will be of interest to advanced users, and to application + developers. For full details of the changes, see the "Header Interface + Transition Guide" section in the PyFITS documentation. See ticket #64 on + the PyFITS Trac for further details and background. Some highlights are + listed below: + + * The Header class now fully implements the Python dict interface, and can + be used interchangeably with a dict, where the keys are header keywords. + + * New keywords can be added to the header using normal keyword assignment + (previously it was necessary to use ``Header.update`` to add new + keywords). For example:: + + >>> header['NAXIS'] = 2 + + will update the existing 'FOO' keyword if it already exists, or add a new + one if it doesn't exist, just like a dict. + + * It is possible to assign both a value and a comment at the same time using + a tuple:: + + >>> header['NAXIS'] = (2, 'Number of axes') + + * To add/update a new card and ensure it's added in a specific location, use + ``Header.set()``:: + + >>> header.set('NAXIS', 2, 'Number of axes', after='BITPIX') + + This works the same as the old ``Header.update()``. ``Header.update()`` + still works in the old way too, but is deprecated. + + * Although ``Card`` objects still exist, it generally is not necessary to + work with them directly. ``Header.ascardlist()``/``Header.ascard`` are + deprecated and should not be used. To directly access the ``Card`` + objects in a header, use ``Header.cards``. + + * To access card comments, it is still possible to either go through the + card itself, or through ``Header.comments``. 
For example:: + + >>> header.cards['NAXIS'].comment + Number of axes + >>> header.comments['NAXIS'] + Number of axes + + * ``Card`` objects can now be used interchangeably with + ``(keyword, value, comment)`` 3-tuples. They still have ``.value`` and + ``.comment`` attributes as well. The ``.key`` attribute has been renamed + to ``.keyword`` for consistency, though ``.key`` is still supported (but + deprecated). + +- Memory mapping is now used by default to access HDU data. That is, + ``pyfits.open()`` uses ``memmap=True`` as the default. This provides better + performance in the majority of use cases--there are only some I/O intensive + applications where it might not be desirable. Enabling mmap by default also + enabled finding and fixing a large number of bugs in PyFITS' handling of + memory-mapped data (most of these bug fixes were backported to PyFITS + 3.0.5). (#85) + + * A new ``pyfits.USE_MEMMAP`` global variable was added. Set + ``pyfits.USE_MEMMAP = False`` to change the default memmap setting for + opening files. This is especially useful for controlling the behavior in + applications where pyfits is deeply embedded. + + * Likewise, a new ``PYFITS_USE_MEMMAP`` environment variable is supported. + Set ``PYFITS_USE_MEMMAP = 0`` in your environment to change the default + behavior. + +- The ``size()`` method on HDU objects is now a ``.size`` property--this + returns the size in bytes of the data portion of the HDU, and in most cases + is equivalent to ``hdu.data.nbytes`` (#83) + +- ``BinTableHDU.tdump`` and ``BinTableHDU.tcreate`` are deprecated--use + ``BinTableHDU.dump`` and ``BinTableHDU.load`` instead. The new methods + output the table data in a slightly different format from previous versions, + which places quotes around each value. This format is compatible with data + dumps from previous versions of PyFITS, but not vice-versa due to a parsing + bug in older versions. 
+ +- Likewise the ``pyfits.tdump`` and ``pyfits.tcreate`` convenience function + versions of these methods have been renamed ``pyfits.tabledump`` and + ``pyfits.tableload``.  The old names are deprecated, but currently retained for + backwards compatibility. (r1125) + +- A new global variable ``pyfits.EXTENSION_NAME_CASE_SENSITIVE`` was added. + This serves as a replacement for ``pyfits.setExtensionNameCaseSensitive`` + which is now deprecated and may be removed in a future version.  To enable + case-sensitivity of extension names (i.e. treat 'sci' as distinct from 'SCI') + set ``pyfits.EXTENSION_NAME_CASE_SENSITIVE = True``.  The default is + ``False``. (r1139) + +- A new global configuration variable ``pyfits.STRIP_HEADER_WHITESPACE`` was + added.  By default, if a string value in a header contains trailing + whitespace, that whitespace is automatically removed when the value is read. + Now if you set ``pyfits.STRIP_HEADER_WHITESPACE = False`` all whitespace is + preserved. (#146) + +- The old ``classExtensions`` extension mechanism (which was deprecated in + PyFITS 3.0) is removed outright.  To our knowledge it was no longer used + anywhere. (r1309) + +- Warning messages from PyFITS issued through the Python warnings API are now + output to stderr instead of stdout, as is the default.  PyFITS no longer + modifies the default behavior of the warnings module with respect to which + stream it outputs to. (r1319) + +- The ``checksum`` argument to ``pyfits.open()`` now accepts a value of + 'remove', which causes any existing CHECKSUM/DATASUM keywords to be ignored, + and removed when the file is saved. + +New Features +^^^^^^^^^^^^ + +- Added support for the proposed "FITS" extension HDU type.  See + http://listmgr.cv.nrao.edu/pipermail/fitsbits/2002-April/001094.html.  FITS + HDUs contain an entire FITS file embedded in their data section.  ``FitsHDU`` + objects work like other HDU types in PyFITS.  Their ``.data`` attribute + returns the raw data array. 
However, they have a special ``.hdulist`` + attribute which processes the data as a FITS file and returns it as an + in-memory HDUList object. FitsHDU objects also support a + ``FitsHDU.fromhdulist()`` classmethod which returns a new ``FitsHDU`` object + that embeds the supplied HDUList. (#80) + +- Added a new ``.is_image`` attribute on HDU objects, which is True if the HDU + data is an 'image' as opposed to a table or something else. Here the + meaning of 'image' is fairly loose, and mostly just means a Primary or Image + extension HDU, or possibly a compressed image HDU (#71) + +- Added an ``HDUList.fromstring`` classmethod which can parse a FITS file + already in memory and instantiate and ``HDUList`` object from it. This + could be useful for integrating PyFITS with other libraries that work on + FITS file, such as CFITSIO. It may also be useful in streaming + applications. The name is a slight misnomer, in that it actually accepts + any Python object that implements the buffer interface, which includes + ``bytes``, ``bytearray``, ``memoryview``, ``numpy.ndarray``, etc. (#90) + +- Added a new ``pyfits.diff`` module which contains facilities for comparing + FITS files. One can use the ``pyfits.diff.FITSDiff`` class to compare two + FITS files in their entirety. There is also a ``pyfits.diff.HeaderDiff`` + class for just comparing two FITS headers, and other similar interfaces. + See the PyFITS Documentation for more details on this interface. The + ``pyfits.diff`` module powers the new ``fitsdiff`` program installed with + PyFITS. After installing PyFITS, run ``fitsdiff --help`` for usage details. + +- ``pyfits.open()`` now accepts a ``scale_back`` argument. If set to + ``True``, this automatically scales the data using the original BZERO and + BSCALE parameters the file had when it was first opened, if any, as well as + the original BITPIX. 
For example, if the original BITPIX were 16, this + would be equivalent to calling ``hdu.scale('int16', 'old')`` just before + calling ``flush()`` or ``close()`` on the file.  This option applies to all + HDUs in the file. (#120) + +- ``pyfits.open()`` now accepts a ``save_backup`` argument.  If set to + ``True``, this automatically saves a backup of the original file before + flushing any changes to it (this of course only applies to update and append + mode).  This may be especially useful when working with scaled image data. + (#121) + +Changes in Behavior +^^^^^^^^^^^^^^^^^^^ + +- Warnings from PyFITS are now output to stderr by default, instead of stdout + as it has been for some time.  This is contrary to most users' expectations + and makes it more difficult for them to separate output from PyFITS from the + desired output for their scripts. (r1319) + +Bug Fixes +^^^^^^^^^ + +- Fixed ``pyfits.tcreate()`` (now ``pyfits.tableload()``) to be more robust + when encountering blank lines in a column definition file (#14) + +- Fixed a fairly rare crash that could occur in the handling of CONTINUE cards + when using Numpy 1.4 or lower (though 1.4 is the oldest version supported by + PyFITS). (r1330) + +- Fixed ``_BaseHDU.fromstring`` to actually correctly instantiate an HDU + object from a string/buffer containing the header and data of that HDU. + This allowed for the implementation of ``HDUList.fromstring`` described + above. (#90) + +- Fixed a rare corner case where, in some use cases, (mildly, recoverably) + malformatted float values in headers were not properly returned as floats. + (#137) + +- Fixed a corollary to the previous bug where float values with a leading zero + before the decimal point had the leading zero unnecessarily removed when + saving changes to the file (eg. "0.001" would be written back as ".001" even + if no changes were otherwise made to the file). 
(#137) + +- When opening a file containing CHECKSUM and/or DATASUM keywords in update + mode, the CHECKSUM/DATASUM are updated and preserved even if the file was + opened with checksum=False. This change in behavior prevents checksums from + being unintentionally removed. (#148) + +- Fixed a bug where ``ImageHDU.scale(option='old')`` wasn't working at all--it + was not restoring the image to its original BSCALE and BZERO values. (#162) + +- Fixed a bug when writing out files containing zero-width table columns, + where the TFIELDS keyword would be updated incorrectly, leaving the table + largely unreadable. This fix will be backported to the 3.0.x series in + version 3.0.10. (#174) + + +3.0.9 (2012-08-06) +------------------ + +This is a bug fix release for the 3.0.x series. + +Bug Fixes +^^^^^^^^^ + +- Fixed ``Header.values()``/``Header.itervalues()`` and ``Header.items()``/ + ``Header.iteritems()`` to correctly return the different values for + duplicate keywords (particularly commentary keywords like HISTORY and + COMMENT). This makes the old Header implementation slightly more compatible + with the new implementation in PyFITS 3.1. (#127) + + .. note:: + This fix did not change the existing behavior from earlier PyFITS + versions where ``Header.keys()`` returns all keywords in the header with + duplicates removed. PyFITS 3.1 changes that behavior, so that + ``Header.keys()`` includes duplicates. + +- Fixed a bug where ``ImageHDU.scale(option='old')`` wasn't working at all--it + was not restoring the image to its original BSCALE and BZERO values. (#162) + +- Fixed a bug where opening a file containing compressed image HDUs in + 'update' mode and then immediately closing it without making any changes + caused the file to be rewritten unncessarily. (#167) + +- Fixed two memory leaks that could occur when writing compressed image data, + or in some cases when opening files containing compressed image HDUs in + 'update' mode. 
(#168) + + +3.0.8 (2012-06-04) +------------------ + +Changes in Behavior +^^^^^^^^^^^^^^^^^^^ + +- Prior to this release, image data sections did not work with scaled + data--that is, images with non-trivial BSCALE and/or BZERO values. + Previously, in order to read such images in sections, it was necessary to + manually apply the BSCALE+BZERO to each section. It's worth noting that + sections *did* support pseudo-unsigned ints (flakily). This change just + extends that support for general BSCALE+BZERO values. + +Bug Fixes +^^^^^^^^^ + +- Fixed a bug that prevented updates to values in boolean table columns from + being saved. This turned out to be a symptom of a deeper problem that could + prevent other table updates from being saved as well. (#139) + +- Fixed a corner case in which a keyword comment ending with the string "END" + could, in some circumstances, cause headers (and the rest of the file after + that point) to be misread. (#142) + +- Fixed support for scaled image data and pseudo-unsigned ints in image data + sections (``hdu.section``). Previously this was not supported at all. At + some point support was supposedly added, but it was buggy and incomplete. + Now the feature seems to work much better. (#143) + +- Fixed the documentation to point out that image data sections *do* support + non-contiguous slices (and have for a long time). The documentation was + never updated to reflect this, and misinformed users that only contiguous + slices were supported, leading to some confusion. (#144) + +- Fixed a bug where creating an ``HDUList`` object containing multiple PRIMARY + HDUs caused an infinite recursion when validating the object prior to + writing to a file. (#145) + +- Fixed a rare but serious case where saving an update to a file that + previously had a CHECKSUM and/or DATASUM keyword, but removed the checksum + in saving, could cause the file to be slightly corrupted and unreadable. 
+ (#147) + +- Fixed problems with reading "non-standard" FITS files with primary headers + containing SIMPLE = F. PyFITS has never made many guarantees as to how such + files are handled. But it should at least be possible to read their + headers, and the data if possible. Saving changes to such a file should not + try to prepend an unwanted valid PRIMARY HDU. (#157) + +- Fixed a bug where opening an image with ``disable_image_compression = True`` + caused compression to be disabled for all subsequent ``pyfits.open()`` calls. + (r1651) + + +3.0.7 (2012-04-10) +------------------ + +Changes in Behavior +^^^^^^^^^^^^^^^^^^^ + +- Slices of GroupData objects now return new GroupData objects instead of + extended multi-row _Group objects. This is analogous to how PyFITS 3.0 fixed + FITS_rec slicing, and should have been fixed for GroupData at the same time. + The old behavior caused bugs where functions internal to Numpy expected that + slicing an ndarray would return a new ndarray. As this is a rare usecase + with a rare feature most users are unlikely to be affected by this change. + +- The previously internal _Group object for representing individual group + records in a GroupData object are renamed Group and are now a public + interface. However, there's almost no good reason to create Group objects + directly, so it shouldn't be considered a "new feature". + +- An annoyance from PyFITS 3.0.6 was fixed, where the value of the EXTEND + keyword was always being set to F if there are not actually any extension + HDUs. It was unnecessary to modify this value. + +Bug Fixes +^^^^^^^^^ + +- Fixed GroupData objects to return new GroupData objects when sliced instead + of _Group record objects. See "Changes in behavior" above for more details. + +- Fixed slicing of Group objects--previously it was not possible to slice + them at all. + +- Made it possible to assign ``np.bool_`` objects as header values. 
(#123) + +- Fixed overly strict handling of the EXTEND keyword; see "Changes in + behavior" above. (#124) + +- Fixed many cases where an HDU's header would be marked as "modified" by + PyFITS and rewritten, even when no changes to the header are necessary. + (#125) + +- Fixed a bug where the values of the PTYPEn keywords in a random groups HDU + were forced to be all lower-case when saving the file. (#130) + +- Removed an unnecessary inline import in ``ExtensionHDU.__setattr__`` that was + causing some slowdown when opening files containing a large number of + extensions, plus a few other small (but not insignificant) performance + improvements thanks to Julian Taylor. (#133) + +- Fixed a regression where header blocks containing invalid end-of-header + padding (i.e. null bytes instead of spaces) couldn't be parsed by PyFITS. + Such headers can be parsed again, but a warning is raised, as such headers + are not valid FITS. (#136) + +- Fixed a memory leak where table data in random groups HDUs weren't being + garbage collected. (#138) + + +3.0.6 (2012-02-29) +------------------ + +Highlights +^^^^^^^^^^ + +The main reason for this release is to fix an issue that was introduced in +PyFITS 3.0.5 where merely opening a file containing scaled data (that is, with +non-trivial BSCALE and BZERO keywords) in 'update' mode would cause the data +to be automatically rescaled--possibly converting the data from ints to +floats--as soon as the file is closed, even if the application did not touch +the data. Now PyFITS will only rescale the data in an extension when the data +is actually accessed by the application. So opening a file in 'update' mode +in order to modify the header or append new extensions will not cause any +change to the data in existing extensions. + +This release also fixes a few Windows-specific bugs found through more +extensive Windows testing, and other miscellaneous bugs. 
+ +Bug Fixes +^^^^^^^^^ + +- More accurate error messages when opening files containing invalid header + cards. (#109) + +- Fixed a possible reference cycle/memory leak that was caught through more + extensive testing on Windows. (#112) + +- Fixed 'ostream' mode to open the underlying file in 'wb' mode instead of 'w' + mode. (#112) + +- Fixed a Windows-only issue where trying to save updates to a resized FITS + file could result in a crash due to there being open mmaps on that file. + (#112) + +- Fixed a crash when trying to create a FITS table (i.e. with new_table()) + from a Numpy array containing bool fields. (#113) + +- Fixed a bug where manually initializing an ``HDUList`` with a list of + HDUs wouldn't set the correct EXTEND keyword value on the primary HDU. + (#114) + +- Fixed a crash that could occur when trying to deepcopy a Header in Python < + 2.7. (#115) + +- Fixed an issue where merely opening a scaled image in 'update' mode would + cause the data to be converted to floats when the file is closed. (#119) + + +3.0.5 (2012-01-30) +------------------ + +- Fixed a crash that could occur when accessing image sections of files + opened with memmap=True. (r1211) + +- Fixed the inconsistency in the behavior of files opened in 'readonly' mode + when memmap=True vs. when memmap=False. In the latter case, although + changes to array data were not saved to disk, it was possible to update the + array data in memory. On the other hand with memmap=True, 'readonly' mode + prevented even in-memory modification to the data. This is what + 'copyonwrite' mode was for, but difference in behavior was confusing. Now + 'readonly' is equivalent to 'copyonwrite' when using memmap. If the old + behavior of denying changes to the array data is necessary, a new + 'denywrite' mode may be used, though it is only applicable to files opened + with memmap. 
(r1275) + +- Fixed an issue where files opened with memmap=True would return image data + as a raw numpy.memmap object, which can cause some unexpected + behaviors--instead memmap object is viewed as a numpy.ndarray. (r1285) + +- Fixed an issue in Python 3 where a workaround for a bug in Numpy on Python 3 + interacted badly with some other software, namely to vo.table package (and + possibly others). (r1320, r1337, and #110) + +- Fixed buggy behavior in the handling of SIGINTs (i.e. Ctrl-C keyboard + interrupts) while flushing changes to a FITS file. PyFITS already prevented + SIGINTs from causing an incomplete flush, but did not clean up the signal + handlers properly afterwards, or reraise the keyboard interrupt once the + flush was complete. (r1321) + +- Fixed a crash that could occur in Python 3 when opening files with checksum + checking enabled. (r1336) + +- Fixed a small bug that could cause a crash in the ``StreamingHDU`` interface + when using Numpy below version 1.5. + +- Fixed a crash that could occur when creating a new ``CompImageHDU`` from an + array of big-endian data. (#104) + +- Fixed a crash when opening a file with extra zero padding at the end. + Though FITS files should not have such padding, it's not explicitly forbidden + by the format either, and PyFITS shouldn't stumble over it. (#106) + +- Fixed a major slowdown in opening tables containing large columns of string + values. (#111) + + +3.0.4 (2011-11-22) +------------------ + +- Fixed a crash when writing HCOMPRESS compressed images that could happen on + Python 2.5 and 2.6. (r1217) + +- Fixed a crash when slicing an table in a file opened in 'readonly' mode with + memmap=True. (r1230) + +- Writing changes to a file or writing to a new file verifies the output in + 'fix' mode by default instead of 'exception'--that is, PyFITS will + automatically fix common FITS format errors rather than raising an + exception. 
(r1243) + +- Fixed a bug where convenience functions such as getval() and getheader() + crashed when specifying just 'PRIMARY' as the extension to use (r1263). + +- Fixed a bug that prevented passing keyword arguments (beyond the standard + data and header arguments) as positional arguments to the constructors of + extension HDU classes. + +- Fixed some tests that were failing on Windows--in this case the tests + themselves failed to close some temp files and Windows refused to delete them + while there were still open handles on them. (r1295) + +- Fixed an issue with floating point formatting in header values on Python 2.5 + for Windows (and possibly other platforms). The exponent was zero-padded to + 3 digits; although the FITS standard makes no specification on this, the + formatting is now normalized to always pad the exponent to two digits. + (r1295) + +- Fixed a bug where long commentary cards (such as HISTORY and COMMENT) were + broken into multiple CONTINUE cards. However, commentary cards are not + expected to be found in CONTINUE cards. Instead these long cards are broken + into multiple commentary cards. (#97) + +- GZIP/ZIP-compressed FITS files can be detected and opened regardless of + their filename extension. (#99) + +- Fixed a serious bug where opening scaled images in 'update' mode and then + closing the file without touching the data would cause the file to be + corrupted. (#101) + + +3.0.3 (2011-10-05) +------------------ + +- Fixed several small bugs involving corner cases in record-valued keyword + cards (#70) + +- In some cases HDU creation failed if the first keyword value in the header + was not a string value (#89) + +- Fixed a crash when trying to compute the HDU checksum when the data array + contains an odd number of bytes (#91) + +- Disabled an unnecessary warning that was displayed on opening compressed + HDUs with disable_image_compression = True (#92) + +- Fixed a typo in code for handling HCOMPRESS compressed images. 
+ + +3.0.2 (2011-09-23) +------------------ + +- The ``BinTableHDU.tcreate`` method and by extension the ``pyfits.tcreate`` + function don't get tripped up by blank lines anymore (#14) + +- The presence, value, and position of the EXTEND keyword in Primary HDUs is + verified when reading/writing a FITS file (#32) + +- Improved documentation (in warning messages as well as in the handbook) that + PyFITS uses zero-based indexing (as one would expect for C/Python code, but + contrary to the PyFITS standard which was written with FORTRAN in mind) + (#68) + +- Fixed a bug where updating a header card comment could cause the value to be + lost if it had not already been read from the card image string. + +- Fixed a related bug where changes made directly to Card object in a header + (i.e. assigning directly to card.value or card.comment) would not propagate + when flushing changes to the file (#69) [Note: This and the bug above it + were originally reported as being fixed in version 3.0.1, but the fix was + never included in the release.] + +- Improved file handling, particularly in Python 3 which had a few small file + I/O-related bugs (#76) + +- Fixed a bug where updating a FITS file would sometimes cause it to lose its + original file permissions (#79) + +- Fixed the handling of TDIMn keywords; 3.0 added support for them, but got + the axis order backwards (they were treated as though they were row-major) + (#82) + +- Fixed a crash when a FITS file containing scaled data is opened and + immediately written to a new file without explicitly viewing the data first + (#84) + +- Fixed a bug where creating a table with columns named either 'names' or + 'formats' resulted in an infinite recursion (#86) + + +3.0.1 (2011-09-12) +------------------ + +- Fixed a bug where updating a header card comment could cause the value to be + lost if it had not already been read from the card image string. 
+ +- Changed ``_TableBaseHDU.data`` so that if the data contain an empty table a + ``FITS_rec`` object with zero rows is returned rather than ``None``. + +- The ``.key`` attribute of ``RecordValuedKeywordCards`` now returns the full + keyword+field-specifier value, instead of just the plain keyword (#46) + +- Fixed a related bug where changes made directly to Card object in a header + (i.e. assigning directly to card.value or card.comment) would not propagate + when flushing changes to the file (#69) + +- Fixed a bug where writing a table with zero rows could fail in some cases + (#72) + +- Miscellaneous small bug fixes that were causing some tests to fail, + particularly on Python 3 (#74, #75) + +- Fixed a bug where creating a table column from an array in non-native byte + order would not preserve the byte order, thus interpreting the column array + using the wrong byte order (#77) + + +3.0.0 (2011-08-23) +-------------------- + +- Contains major changes, bumping the version to 3.0 + +- Large amounts of refactoring and reorganization of the code; tried to + preserve public API backwards-compatibility with older versions (private API + has many changes and is not guaranteed to be backwards-compatible). There + are a few small public API changes to be aware of: + + * The pyfits.rec module has been removed completely. If your version of + numpy does not have the numpy.core.records module it is too old to be used + with PyFITS. + + * The ``Header.ascardlist()`` method is deprecated--use the ``.ascard`` + attribute instead. + + * ``Card`` instances have a new ``.cardimage`` attribute that should be used + rather than ``.ascardimage()``, which may become deprecated. + + * The ``Card.fromstring()`` method is now a classmethod. It returns a new + ``Card`` instance rather than modifying an existing instance. + + * The ``req_cards()`` method on HDU instances has changed: The ``pos`` + argument is no longer a string. 
It is either an integer value (meaning + the card's position must match that value) or it can be a function that + takes the card's position as its argument, and returns True if the + position is valid. Likewise, the ``test`` argument no longer takes a + string, but instead a function that validates the card's value and returns + True or False. + + * The ``get_coldefs()`` method of table HDUs is deprecated. Use the + ``.columns`` attribute instead. + + * The ``ColDefs.data`` attribute is deprecated--use ``ColDefs.columns`` + instead (though in general you shouldn't mess with it directly--it might + become internal at some point). + + * ``FITS_record`` objects take ``start`` and ``end`` as arguments instead of + ``startColumn`` and ``endColumn`` (these are rarely created manually, so + it's unlikely that this change will affect anyone). + + * ``BinTableHDU.tcreate()`` is now a classmethod, and returns a new + ``BinTableHDU`` instance. + + * Use ``ExtensionHDU`` and ``NonstandardExtHDU`` for making new extension HDU + classes. They are now public interfaces, whereas previously they were + private and prefixed with underscores. + + * Possibly others--please report if you find any changes that cause + difficulties. + +- Calls to deprecated functions will display a Deprecation warning. However, + in Python 2.7 and up Deprecation warnings are ignored by default, so run + Python with the ``-Wd`` option to see if you're using any deprecated + functions. If we get close to actually removing any functions, we might + make the Deprecation warnings display by default. 
+ +- Added basic Python 3 support + +- Added support for multi-dimensional columns in tables as specified by the + TDIMn keywords (#47) + +- Fixed a major memory leak that occurred when creating new tables with the + ``new_table()`` function (#49) + be padded with zero-bytes) vs ASCII tables (where strings are padded with + spaces) (#15) + +- Fixed a bug in which the case of Random Access Group parameters names was not + preserved when writing (#41) + +- Added support for binary table fields with zero width (#42) + +- Added support for wider integer types in ASCII tables; although this is non- + standard, some GEIS images require it (#45) + +- Fixed a bug that caused the index_of() method of HDULists to crash when the + HDUList object is created from scratch (#48) + +- Fixed the behavior of string padding in binary tables (where strings should + be padded with nulls instead of spaces) + +- Fixed a rare issue that caused excessive memory usage when computing + checksums using a non-standard block size (see r818) + +- Add support for forced uint data in image sections (#53) + +- Fixed an issue where variable-length array columns were not extended when + creating a new table with more rows than the original (#54) + +- Fixed tuple and list-based indexing of FITS_rec objects (#55) + +- Fixed an issue where BZERO and BSCALE keywords were appended to headers in + the wrong location (#56) + +- ``FITS_record`` objects (table rows) have full slicing support, including + stepping, etc. (#59) + +- Fixed a bug where updating multiple files simultaneously (such as when + running parallel processes) could lead to a race condition with mktemp() + (#61) + +- Fixed a bug where compressed image headers were not in the order expected by + the funpack utility (#62) + + +2.4.0 (2011-01-10) +-------------------- +The following enhancements were added: + +- Checksum support now correctly conforms to the FITS standard. 
pyfits + supports reading and writing both the old checksums and new + standard-compliant checksums. The ``fitscheck`` command-line utility is + provided to verify and update checksums. + +- Added a new optional keyword argument ``do_not_scale_image_data`` + to the ``pyfits.open`` convenience function. When this argument + is provided as True, and an ImageHDU is read that contains scaled + data, the data is not automatically scaled when it is read. This + option may be used when opening a fits file for update, when you only + want to update some header data. Without the use of this argument, if + the header updates required the size of the fits file to change, then + when writing the updated information, the data would be read, scaled, + and written back out in its scaled format (usually with a different + data type) instead of in its non-scaled format. + +- Added a new optional keyword argument ``disable_image_compression`` to the + ``pyfits.open`` function. When ``True``, any compressed image HDU's will + be read in like they are binary table HDU's. + +- Added a ``verify`` keyword argument to the ``pyfits.append`` function. When + ``False``, ``append`` will assume the existing FITS file is already valid + and simply append new content to the end of the file, resulting in a large + speed up appending to large files. + +- Added HDU methods ``update_ext_name`` and ``update_ext_version`` for + updating the name and version of an HDU. + +- Added HDU method ``filebytes`` to calculate the number of bytes that will be + written to the file associated with the HDU. + +- Enhanced the section class to allow reading non-contiguous image data. + Previously, the section class could only be used to read contiguous data. + (CNSHD781626) + +- Added method ``HDUList.fileinfo()`` that returns a dictionary with + information about the location of header and data in the file associated + with the HDU. 
+ +The following bugs were fixed: + +- Reading in some malformed FITS headers would cause a ``NameError`` + exception, rather than information about the cause of the error. + +- pyfits can now handle non-compliant ``CONTINUE`` cards produced by Java + FITS. + +- ``BinTable`` columns with ``TSCALn`` are now byte-swapped correctly. + +- Ensure that floating-point card values are no longer than 20 characters. + +- Updated ``flush`` so that when the data has changed in an HDU for a file + opened in update mode, the header will be updated to match the changed data + before writing out the HDU. + +- Allow ``HIERARCH`` cards to contain a keyword and value whose total + character length is 69 characters. Previous length was limited at 68 + characters. + +- Calls to ``FITS_rec['columnName']`` now return an ``ndarray``. exactly the + same as a call to ``FITS_rec.field('columnName')`` or + ``FITS_rec.columnName``. Previously, ``FITS_rec['columnName']`` returned a + much less useful ``fits_record`` object. (CNSHD789053) + +- Corrected the ``append`` convenience function to eliminate the reading of + the HDU data from the file that is being appended to. (CNSHD794738) + +- Eliminated common symbols between the pyfitsComp module and the cfitsio and + zlib libraries. These can cause problems on systems that use both PyFITS + and cfitsio or zlib. (CNSHD795046) + + +2.3.1 (2010-06-03) +-------------------- + +The following bugs were fixed: + +- Replaced code in the Compressed Image HDU extension which was covered under + a GNU General Public License with code that is covered under a BSD License. + This change allows the distribution of pyfits under a BSD License. + + +2.3 (2010-05-11) +------------------ + +The following enhancements were made: + +- Completely eliminate support for numarray. + +- Rework pyfits documentation to use Sphinx. + +- Support python 2.6 and future division. + +- Added a new method to get the file name associated with an HDUList object. 
+ The method HDUList.filename() returns the name of an associated file. It + returns None if no file is associated with the HDUList. + +- Support the python 2.5 'with' statement when opening fits files. + (CNSHD766308) It is now possible to use the following construct: + + >>> from __future__ import with_statement import pyfits + >>> with pyfits.open("input.fits") as hdul: + ... #process hdul + >>> + +- Extended the support for reading unsigned integer 16 values from an ImageHDU + to include unsigned integer 32 and unsigned integer 64 values. ImageHDU + data is considered to be unsigned integer 16 when the data type is signed + integer 16 and BZERO is equal to 2**15 (32784) and BSCALE is equal to 1. + ImageHDU data is considered to be unsigned integer 32 when the data type is + signed integer 32 and BZERO is equal to 2**31 and BSCALE is equal to 1. + ImageHDU data is considered to be unsigned integer 64 when the data type is + signed integer 64 and BZERO is equal to 2**63 and BSCALE is equal to 1. An + optional keyword argument (uint) was added to the open convenience function + for this purpose. Supplying a value of True for this argument will cause + data of any of these types to be read in and scaled into the appropriate + unsigned integer array (uint16, uint32, or uint64) instead of into the + normal float 32 or float 64 array. If an HDU associated with a file that + was opened with the 'int' option and containing unsigned integer 16, 32, or + 64 data is written to a file, the data will be reverse scaled into a signed + integer 16, 32, or 64 array and written out to the file along with the + appropriate BSCALE/BZERO header cards. Note that for backward + compatibility, the 'uint16' keyword argument will still be accepted in the + open function when handling unsigned integer 16 conversion. + +- Provided the capability to access the data for a column of a fits table by + indexing the table using the column name. 
This is consistent with Record + Arrays in numpy (array with fields). (CNSHD763378) The following example + will illustrate this: + + >>> import pyfits + >>> hdul = pyfits.open('input.fits') + >>> table = hdul[1].data + >>> table.names + ['c1','c2','c3','c4'] + >>> print table.field('c2') # this is the data for column 2 + ['abc' 'xy'] + >>> print table['c2'] # this is also the data for column 2 + array(['abc', 'xy '], dtype='|S3') + >>> print table[1] # this is the data for row 1 + (2, 'xy', 6.6999997138977054, True) + +- Provided capabilities to create a BinaryTableHDU directly from a numpy + Record Array (array with fields). The new capabilities include table + creation, writing a numpy Record Array directly to a fits file using the + pyfits.writeto and pyfits.append convenience functions. Reading the data + for a BinaryTableHDU from a fits file directly into a numpy Record Array + using the pyfits.getdata convenience function. (CNSHD749034) Thanks to + Erin Sheldon at Brookhaven National Laboratory for help with this. + + The following should illustrate these new capabilities: + + >>> import pyfits + >>> import numpy + >>> t=numpy.zeros(5,dtype=[('x','f4'),('y','2i4')]) \ + ... # Create a numpy Record Array with fields + >>> hdu = pyfits.BinTableHDU(t) \ + ... # Create a Binary Table HDU directly from the Record Array + >>> print hdu.data + [(0.0, array([0, 0], dtype=int32)) + (0.0, array([0, 0], dtype=int32)) + (0.0, array([0, 0], dtype=int32)) + (0.0, array([0, 0], dtype=int32)) + (0.0, array([0, 0], dtype=int32))] + >>> hdu.writeto('test1.fits',clobber=True) \ + ... # Write the HDU to a file + >>> pyfits.info('test1.fits') + Filename: test1.fits + No. Name Type Cards Dimensions Format + 0 PRIMARY PrimaryHDU 4 () uint8 + 1 BinTableHDU 12 5R x 2C [E, 2J] + >>> pyfits.writeto('test.fits', t, clobber=True) \ + ... # Write the Record Array directly to a file + >>> pyfits.append('test.fits', t) \ + ... 
# Append another Record Array to the file + >>> pyfits.info('test.fits') + Filename: test.fits + No. Name Type Cards Dimensions Format + 0 PRIMARY PrimaryHDU 4 () uint8 + 1 BinTableHDU 12 5R x 2C [E, 2J] + 2 BinTableHDU 12 5R x 2C [E, 2J] + >>> d=pyfits.getdata('test.fits',ext=1) \ + ... # Get the first extension from the file as a FITS_rec + >>> print type(d) + + >>> print d + [(0.0, array([0, 0], dtype=int32)) + (0.0, array([0, 0], dtype=int32)) + (0.0, array([0, 0], dtype=int32)) + (0.0, array([0, 0], dtype=int32)) + (0.0, array([0, 0], dtype=int32))] + >>> d=pyfits.getdata('test.fits',ext=1,view=numpy.ndarray) \ + ... # Get the first extension from the file as a numpy Record + Array + >>> print type(d) + + >>> print d + [(0.0, [0, 0]) (0.0, [0, 0]) (0.0, [0, 0]) (0.0, [0, 0]) + (0.0, [0, 0])] + >>> print d.dtype + [('x', '>f4'), ('y', '>i4', 2)] + >>> d=pyfits.getdata('test.fits',ext=1,upper=True, + ... view=pyfits.FITS_rec) \ + ... # Force the Record Array field names to be in upper case + regardless of how they are stored in the file + >>> print d.dtype + [('X', '>f4'), ('Y', '>i4', 2)] + +- Provided support for writing fits data to file-like objects that do not + support the random access methods seek() and tell(). Most pyfits functions + or methods will treat these file-like objects as an empty file that cannot + be read, only written. It is also expected that the file-like object is in + a writable condition (ie. opened) when passed into a pyfits function or + method. The following methods and functions will allow writing to a + non-random access file-like object: HDUList.writeto(), HDUList.flush(), + pyfits.writeto(), and pyfits.append(). The pyfits.open() convenience + function may be used to create an HDUList object that is associated with the + provided file-like object. (CNSHD770036) + + An illustration of the new capabilities follows. 
In this example fits data + is written to standard output which is associated with a file opened in + write-only mode: + + >>> import pyfits + >>> import numpy as np + >>> import sys + >>> + >>> hdu = pyfits.PrimaryHDU(np.arange(100,dtype=np.int32)) + >>> hdul = pyfits.HDUList() + >>> hdul.append(hdu) + >>> tmpfile = open('tmpfile.py','w') + >>> sys.stdout = tmpfile + >>> hdul.writeto(sys.stdout, clobber=True) + >>> sys.stdout = sys.__stdout__ + >>> tmpfile.close() + >>> pyfits.info('tmpfile.py') + Filename: tmpfile.py + No. Name Type Cards Dimensions Format + 0 PRIMARY PrimaryHDU 5 (100,) int32 + >>> + +- Provided support for slicing a FITS_record object. The FITS_record object + represents the data from a row of a table. Pyfits now supports the slice + syntax to retrieve values from the row. The following illustrates this new + syntax: + + >>> hdul = pyfits.open('table.fits') + >>> row = hdul[1].data[0] + >>> row + ('clear', 'nicmos', 1, 30, 'clear', 'idno= 100') + >>> a, b, c, d, e = row[0:5] + >>> a + 'clear' + >>> b + 'nicmos' + >>> c + 1 + >>> d + 30 + >>> e + 'clear' + >>> + +- Allow the assignment of a row value for a pyfits table using a tuple or a + list as input. The following example illustrates this new feature: + + >>> c1=pyfits.Column(name='target',format='10A') + >>> c2=pyfits.Column(name='counts',format='J',unit='DN') + >>> c3=pyfits.Column(name='notes',format='A10') + >>> c4=pyfits.Column(name='spectrum',format='5E') + >>> c5=pyfits.Column(name='flag',format='L') + >>> coldefs=pyfits.ColDefs([c1,c2,c3,c4,c5]) + >>> + >>> tbhdu=pyfits.new_table(coldefs, nrows = 5) + >>> + >>> # Assigning data to a table's row using a tuple + >>> tbhdu.data[2] = ('NGC1',312,'A Note', + ... num.array([1.1,2.2,3.3,4.4,5.5],dtype=num.float32), + ... True) + >>> + >>> # Assigning data to a tables row using a list + >>> tbhdu.data[3] = ['JIM1','33','A Note', + ... 
num.array([1.,2.,3.,4.,5.],dtype=num.float32),True] + +- Allow the creation of a Variable Length Format (P format) column from a list + of data. The following example illustrates this new feature: + + >>> a = [num.array([7.2e-20,7.3e-20]),num.array([0.0]), + ... num.array([0.0])] + >>> acol = pyfits.Column(name='testa',format='PD()',array=a) + >>> acol.array + _VLF([[ 7.20000000e-20 7.30000000e-20], [ 0.], [ 0.]], + dtype=object) + >>> + +- Allow the assignment of multiple rows in a table using the slice syntax. The + following example illustrates this new feature: + + >>> counts = num.array([312,334,308,317]) + >>> names = num.array(['NGC1','NGC2','NGC3','NCG4']) + >>> c1=pyfits.Column(name='target',format='10A',array=names) + >>> c2=pyfits.Column(name='counts',format='J',unit='DN', + ... array=counts) + >>> c3=pyfits.Column(name='notes',format='A10') + >>> c4=pyfits.Column(name='spectrum',format='5E') + >>> c5=pyfits.Column(name='flag',format='L',array=[1,0,1,1]) + >>> coldefs=pyfits.ColDefs([c1,c2,c3,c4,c5]) + >>> + >>> tbhdu1=pyfits.new_table(coldefs) + >>> + >>> counts = num.array([112,134,108,117]) + >>> names = num.array(['NGC5','NGC6','NGC7','NCG8']) + >>> c1=pyfits.Column(name='target',format='10A',array=names) + >>> c2=pyfits.Column(name='counts',format='J',unit='DN', + ... array=counts) + >>> c3=pyfits.Column(name='notes',format='A10') + >>> c4=pyfits.Column(name='spectrum',format='5E') + >>> c5=pyfits.Column(name='flag',format='L',array=[0,1,0,0]) + >>> coldefs=pyfits.ColDefs([c1,c2,c3,c4,c5]) + >>> + >>> tbhdu=pyfits.new_table(coldefs) + >>> tbhdu.data[0][3] = num.array([1.,2.,3.,4.,5.], + ... dtype=num.float32) + >>> + >>> tbhdu2=pyfits.new_table(tbhdu1.data, nrows=9) + >>> + >>> # Assign the 4 rows from the second table to rows 5 thru + ... 8 of the new table. Note that the last row of the new + ... table will still be initialized to the default values. 
+ >>> tbhdu2.data[4:] = tbhdu.data + >>> + >>> print tbhdu2.data + [ ('NGC1', 312, '0.0', array([ 0., 0., 0., 0., 0.], + dtype=float32), True) + ('NGC2', 334, '0.0', array([ 0., 0., 0., 0., 0.], + dtype=float32), False) + ('NGC3', 308, '0.0', array([ 0., 0., 0., 0., 0.], + dtype=float32), True) + ('NCG4', 317, '0.0', array([ 0., 0., 0., 0., 0.], + dtype=float32), True) + ('NGC5', 112, '0.0', array([ 1., 2., 3., 4., 5.], + dtype=float32), False) + ('NGC6', 134, '0.0', array([ 0., 0., 0., 0., 0.], + dtype=float32), True) + ('NGC7', 108, '0.0', array([ 0., 0., 0., 0., 0.], + dtype=float32), False) + ('NCG8', 117, '0.0', array([ 0., 0., 0., 0., 0.], + dtype=float32), False) + ('0.0', 0, '0.0', array([ 0., 0., 0., 0., 0.], + dtype=float32), False)] + >>> + +The following bugs were fixed: + +- Corrected bugs in HDUList.append and HDUList.insert to correctly handle the + situation where you want to insert or append a Primary HDU as something + other than the first HDU in an HDUList and the situation where you want to + insert or append an Extension HDU as the first HDU in an HDUList. + +- Corrected a bug involving scaled images (both compressed and not compressed) + that include a BLANK, or ZBLANK card in the header. When the image values + match the BLANK or ZBLANK value, the value should be replaced with NaN after + scaling. Instead, pyfits was scaling the BLANK or ZBLANK value and + returning it. (CNSHD766129) + +- Corrected a byteswapping bug that occurs when writing certain column data. + (CNSHD763307) + +- Corrected a bug that occurs when creating a column from a chararray when one + or more elements are shorter than the specified format length. The bug + wrote nulls instead of spaces to the file. (CNSHD695419) + +- Corrected a bug in the HDU verification software to ensure that the header + contains no NAXISn cards where n > NAXIS. + +- Corrected a bug involving reading and writing compressed image data. 
When + written, the header keyword card ZTENSION will always have the value 'IMAGE' + and when read, if the ZTENSION value is not 'IMAGE' the user will receive a + warning, but the data will still be treated as image data. + +- Corrected a bug that restricted the ability to create a custom HDU class and + use it with pyfits. The bug fix will allow something like this: + + >>> import pyfits + >>> class MyPrimaryHDU(pyfits.PrimaryHDU): + ... def __init__(self, data=None, header=None): + ... pyfits.PrimaryHDU.__init__(self, data, header) + ... def _summary(self): + ... """ + ... Reimplement a method of the class. + ... """ + ... s = pyfits.PrimaryHDU._summary(self) + ... # change the behavior to suit me. + ... s1 = 'MyPRIMARY ' + s[11:] + ... return s1 + ... + >>> hdul=pyfits.open("pix.fits", + ... classExtensions={pyfits.PrimaryHDU: MyPrimaryHDU}) + >>> hdul.info() + Filename: pix.fits + No. Name Type Cards Dimensions Format + 0 MyPRIMARY MyPrimaryHDU 59 (512, 512) int16 + >>> + +- Modified ColDefs.add_col so that instead of returning a new ColDefs object + with the column added to the end, it simply appends the new column to the + current ColDefs object in place. (CNSHD768778) + +- Corrected a bug in ColDefs.del_col which raised a KeyError exception when + deleting a column from a ColDefs object. + +- Modified the open convenience function so that when a file is opened in + readonly mode and the file contains no HDU's an IOError is raised. + +- Modified _TableBaseHDU to ensure that all locations where data is referenced + in the object actually reference the same ndarray, instead of copies of the + array. + +- Corrected a bug in the Column class that failed to initialize data when the + data is a boolean array. (CNSHD779136) + +- Corrected a bug that caused an exception to be raised when creating a + variable length format column from character data (PA format). 
+ +- Modified installation code so that when installing on Windows, when a C++ + compiler compatible with the Python binary is not found, the installation + completes with a warning that all optional extension modules failed to + build. Previously, an Error was issued and the installation stopped. + + +2.2.2 (2009-10-12) +-------------------- + +Updates described in this release are only supported in the NUMPY version of +pyfits. + +The following bugs were fixed: + +- Corrected a bug that caused an exception to be raised when creating a + CompImageHDU using an initial header that does not match the image data in + terms of the number of axes. + + +2.2.1 (2009-10-06) +-------------------- + +Updates described in this release are only supported in the NUMPY version of +pyfits. + +The following bugs were fixed: + +- Corrected a bug that prevented the opening of a fits file where a header + contained a CHECKSUM card but no DATASUM card. + +- Corrected a bug that caused NULLs to be written instead of blanks when an + ASCII table was created using a numpy chararray in which the original data + contained trailing blanks. (CNSHD695419) + + +2.2 (2009-09-23) +------------------ + +Updates described in this release are only supported in the NUMPY version of +pyfits. + +The following enhancements were made: + +- Provide support for the FITS Checksum Keyword Convention. 
(CNSHD754301) + +- Adding the checksum=True keyword argument to the open convenience function + will cause checksums to be verified on file open: + + >>> hdul=pyfits.open('in.fits', checksum=True) + +- On output, CHECKSUM and DATASUM cards may be output to all HDU's in a fits + file by using the keyword argument checksum=True in calls to the writeto + convenience function, the HDUList.writeto method, the writeto methods of all + of the HDU classes, and the append convenience function: + + >>> hdul.writeto('out.fits', checksum=True) + +- Implemented a new insert method to the HDUList class that allows for the + insertion of a HDU into a HDUList at a given index: + + >>> hdul.insert(2,hdu) + +- Provided the capability to handle Unicode input for file names. + +- Provided support for integer division required by Python 3.0. + +The following bugs were fixed: + +- Corrected a bug that caused an index out of bounds exception to be raised + when iterating over the rows of a binary table HDU using the syntax "for + row in tbhdu.data: ". (CNSHD748609) + +- Corrected a bug that prevented the use of the writeto convenience function + for writing table data to a file. (CNSHD749024) + +- Modified the code to raise an IOError exception with the comment "Header + missing END card." when pyfits can't find a valid END card for a header when + opening a file. + + - This change addressed a problem with a non-standard fits file that + contained several new-line characters at the end of each header and at the + end of the file. However, since some people want to be able to open these + non-standard files anyway, an option was added to the open convenience + function to allow these files to be opened without exception: + + >>> pyfits.open('infile.fits',ignore_missing_end=True) + +- Corrected a bug that prevented the use of StringIO objects as fits files + when reading and writing table data. Previously, only image data was + supported. 
(CNSHD753698) + +- Corrected a bug that caused a bus error to be generated when compressing + image data using GZIP_1 under the Solaris operating system. + +- Corrected bugs that prevented pyfits from properly reading Random Groups + HDU's using numpy. (CNSHD756570) + +- Corrected a bug that can occur when writing a fits file. (CNSHD757508) + + - If no default SIGINT signal handler has not been assigned, before the + write, a TypeError exception is raised in the _File.flush() method when + attempting to return the signal handler to its previous state. Notably + this occurred when using mod_python. The code was changed to use SIG_DFL + when no old handler was defined. + +- Corrected a bug in CompImageHDU that prevented rescaling the image data + using hdu.scale(option='old'). + + +2.1.1 (2009-04-22) +------------------- + +Updates described in this release are only supported in the NUMPY version of +pyfits. + +The following bugs were fixed: + +- Corrected a bug that caused an exception to be raised when closing a file + opened for append, where an HDU was appended to the file, after data was + accessed from the file. This exception was only raised when running on a + Windows platform. + +- Updated the installation scripts, compression source code, and benchmark + test scripts to properly install, build, and execute on a Windows platform. + + +2.1 (2009-04-14) +------------------ + +Updates described in this release are only supported in the NUMPY version of +pyfits. + +The following enhancements were made: + +- Added new tdump and tcreate capabilities to pyfits. + + - The new tdump convenience function allows the contents of a binary table + HDU to be dumped to a set of three files in ASCII format. One file will + contain column definitions, the second will contain header parameters, and + the third will contain header data. 
+ + - The new tcreate convenience function allows the creation of a binary table + HDU from the three files dumped by the tdump convenience function. + + - The primary use for the tdump/tcreate methods are to allow editing in a + standard text editor of the binary table data and parameters. + +- Added support for case sensitive values of the EXTNAME card in an extension + header. (CNSHD745784) + + - By default, pyfits converts the value of EXTNAME cards to upper case when + reading from a file. A new convenience function + (setExtensionNameCaseSensitive) was implemented to allow a user to + circumvent this behavior so that the EXTNAME value remains in the same + case as it is in the file. + + - With the following function call, pyfits will maintain the case of all + characters in the EXTNAME card values of all extension HDU's during the + entire python session, or until another call to the function is made: + + >>> import pyfits + >>> pyfits.setExtensionNameCaseSensitive() + + - The following function call will return pyfits to its default (all upper + case) behavior: + + >>> pyfits.setExtensionNameCaseSensitive(False) + + +- Added support for reading and writing FITS files in which the value of the + first card in the header is 'SIMPLE=F'. In this case, the pyfits open + function returns an HDUList object that contains a single HDU of the new + type _NonstandardHDU. The header for this HDU is like a normal header (with + the exception that the first card contains SIMPLE=F instead of SIMPLE=T). + Like normal HDU's the reading of the data is delayed until actually + requested. The data is read from the file into a string starting from the + first byte after the header END card and continuing till the end of the + file. When written, the header is written, followed by the data string. No + attempt is made to pad the data string so that it fills into a standard 2880 + byte FITS block. 
(CNSHD744730) + +- Added support for FITS files containing extensions with unknown XTENSION + card values. (CNSHD744730) Standard FITS files support extension HDU's of + types TABLE, IMAGE, BINTABLE, and A3DTABLE. Accessing a nonstandard + extension from a FITS file will now create a _NonstandardExtHDU object. + Accessing the data of this object will cause the data to be read from the + file into a string. If the HDU is written back to a file the string data is + written after the Header and padded to fill a standard 2880 byte FITS block. + +The following bugs were fixed: + +- Extensive changes were made to the tiled image compression code to support + the latest enhancements made in CFITSIO version 3.13 to support this + convention. + +- Eliminated a memory leak in the tiled image compression code. + +- Corrected a bug in the FITS_record.__setitem__ method which raised a + NameError exception when attempting to set a value in a FITS_record object. + (CNSHD745844) + +- Corrected a bug that caused a TypeError exception to be raised when reading + fits files containing large table HDU's (>2Gig). (CNSHD745522) + +- Corrected a bug that caused a TypeError exception to be raised for all calls + to the warnings module when running under Python 2.6. The formatwarning + method in the warnings module was changed in Python 2.6 to include a new + argument. (CNSHD746592) + +- Corrected the behavior of the membership (in) operator in the Header class + to check against header card keywords instead of card values. (CNSHD744730) + +- Corrected the behavior of iteration on a Header object. The new behavior + iterates over the unique card keywords instead of the card values. + + +2.0.1 (2009-02-03) +-------------------- + +Updates described in this release are only supported in the NUMPY version of +pyfits. + +The following bugs were fixed: + +- Eliminated a memory leak when reading Table HDU's from a fits file. 
+ (CNSHD741877) + + +2.0 (2009-01-30) +------------------ + +Updates described in this release are only supported in the NUMPY version of +pyfits. + +The following enhancements were made: + +- Provide initial support for an image compression convention known as the + "Tiled Image Compression Convention" `[1]`_. + + - The principle used in this convention is to first divide the n-dimensional + image into a rectangular grid of subimages or "tiles". Each tile is then + compressed as a continuous block of data, and the resulting compressed + byte stream is stored in a row of a variable length column in a FITS + binary table. Several commonly used algorithms for compressing image + tiles are supported. These include, GZIP, RICE, H-Compress and IRAF pixel + list (PLIO). + + - Support for compressed image data is provided using the optional + "pyfitsComp" module contained in a C shared library (pyfitsCompmodule.so). + + - The header of a compressed image HDU appears to the user like any image + header. The actual header stored in the FITS file is that of a binary + table HDU with a set of special keywords, defined by the convention, to + describe the structure of the compressed image. The conversion between + binary table HDU header and image HDU header is all performed behind the + scenes. Since the HDU is actually a binary table, it may not appear as a + primary HDU in a FITS file. + + - The data of a compressed image HDU appears to the user as standard + uncompressed image data. The actual data is stored in the fits file as + Binary Table data containing at least one column (COMPRESSED_DATA). Each + row of this variable-length column contains the byte stream that was + generated as a result of compressing the corresponding image tile. + Several optional columns may also appear. 
These include, + UNCOMPRESSED_DATA to hold the uncompressed pixel values for tiles that + cannot be compressed, ZSCALE and ZZERO to hold the linear scale factor and + zero point offset which may be needed to transform the raw uncompressed + values back to the original image pixel values, and ZBLANK to hold the + integer value used to represent undefined pixels (if any) in the image. + + - To create a compressed image HDU from scratch, simply construct a + CompImageHDU object from an uncompressed image data array and its + associated image header. From there, the HDU can be treated just like any + image HDU: + + >>> hdu=pyfits.CompImageHDU(imageData,imageHeader) + >>> hdu.writeto('compressed_image.fits') + + - The signature for the CompImageHDU initializer method describes the + possible options for constructing a CompImageHDU object:: + + def __init__(self, data=None, header=None, name=None, + compressionType='RICE_1', + tileSize=None, + hcompScale=0., + hcompSmooth=0, + quantizeLevel=16.): + """ + data: data of the image + header: header to be associated with the + image + name: the EXTNAME value; if this value + is None, then the name from the + input image header will be used; + if there is no name in the input + image header then the default name + 'COMPRESSED_IMAGE' is used + compressionType: compression algorithm 'RICE_1', + 'PLIO_1', 'GZIP_1', 'HCOMPRESS_1' + tileSize: compression tile sizes default + treats each row of image as a tile + hcompScale: HCOMPRESS scale parameter + hcompSmooth: HCOMPRESS smooth parameter + quantizeLevel: floating point quantization level; + """ + +- Added two new convenience functions. The setval function allows the setting + of the value of a single header card in a fits file. The delval function + allows the deletion of a single header card in a fits file. + +- A modification was made to allow the reading of data from a fits file + containing a Table HDU that has duplicate field names. 
It is normally a + requirement that the field names in a Table HDU be unique. Prior to this + change a ValueError was raised, when the data was accessed, to indicate that + the HDU contained duplicate field names. Now, a warning is issued and the + field names are made unique in the internal record array. This will not + change the TTYPEn header card values. You will be able to get the data from + all fields using the field name, including the first field containing the + name that is duplicated. To access the data of the other fields with the + duplicated names you will need to use the field number instead of the field + name. (CNSHD737193) + +- An enhancement was made to allow the reading of unsigned integer 16 values + from an ImageHDU when the data is signed integer 16 and BZERO is equal to + 32784 and BSCALE is equal to 1 (the standard way for scaling unsigned + integer 16 data). A new optional keyword argument (uint16) was added to the + open convenience function. Supplying a value of True for this argument will + cause data of this type to be read in and scaled into an unsigned integer 16 + array, instead of a float 32 array. If a HDU associated with a file that + was opened with the uint16 option and containing unsigned integer 16 data is + written to a file, the data will be reverse scaled into an integer 16 array + and written out to the file and the BSCALE/BZERO header cards will be + written with the values 1 and 32768 respectively. 
(CHSHD736064) Reference + the following example: + + >>> import pyfits + >>> hdul=pyfits.open('o4sp040b0_raw.fits',uint16=1) + >>> hdul[1].data + array([[1507, 1509, 1505, ..., 1498, 1500, 1487], + [1508, 1507, 1509, ..., 1498, 1505, 1490], + [1505, 1507, 1505, ..., 1499, 1504, 1491], + ..., + [1505, 1506, 1507, ..., 1497, 1502, 1487], + [1507, 1507, 1504, ..., 1495, 1499, 1486], + [1515, 1507, 1504, ..., 1492, 1498, 1487]], dtype=uint16) + >>> hdul.writeto('tmp.fits') + >>> hdul1=pyfits.open('tmp.fits',uint16=1) + >>> hdul1[1].data + array([[1507, 1509, 1505, ..., 1498, 1500, 1487], + [1508, 1507, 1509, ..., 1498, 1505, 1490], + [1505, 1507, 1505, ..., 1499, 1504, 1491], + ..., + [1505, 1506, 1507, ..., 1497, 1502, 1487], + [1507, 1507, 1504, ..., 1495, 1499, 1486], + [1515, 1507, 1504, ..., 1492, 1498, 1487]], dtype=uint16) + >>> hdul1=pyfits.open('tmp.fits') + >>> hdul1[1].data + array([[ 1507., 1509., 1505., ..., 1498., 1500., 1487.], + [ 1508., 1507., 1509., ..., 1498., 1505., 1490.], + [ 1505., 1507., 1505., ..., 1499., 1504., 1491.], + ..., + [ 1505., 1506., 1507., ..., 1497., 1502., 1487.], + [ 1507., 1507., 1504., ..., 1495., 1499., 1486.], + [ 1515., 1507., 1504., ..., 1492., 1498., 1487.]], dtype=float32) + +- Enhanced the message generated when a ValueError exception is raised when + attempting to access a header card with an unparsable value. The message + now includes the Card name. + +The following bugs were fixed: + +- Corrected a bug that occurs when appending a binary table HDU to a fits + file. Data was not being byteswapped on little endian machines. + (CNSHD737243) + +- Corrected a bug that occurs when trying to write an ImageHDU that is missing + the required PCOUNT card in the header. An UnboundLocalError exception + complaining that the local variable 'insert_pos' was referenced before + assignment was being raised in the method _ValidHDU.req_cards. 
The code was + modified so that it would properly issue a more meaningful ValueError + exception with a description of what required card is missing in the header. + +- Eliminated a redundant warning message about the PCOUNT card when validating + an ImageHDU header with a PCOUNT card that is missing or has a value other + than 0. + +.. _[1]: http://fits.gsfc.nasa.gov/registry/tilecompression.html + + +1.4.1 (2008-11-04) +-------------------- + +Updates described in this release are only supported in the NUMPY version of +pyfits. + +The following enhancements were made: + +- Enhanced the way import errors are reported to provide more information. + +The following bugs were fixed: + +- Corrected a bug that occurs when a card value is a string and contains a + colon but is not a record-valued keyword card. + +- Corrected a bug where pyfits fails to properly handle a record-valued + keyword card with values using exponential notation and trailing blanks. + + +1.4 (2008-07-07) +------------------ + +Updates described in this release are only supported in the NUMPY version of +pyfits. + +The following enhancements were made: + +- Added support for file objects and file like objects. + + - All convenience functions and class methods that take a file name will now + also accept a file object or file like object. File like objects + supported are StringIO and GzipFile objects. Other file like objects will + work only if they implement all of the standard file object methods. + + - For the most part, file or file like objects may be either opened or + closed at function call. An opened object must be opened with the proper + mode depending on the function or method called. Whenever possible, if + the object is opened before the method is called, it will remain open + after the call. This will not be possible when writing a HDUList that has + been resized or when writing to a GzipFile object regardless of whether it + is resized. 
If the object is closed at the time of the function call, + only the name from the object is used, not the object itself. The pyfits + code will extract the file name used by the object and use that to create + an underlying file object on which the function will be performed. + +- Added support for record-valued keyword cards as introduced in the "FITS WCS + Paper IV proposal for representing a more general distortion model". + + - Record-valued keyword cards are string-valued cards where the string is + interpreted as a definition giving a record field name, and its floating + point value. In a FITS header they have the following syntax:: + + keyword= 'field-specifier: float' + + where keyword is a standard eight-character FITS keyword name, float is + the standard FITS ASCII representation of a floating point number, and + these are separated by a colon followed by a single blank. + + The grammar for field-specifier is:: + + field-specifier: + field + field-specifier.field + + field: + identifier + identifier.index + + where identifier is a sequence of letters (upper or lower case), + underscores, and digits of which the first character must not be a digit, + and index is a sequence of digits. No blank characters may occur in the + field-specifier. The index is provided primarily for defining array + elements though it need not be used for that purpose. + + Multiple record-valued keywords of the same name but differing values may + be present in a FITS header. The field-specifier may be viewed as part of + the keyword name. + + Some examples follow:: + + DP1 = 'NAXIS: 2' + DP1 = 'AXIS.1: 1' + DP1 = 'AXIS.2: 2' + DP1 = 'NAUX: 2' + DP1 = 'AUX.1.COEFF.0: 0' + DP1 = 'AUX.1.POWER.0: 1' + DP1 = 'AUX.1.COEFF.1: 0.00048828125' + DP1 = 'AUX.1.POWER.1: 1' + + - As with standard header cards, the value of a record-valued keyword card + can be accessed using either the index of the card in a HDU's header or + via the keyword name. 
When accessing using the keyword name, the user may + specify just the card keyword or the card keyword followed by a period + followed by the field-specifier. Note that while the card keyword is case + insensitive, the field-specifier is not. Thus, hdu['abc.def'], + hdu['ABC.def'], or hdu['aBc.def'] are all equivalent but hdu['ABC.DEF'] is + not. + + - When accessed using the card index of the HDU's header the value returned + will be the entire string value of the card. For example: + + >>> print hdr[10] + NAXIS: 2 + >>> print hdr[11] + AXIS.1: 1 + + - When accessed using the keyword name exclusive of the field-specifier, the + entire string value of the header card with the lowest index having that + keyword name will be returned. For example: + + >>> print hdr['DP1'] + NAXIS: 2 + + - When accessing using the keyword name and the field-specifier, the value + returned will be the floating point value associated with the + record-valued keyword card. For example: + + >>> print hdr['DP1.NAXIS'] + 2.0 + + - Any attempt to access a non-existent record-valued keyword card value will + cause an exception to be raised (IndexError exception for index access or + KeyError for keyword name access). + + - Updating the value of a record-valued keyword card can also be + accomplished using either index or keyword name. For example: + + >>> print hdr['DP1.NAXIS'] + 2.0 + >>> hdr['DP1.NAXIS'] = 3.0 + >>> print hdr['DP1.NAXIS'] + 3.0 + + - Adding a new record-valued keyword card to an existing header is + accomplished using the Header.update() method just like any other card. + For example: + + >>> hdr.update('DP1', 'AXIS.3: 1', 'a comment', after='DP1.AXIS.2') + + - Deleting a record-valued keyword card from an existing header is + accomplished using the standard list deletion syntax just like any other + card. 
For example: + + >>> del hdr['DP1.AXIS.1'] + + - In addition to accessing record-valued keyword cards individually using a + card index or keyword name, cards can be accessed in groups using a set of + special pattern matching keys. This access is made available via the + standard list indexing operator providing a keyword name string that + contains one or more of the special pattern matching keys. Instead of + returning a value, a CardList object will be returned containing shared + instances of the Cards in the header that match the given keyword + specification. + + - There are three special pattern matching keys. The first key '*' will + match any string of zero or more characters within the current level of + the field-specifier. The second key '?' will match a single character. + The third key '...' must appear at the end of the keyword name string and + will match all keywords that match the preceding pattern down all levels + of the field-specifier. All combinations of ?, \*, and ... are permitted + (though ... is only permitted at the end). Some examples follow: + + >>> cl=hdr['DP1.AXIS.*'] + >>> print cl + DP1 = 'AXIS.1: 1' + DP1 = 'AXIS.2: 2' + >>> cl=hdr['DP1.*'] + >>> print cl + DP1 = 'NAXIS: 2' + DP1 = 'NAUX: 2' + >>> cl=hdr['DP1.AUX...'] + >>> print cl + DP1 = 'AUX.1.COEFF.0: 0' + DP1 = 'AUX.1.POWER.0: 1' + DP1 = 'AUX.1.COEFF.1: 0.00048828125' + DP1 = 'AUX.1.POWER.1: 1' + >>> cl=hdr['DP?.NAXIS'] + >>> print cl + DP1 = 'NAXIS: 2' + DP2 = 'NAXIS: 2' + DP3 = 'NAXIS: 2' + >>> cl=hdr['DP1.A*S.*'] + >>> print cl + DP1 = 'AXIS.1: 1' + DP1 = 'AXIS.2: 2' + + - The use of the special pattern matching keys for adding or updating header + cards in an existing header is not allowed. However, the deletion of + cards from the header using the special keys is allowed. For example: + + >>> del hdr['DP3.A*...'] + +- As noted above, accessing pyfits Header object using the special pattern + matching keys will return a CardList object. 
This CardList object can + itself be searched in order to further refine the list of Cards. For + example: + + >>> cl=hdr['DP1...'] + >>> print cl + DP1 = 'NAXIS: 2' + DP1 = 'AXIS.1: 1' + DP1 = 'AXIS.2: 2' + DP1 = 'NAUX: 2' + DP1 = 'AUX.1.COEFF.1: 0.000488' + DP1 = 'AUX.2.COEFF.2: 0.00097656' + >>> cl1=cl['*.*AUX...'] + >>> print cl1 + DP1 = 'NAUX: 2' + DP1 = 'AUX.1.COEFF.1: 0.000488' + DP1 = 'AUX.2.COEFF.2: 0.00097656' + + - The CardList keys() method will allow the retrieval of all of the key + values in the CardList. For example: + + >>> cl=hdr['DP1.AXIS.*'] + >>> print cl + DP1 = 'AXIS.1: 1' + DP1 = 'AXIS.2: 2' + >>> cl.keys() + ['DP1.AXIS.1', 'DP1.AXIS.2'] + + - The CardList values() method will allow the retrieval of all of the values + in the CardList. For example: + + >>> cl=hdr['DP1.AXIS.*'] + >>> print cl + DP1 = 'AXIS.1: 1' + DP1 = 'AXIS.2: 2' + >>> cl.values() + [1.0, 2.0] + + - Individual cards can be retrieved from the list using standard list + indexing. For example: + + >>> cl=hdr['DP1.AXIS.*'] + >>> c=cl[0] + >>> print c + DP1 = 'AXIS.1: 1' + >>> c=cl['DP1.AXIS.2'] + >>> print c + DP1 = 'AXIS.2: 2' + + - Individual card values can be retrieved from the list using the value + attribute of the card. For example: + + >>> cl=hdr['DP1.AXIS.*'] + >>> cl[0].value + 1.0 + + - The cards in the CardList are shared instances of the cards in the source + header. Therefore, modifying a card in the CardList also modifies it in + the source header. However, making an addition or a deletion to the + CardList will not affect the source header. 
For example: + + >>> hdr['DP1.AXIS.1'] + 1.0 + >>> cl=hdr['DP1.AXIS.*'] + >>> cl[0].value = 4.0 + >>> hdr['DP1.AXIS.1'] + 4.0 + >>> del cl[0] + >>> print cl['DP1.AXIS.1'] + Traceback (most recent call last): + File "", line 1, in + File "NP_pyfits.py", line 977, in __getitem__ + return self.ascard[key].value + File "NP_pyfits.py", line 1258, in __getitem__ + _key = self.index_of(key) + File "NP_pyfits.py", line 1403, in index_of + raise KeyError, 'Keyword %s not found.' % `key` + KeyError: "Keyword 'DP1.AXIS.1' not found." + >>> hdr['DP1.AXIS.1'] + 4.0 + + - A FITS header consists of card images. In pyfits each card image is + manifested by a Card object. A pyfits Header object contains a list of + Card objects in the form of a CardList object. A record-valued keyword + card image is represented in pyfits by a RecordValuedKeywordCard object. + This object inherits from a Card object and has all of the methods and + attributes of a Card object. + + - A new RecordValuedKeywordCard object is created with the + RecordValuedKeywordCard constructor: RecordValuedKeywordCard(key, value, + comment). The key and value arguments may be specified in two ways. The + key value may be given as the 8 character keyword only, in which case the + value must be a character string containing the field-specifier, a colon + followed by a space, followed by the actual value. The second option is + to provide the key as a string containing the keyword and field-specifier, + in which case the value must be the actual floating point value. For + example: + + >>> c1 = pyfits.RecordValuedKeywordCard('DP1', 'NAXIS: 2', 'Number of variables') + >>> c2 = pyfits.RecordValuedKeywordCard('DP1.AXIS.1', 1.0, 'Axis number') + + - RecordValuedKeywordCards have attributes .key, .field_specifier, .value, + and .comment. Both .value and .comment can be changed but not .key or + .field_specifier. The constructor will extract the field-specifier from + the input key or value, whichever is appropriate. 
The .key attribute is + the 8 character keyword. + + - Just like standard Cards, a RecordValuedKeywordCard may be constructed + from a string using the fromstring() method or verified using the verify() + method. For example: + + >>> c1 = pyfits.RecordValuedKeywordCard().fromstring( + "DP1 = 'NAXIS: 2' / Number of independent variables") + >>> c2 = pyfits.RecordValuedKeywordCard().fromstring( + "DP1 = 'AXIS.1: X' / Axis number") + >>> print c1; print c2 + DP1 = 'NAXIS: 2' / Number of independent variables + DP1 = 'AXIS.1: X' / Axis number + >>> c2.verify() + Output verification result: + Card image is not FITS standard (unparsable value string). + + - A standard card that meets the criteria of a RecordValuedKeywordCard may + be turned into a RecordValuedKeywordCard using the class method coerce. + If the card object does not meet the required criteria then the original + card object is just returned. + + >>> c1 = pyfits.Card('DP1','AUX: 1','comment') + >>> c2 = pyfits.RecordValuedKeywordCard.coerce(c1) + >>> print type(c2) + <'pyfits.NP_pyfits.RecordValuedKeywordCard'> + + - Two other card creation methods are also available as + RecordValuedKeywordCard class methods. These are createCard() which will + create the appropriate card object (Card or RecordValuedKeywordCard) given + input key, value, and comment, and createCardFromString which will create + the appropriate card object given an input string. 
These two methods are
+    also available as convenience functions:
+
+    >>> c1 = pyfits.RecordValuedKeywordCard.createCard('DP1','AUX: 1','comment')
+
+    or
+
+    >>> c1 = pyfits.createCard('DP1','AUX: 1','comment')
+    >>> print type(c1)
+    <'pyfits.NP_pyfits.RecordValuedKeywordCard'>
+
+    >>> c1 = pyfits.RecordValuedKeywordCard.createCard('DP1','AUX 1','comment')
+
+    or
+
+    >>> c1 = pyfits.createCard('DP1','AUX 1','comment')
+    >>> print type(c1)
+    <'pyfits.NP_pyfits.Card'>
+
+    >>> c1 = pyfits.RecordValuedKeywordCard.createCardFromString \
+            ("DP1     = 'AUX: 1.0' / comment")
+
+    or
+
+    >>> c1 = pyfits.createCardFromString("DP1     = 'AUX: 1.0' / comment")
+    >>> print type(c1)
+    <'pyfits.NP_pyfits.RecordValuedKeywordCard'>
+
+The following bugs were fixed:
+
+- Corrected a bug that occurs when writing a HDU out to a file.  During the
+  write, any Keyboard Interrupts are trapped so that the write completes
+  before the interrupt is handled.  Unfortunately, the Keyboard Interrupt was
+  not properly reinstated after the write completed.  This was fixed.
+  (CNSHD711138)
+
+- Corrected a bug when using ipython, where temporary files created with the
+  tempFile.NamedTemporaryFile method are not automatically removed.  This can
+  happen for instance when opening a Gzipped fits file or when opening a fits
+  file over the internet.  The files will now be removed.  (CNSHD718307)
+
+- Corrected a bug in the append convenience function's call to the writeto
+  convenience function.  The classExtensions argument must be passed as a
+  keyword argument.
+
+- Corrected a bug that occurs when retrieving variable length character arrays
+  from binary table HDUs (PA() format) and using slicing to obtain rows of
+  data containing variable length arrays.  The code issued a TypeError
+  exception.  The data can now be accessed with no exceptions.
(CNSHD718749) + +- Corrected a bug that occurs when retrieving data from a fits file opened in + memory map mode when the file contains multiple image extensions or ASCII + table or binary table HDUs. The code issued a TypeError exception. The + data can now be accessed with no exceptions. (CNSHD707426) + +- Corrected a bug that occurs when attempting to get a subset of data from a + Binary Table HDU and then use the data to create a new Binary Table HDU + object. A TypeError exception was raised. The data can now be subsetted + and used to create a new HDU. (CNSHD723761) + +- Corrected a bug that occurs when attempting to scale an Image HDU back to + its original data type using the _ImageBaseHDU.scale method. The code was + not resetting the BITPIX header card back to the original data type. This + has been corrected. + +- Changed the code to issue a KeyError exception instead of a NameError + exception when accessing a non-existent field in a table. + + +1.3 (2008-02-22) +------------------ + +Updates described in this release are only supported in the NUMPY version of +pyfits. + +The following enhancements were made: + +- Provided support for a new extension to pyfits called *stpyfits*. + + - The *stpyfits* module is a wrapper around pyfits. It provides all of the + features and functions of pyfits along with some STScI specific features. + Currently, the only new feature supported by stpyfits is the ability to + read and write fits files that contain image data quality extensions with + constant data value arrays. See stpyfits `[2]`_ for more details on + stpyfits. + +- Added a new feature to allow trailing HDUs to be deleted from a fits file + without actually reading the data from the file. + + - This supports a JWST requirement to delete a trailing HDU from a file + whose primary Image HDU is too large to be read on a 32 bit machine. + +- Updated pyfits to use the warnings module to issue warnings. 
All warnings + will still be issued to stdout, exactly as they were before, however, you + may now suppress warnings with the -Wignore command line option. For + example, to run a script that will ignore warnings use the following command + line syntax: + + python -Wignore yourscript.py + +- Updated the open convenience function to allow the input of an already + opened file object in place of a file name when opening a fits file. + +- Updated the writeto convenience function to allow it to accept the + output_verify option. + + - In this way, the user can use the argument output_verify='fix' to allow + pyfits to correct any errors it encounters in the provided header before + writing the data to the file. + +- Updated the verification code to provide additional detail with a + VerifyError exception. + +- Added the capability to create a binary table HDU directly from a + numpy.ndarray. This may be done using either the new_table convenience + function or the BinTableHDU constructor. + + +The following performance improvements were made: + +- Modified the import logic to dramatically decrease the time it takes to + import pyfits. + +- Modified the code to provide performance improvements when copying and + examining header cards. + +The following bugs were fixed: + +- Corrected a bug that occurs when reading the data from a fits file that + includes BZERO/BSCALE scaling. When the data is read in from the file, + pyfits automatically scales the data using the BZERO/BSCALE values in the + header. In the previous release, pyfits created a 32 bit floating point + array to hold the scaled data. This could cause a problem when the value of + BZERO is so large that the scaled value will not fit into the float 32. For + this release, when the input data is 32 bit integer, a 64 bit floating point + array is used for the scaled data. + +- Corrected a bug that caused an exception to be raised when attempting to + scale image data using the ImageHDU.scale method. 
+
+- Corrected a bug in the new_table convenience function that occurred when a
+  binary table was created using a ColDefs object as input and supplying an
+  nrows argument for a number of rows that is greater than the number of rows
+  present in the input ColDefs object.  The previous version of pyfits failed
+  to allocate the necessary memory for the additional rows.
+
+- Corrected a bug in the new_table convenience function that caused an
+  exception to be thrown when creating an ASCII table.
+
+- Corrected a bug in the new_table convenience function that will allow the
+  input of a ColDefs object that was read from a file as a binary table with a
+  data value equal to None.
+
+- Corrected a bug in the construction of ASCII tables from Column objects that
+  are created with noncontinuous start columns.
+
+- Corrected bugs in a number of areas that would sometimes cause a failure to
+  properly raise an exception when an error occurred.
+
+- Corrected a bug where attempting to open a non-existent fits file on a
+  windows platform using a drive letter in the file specification caused a
+  misleading IOError exception to be raised.
+
+.. _[2]: http://stsdas.stsci.edu/stsci_python_sphinxdocs_2.13/tools/stpyfits.html
+
+
+1.1 (2007-06-15)
+------------------
+
+- Modified to use either NUMPY or NUMARRAY.
+
+- New file writing modes have been provided to allow streaming data to
+  extensions without requiring the whole output extension image in memory. See
+  documentation on StreamingHDU.
+
+- Improvements to minimize byteswapping and memory usage by byteswapping in
+  place.
+
+- Now supports ':' characters in filenames.
+
+- Handles keyboard interrupts during long operations.
+
+- Preserves the byte order of the input image arrays.
+
+
+1.0.1 (2006-03-24)
+--------------------
+
+The changes to PyFITS were primarily to improve the docstrings and to
+reclassify some public functions and variables as private.
Readgeis and
+fitsdiff which were distributed with PyFITS in previous releases were moved to
+pytools.  This release of PyFITS is v1.0.1.  The next release of PyFITS will
+support both numarray and numpy (and will be available separately from
+stsci_python, as are all the python packages contained within stsci_python).
+An alpha release for PyFITS numpy support will be made around the time of this
+stsci_python release.
+
+- Updated docstrings for public functions.
+
+- Made some previously public functions private.
+
+
+1.0 (2005-11-01)
+------------------
+
+Major Changes since v0.9.6:
+
+- Added support for the HIERARCH convention
+
+- Added support for iteration and slicing for HDU lists
+
+- PyFITS now uses the standard setup.py installation script
+
+- Add utility functions at the module level, they include:
+
+  - getheader
+  - getdata
+  - getval
+  - writeto
+  - append
+  - update
+  - info
+
+Minor changes since v0.9.6:
+
+- Fix a bug to make single-column ASCII table work.
+
+- Fix a bug so a new table constructed from an existing table with X-formatted
+  columns will work.
+
+- Fix a problem in verifying HDUList right after the open statement.
+
+- Verify that elements in an HDUList, besides the first one, are ExtensionHDU.
+
+- Add output verification in methods flush() and close().
+
+- Modify the design of the open() function to remove the output_verify
+  argument.
+
+- Remove the groups argument in GroupsHDU's constructor.
+
+- Redesign the column definition class to make its column components more
+  accessible.  Also to make it conducive for higher level functionalities,
+  e.g. combining two column definitions.
+
+- Replace the Boolean class with the Python Boolean type.  The old TRUE/FALSE
+  will still work.
+
+- Convert classes to the new style.
+
+- Better format when printing card or card list.
+
+- Add the optional argument clobber to all writeto() functions and methods.
+
+- If adding a blank card, will not use existing blank card's space.
+
+PyFITS Version 1.0 REQUIRES Python 2.3 or later.
+
+
+0.9.6 (2004-11-11)
+--------------------
+
+Major changes since v0.9.3:
+
+- Support for variable length array tables.
+
+- Support for writing ASCII table extensions.
+
+- Support for random groups, both reading and writing.
+
+Some minor changes:
+
+- Support for numbers with leading zeros in an ASCII table extension.
+
+- Changed scaled columns' data type from Float32 to Float64 to preserve
+  precision.
+
+- Made Column constructor more flexible in accepting format specification.
+
+
+0.9.3 (2004-07-02)
+--------------------
+
+Changes since v0.9.0:
+
+- Lazy instantiation of full Headers/Cards for all HDU's when the file is
+  opened.  At the open, only extracts vital info (e.g. NAXIS's) from the
+  header parts.  This change will speed up the performance if the user only
+  needs to access one extension in a multi-extension FITS file.
+
+- Support the X format (bit flags) columns, both reading and writing, in a
+  binary table.  At the user interface, they are converted to Boolean arrays
+  for easy manipulation.  For example, if the column's TFORM is "11X",
+  internally the data is stored in 2 bytes, but the user will see, at each row
+  of this column, a Boolean array of 11 elements.
+
+- Fix a bug such that when a table extension has no data, it will not try to
+  scale the data when updating/writing the HDU list.
+ + +0.9 (2004-04-27) +------------------ + +Changes since v0.8.0: + +- Rewriting of the Card class to separate the parsing and verification of + header cards + +- Restructure the keyword indexing scheme which speed up certain applications + (update large number of new keywords and reading a header with larger + numbers of cards) by a factor of 30 or more + +- Change the default to be lenient FITS standard checking on input and strict + FITS standard checking on output + +- Support CONTINUE cards, both reading and writing + +- Verification can now be performed at any of the HDUList, HDU, and Card + levels + +- Support (contiguous) subsection (attribute .section) of images to reduce + memory usage for large images + + +0.8.0 (2003-08-19) +-------------------- + +**NOTE:** This version will only work with numarray Version 0.6. In addition, +earlier versions of PyFITS will not work with numarray 0.6. Therefore, both +must be updated simultaneously. + +Changes since 0.7.6: + +- Compatible with numarray 0.6/records 2.0 + +- For binary tables, now it is possible to update the original array if a + scaled field is updated. + +- Support of complex columns + +- Modify the __getitem__ method in FITS_rec. In order to make sure the scaled + quantities are also viewing the same data as the original FITS_rec, all + fields need to be "touched" when __getitem__ is called. + +- Add a new attribute mmobject for HDUList, and close the memmap object when + close HDUList object. Earlier version does not close memmap object and can + cause memory lockup. + +- Enable 'update' as a legitimate memmap mode. + +- Do not print message when closing an HDUList object which is not created + from reading a FITS file. Such message is confusing. + +- remove the internal attribute "closed" and related method (__getattr__ in + HDUList). It is redundant. + + +0.7.6 (2002-11-22) + +**NOTE:** This version will only work with numarray Version 0.4. 
+
+Changes since 0.7.5:
+
+- Change x*=n to numarray.multiply(x, n, x) where n is a floating number, in
+  order to make pyfits to work under Python 2.2. (2 occurrences)
+
+- Modify the "update" method in the Header class to use the "fixed-format"
+  card even if the card already exists.  This is to avoid the mis-alignment as
+  shown below:
+
+  After running drizzle on ACS images it creates a CD matrix whose elements
+  have very many digits, *e.g.*:
+
+  CD1_1   =  1.1187596304411E-05 / partial of first axis coordinate w.r.t. x
+  CD1_2   = -8.502767249350019E-06 / partial of first axis coordinate w.r.t. y
+
+  with pyfits, an "update" on these header items and write in new values which
+  has fewer digits, *e.g.*:
+
+  CD1_1   =  1.0963011E-05 / partial of first axis coordinate w.r.t. x
+  CD1_2   = -8.527229E-06 / partial of first axis coordinate w.r.t. y
+
+- Change some internal variables to make their appearance more consistent:
+
+  old name                 new name
+
+  __octalRegex             _octalRegex
+  __readblock()            _readblock()
+  __formatter()            _formatter().
+  __value_RE               _value_RE
+  __numr                   _numr
+  __comment_RE             _comment_RE
+  __keywd_RE               _keywd_RE
+  __number_RE              _number_RE.
+  tmpName()                _tmpName()
+  dimShape                 _dimShape
+  ErrList                  _ErrList
+
+- Move up the module description.  Move the copyright statement to the bottom
+  and assign to the variable __credits__.
+
+- change the following line:
+
+  self.__dict__ = input.__dict__
+
+  to
+
+  self.__setstate__(input.__getstate__())
+
+  in order for pyfits to run under numarray 0.4.
+
+- edit _readblock to add the (optional) firstblock argument and raise IOError
+  if the first 8 characters in the first block is not 'SIMPLE ' or
+  'XTENSION'.  Edit the function open to check for IOError to skip the last
+  null filled block(s).  Edit readHDU to add the firstblock argument.
+
+
+0.7.5 (2002-08-16)
+--------------------
+
+Changes since v0.7.3:
+
+- Memory mapping now works for readonly mode, both for images and binary
+  tables.
+
+  Usage:  pyfits.open('filename', memmap=1)
+
+- Edit the field method in FITS_rec class to make the column scaling for
+  numbers use less temporary memory.  (does not work under 2.2, due to Python
+  "bug" of array \*=)
+
+- Delete bscale/bzero in the ImageBaseHDU constructor.
+
+- Update bitpix in BaseImageHDU.__getattr__ after deleting bscale/bzero.  (bug
+  fix)
+
+- In BaseImageHDU.__getattr__ point self.data to raw_data if float and if not
+  memmap.  (bug fix).
+
+- Change the function get_tbdata() to private: _get_tbdata().
+
+
+0.7.3 (2002-07-12)
+--------------------
+
+Changes since v0.7.2:
+
+- It will scale all integer image data to Float32, if BSCALE/BZERO != 1/0.  It
+  will also expunge the BSCALE/BZERO keywords.
+
+- Add the scale() method for ImageBaseHDU, so data can be scaled just before
+  being written to the file.  It has the following arguments:
+
+  type: destination data type (string), e.g. Int32, Float32, UInt8, etc.
+
+  option: scaling scheme. if 'old', use the old BSCALE/BZERO values.  if
+  'minmax', use the data range to fit into the full range of specified integer
+  type.  Float destination data type will not be scaled for this option.
+
+  bscale/bzero: user specifiable BSCALE/BZERO values.  They overwrite the
+  "option".
+
+- Deal with data area resizing in 'update' mode.
+
+- Make the data scaling (both input and output) faster and use less memory.
+
+- Bug fix to make column name change take effect for field.
+
+- Bug fix to avoid exception if the key is not present in the header already.
+  This affects (fixes) add_history(), add_comment(), and add_blank().
+
+- Bug fix in __getattr__() in Card class.  The change made in 0.7.2 to rstrip
+  the comment must be string type to avoid exception.
+
+0.7.2.1 (2002-06-25)
+----------------------
+
+A couple of bugs were addressed in this version.
+
+- Fix a bug in _add_commentary().
Due to a change in index_of() during version + 0.6.5.5, _add_commentary needs to be modified to avoid exception if the key + is not present in the header already. This affects (fixes) add_history(), + add_comment(), and add_blank(). + +- Fix a bug in __getattr__() in Card class. The change made in 0.7.2 to rstrip + the comment must be string type to avoid exception. + + +0.7.2 (2002-06-19) +-------------------- + +The two major improvements from Version 0.6.2 are: + +- support reading tables with "scaled" columns (e.g. tscal/tzero, Boolean, + and ASCII tables) + +- a prototype output verification. + +This version of PyFITS requires numarray version 0.3.4. + +Other changes include: + +- Implement the new HDU hierarchy proposed earlier this year. This in turn + reduces some of the redundant methods common to several HDU classes. + +- Add 3 new methods to the Header class: add_history, add_comment, and + add_blank. + +- The table attributes _columns are now .columns and the attributes in ColDefs + are now all without the underscores. So, a user can get a list of column + names by: hdu.columns.names. + +- The "fill" argument in the new_table method now has a new meaning:
If + set to true (=1), it will fill the entire new table with zeros/blanks. + Otherwise (=0), just the extra rows/cells are filled with zeros/blanks. + Fill values other than zero/blank are now not possible. + +- Add the argument output_verify to the open method and writeto method. Not + in the flush or close methods yet, due to possible complication. + +- A new copy method for tables, the copy is totally independent from the table + it copies from. + +- The tostring() call in writeHDUdata takes up extra space to store the string + object. Use tofile() instead, to save space. + +- Make changes from _byteswap to _byteorder, following corresponding changes + in numarray and recarray. + +- Insert(update) EXTEND in PrimaryHDU only when header is None. + +- Strip the trailing blanks for the comment value of a card. + +- Add seek(0) right after the __buildin__.open(0), because for the 'ab+' mode, + the pointer is at the end after open in Linux, but it is at the beginning in + Solaris. + +- Add checking of data against header, update header keywords (NAXIS's, + BITPIX) when they don't agree with the data. + +- change version to __version__. + +There are also many other minor internal bug fixes and +technical changes. + + +0.6.2 (2002-02-12) +-------------------- + +This version requires numarray version 0.2. + +Things not yet supported but are part of future development: + +- Verification and/or correction of FITS objects being written to disk so that + they are legal FITS. This is being added now and should be available in + about a month. Currently, one may construct FITS headers that are + inconsistent with the data and write such FITS objects to disk. Future + versions will provide options to either a) correct discrepancies and warn, + b) correct discrepancies silently, c) throw a Python exception, or d) write + illegal FITS (for test purposes!). + +- Support for ascii tables or random groups format. Support for ASCII tables + will be done soon (~1 month). 
When random group support is added is + uncertain. + +- Support for memory mapping FITS data (to reduce memory demands). We expect + to provide this capability in about 3 months. + +- Support for columns in binary tables having scaled values (e.g. BSCALE or + BZERO) or boolean values. Currently booleans are stored as Int8 arrays and + users must explicitly convert them into a boolean array. Likewise, scaled + columns must be copied with scaling and offset by testing for those + attributes explicitly. Future versions will produce such copies + automatically. + +- Support for tables with TNULL values. This awaits an enhancement to numarray + to support mask arrays (planned). (At least a couple of months off). + +.. _PyFITS: http://www.stsci.edu/resources/software_hardware/pyfits diff --git a/docs/io/fits/images/Blue.jpg b/docs/io/fits/images/Blue.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ac9fa4c1060921cd563e20451cd9a94ce9212c9b GIT binary patch literal 35560 zcmbq)Wm6nX)Ab@ja^VtO!V+A9yCt~WLU4jha9G?WKyY_wfyIJ51a}s?6b>AkYBb`T$e_00J`7|F-;pP5=O8q<1K&2#9FtfcO9X zTks7BfPjRE^bYwQ+B-BMFhF~IXjkUaN% zWJG0`ZkFA(hY@JynlgvmZ2>kxZv%zqYKy!9F7G|eTyipLH=}2^nFYL|qxV!-aNn!H zqHln$#*;%AxoSVxi!&Rp(c^8=(@fJ2p$1G#G0VN)9`M8-xV58XRlxTl7d)(4J-O{5 zUgAYBhIw}4_VNNwhMy8^Wl{MxcJ?mteY@D%94r2bNAHRGqtTXGb!$(Hy~3Zbf9hmm z|9j?|%f1M2WQDc4qQtyu#r?w`z%q>n!>T~u8iYJ8agp|Hk+{nRWBvWhD@VOp1}MII z+FcZ!A543MLO(z~0Y9Z!dnhpyTj9BNy*sJB0crHTcIRb_b%;^JCm}Rq#Uj5&8jWs5?XTPXm8U-yEG| zeti-mV#RwZ4;a`>d~^Ww>)cXLtACfw{gbI^tG6X2m)?+?hV1HYSa=LVdB#8 z>6k;Fy+d{MnW;O?%iQlAS>`zdbFyn|LhA_kV}8C*7=>Vsw*A4Ay`+Qt2kWiIG8)Y_ zXwLR&NptEGtxe*+8+?07LrKP~u4|#~Ow9IPMsd&7rZHtmbu)Cjj7LtDCXM`iMG*B^ za6F7ttaUNlb^aEre2hzS$T^9R=M9P9Y85H6NvzCR%i~_yZ>Prdx<|irz%=F2(3xEG z)eNpgEW;(i7j*E!ATD7L!b)gcVR&+6q?U-PEf+&WVt0t?@vVA3c$g`vtL`$H?8wyU@(qijx0IIOIk1HO0R{(6`J@%y_KGQ!h+f{FXI9l~?_^vyQ2gaQr{hH4RYa5pNX8+@bKF=hfHXC_x{h2&A)TV4hAWYC+zv{rR=5km z+|`Gx>@ay~Q+de^pUUf5TakZQABe2s6{3c_x3TxM%Sa^>t)WnF_)m8KbJD>G 
zINN>JZz%K*w{j5MiN~)&GcfsP#we_10?t<~%7fq_9RZ^5@Gsu*eY}qjbKFb-5#&y1 zSkrp!v?)G4N<+Mol3cAI9gDbfd5Oiw2S{JF!;eyA3fhbH=1xQ20H(bkk1w9n;5%ij zrFZ_)(tA(qxvY^EC!;7#s?hIWbv+%+?5RPJW9HzvCr17t|%-`lPnc8 z=SQNlIq3(2J2eFGe!1rj)sbLs^n;E^pO>|Yqj=ivn6g-Q<(+>4G}~1);}$%kF_r*o z)Xwp!H$|dxh`-R!@2#zcPg4(kaakFP&Ma@NSc8_ShgXOEbQzqF78`|(Cj)!!dRJcP z{en@bb1l!81Q%QjJyytN<4Z5I0!OQAI&!YfYp~!QiUKAVJZd1MUY*u-N3HT}5_1%; z8y68e-ltSM6|aoaa({@?jbbc2d3%#u2U~JT+Z&*$QnZW-%Li#5hgMSJf;vEF_Df`< z6l0-t3{#R;hG`@edVlBT{ag&ctU^7u=0)HtI|jro-M|@9-%9cO9U2$r>~_a zK6AUS>6S7Houi~@#pLKDaF3uA4UFHgh}csYf&Eq<{q}_j=7U$qe7{}1oL1b?EZIL@+H1&_ndTs*JoWfspV zMFpOW_mV7YH{>wS-LFOMZXF+I(LfHtz`3Hq=%ir>*dN6&pm#z0Ha#QqEEPK zcbzFn`B73bI}W=c+=8}kMQ0Qu>3;+W{jGc>p2CJ2t5P*1O+`$FCp@pCc3lLJ=y>4C zkOX7xjfV{ICju$ew16UF^2fC7oHkBiNo%h#1KF&LxYnyx-6pP8tDO0bcevE%P z+vHq~P9dCY^(8*&;<|_3W23?4HIyb-VO}nih`^_rBM-`S3Ak6YvTaq--cOO9A}3h^ zzuBmlS|478D0k8Iy`nSk66G9kljNrI>h`Ro>K>srN>LGB`mp(o(z98ywdVm(nXH^P z)+=2%zD@OVn~KyCiC`|| z?$d~jxV`}xMR+;eyrT1v09MYy zfCNb^o;Wj-i}<=i1(Q^|>xjv58M!o|{#6i)4rzS^jlz{z3|_uGlq|V&1rZjs#d(48PGew7OJC5K_6bZP-`ChOc1W8x;vRrn3fV65GCtB4CS)s1@ zb~iG4KMmG6BNG}D*n*KhRF_IA-e3xvw= zZG9Q5(e1Gjd^RfH0A+*gjg?P@PkUW^tFW^3c`o?r8JF@2lK;u|Vae&eAJZw%Md9fy z8Lql((UMJyTQk($kdXV+08@E7>R^7>P|1ohx0>G(undXGYeJkPh|YD5x^EvWwEH-( zG*a)_&x$W6|H8z`@*aBbi^%NzX^uWE+XXMlDoNC)J8hEGk^7!y@oOcFln-_sI zwpE=%__!ACb{=4Ms;@~|4<6PlvEam=rvBy|==%Buiq28o=>1VGO@^+*UoI$@2J58I zmB5&JYSN#>FI50SB>_u0TFU_%K5P73RWoezh9&9x84m5TuA{vtQL0>vWwuFkkThV3 zLRLhA1c`$Nc|ymH{@0;~62+C6?HeHKS$d>QPW4Q0D`GWze}*A?i;{!H$9lBfd=FJ81Bl)wqZD*sJ>s2}OXA|pG{teKd<>wL| z2}wGT4mngZcocbNAG|tU9QpQh?5SQp=?!3QEQ~FoItuv`Uv{`Z9_nzE|GH}-j{82C zBpXA<1Cx_>nB<2N|M!Gh`vR-=e0Gh&>-F;ApAvO7HDbSX{tWJt!tZ|)&`u3G-K&BT zv`7&$$g!~!#xNyA@+L_F%LM+s*rAr!ofM0jtV%IXwMf}G?oSyhh>E$Nk)Hhcy9;hi zrE6$6;b5qw7m6;`F-zV}7B5)!8N*q4w2($Qju<+ASSIOFtoY!{3fe4oLf>tgsw_}S zRJQOHU>+g?V5b9IcbMxw43{>5R2hbq?!ymo&?0V1y!mGs;L>B1MZ+JpPB(Jp5vZ}P zqB$3q`JBlxx>&+1Y>kl54n1_t+p$a{=oK4sG6AbLaxWj_dG-*01NAQ}Y&j5>znT*m 
zNMnXLnS~T6OS>M;Vw(L~kXx!MGSGy%Gg%{g$&wAeiUs8Q!8J!9N*Z+N=oVhqh>F{c zz_JS(W%C-dpy9kq0@?<2mW*N|z-^3sy0H6zbgA?gBeNTf}r;N6gV&5QQayZfbW?6BoL zUtQ@yjyC}N1q^1gRNnna=**1UA}NC=X?;S92o#rgeg^_6>A_#S2Cs0dS8LE4S8XII zt~iy0EI(u5A0~*zIWL&1+9{u?OOfEGgvgx(FxN5K2L|wz+tBy4{a3Jh3mR|q`Vid; z=+NR2P>Vd=M*Di6g8N?9d%yF8{B_;{Xs<$ewLBV91SAB}%!i!wUenG3xSzS;$|6Gb z?pUw>wxdsi_tzH_yg(1%oXvDaD)z3k!0NCb^_}^LRpnd<{}LmOiazgWuwN}r$CM_X zl$lz-r+;W?mL|1yTU1f0hDjlq_IT4xND?}i7lg)Yde?kQ7C01 zZ; zeRCk8S$94$QKYMqu&>jhnsaTcZ-5Z>1rLV7na>fnZM$=s@2@^JZdVeG|llqVoP@jmcH<{&bqdk-WJQaAI8ZBEIzG)G( zv6H7>FhKt_SnYnje&+ozGSZL+2MswjPHc~2>H$;!G22O86l|rU@{QZEC1=+QUmC_Z zzHpTgS%CzvAUadFj30ZJmQw8^jq{U&dLaTPn%|O=OR+@s(We#r(mrE~@cML=OKHuL zmN-aY5fy>H3!7`ndnMv4%HddGeJygEG5k~nl4tbU`$DDDLzz-G%}KtXAzCRA``HrCl{3$V)03Lndfg?_McTTHzhUMPETQaDL4HN!@>#|e z{PmK%pZF36Z`fRj6{0oX`z?%-Jbz=|ZWPyOw642e$JA_8cdN~2v?s&=ec_Q}OFB4v z53<&ysKT@L1E`V0yt1gnr`6p~%U1rO+zC&Qdus0ua9;HqTcrw!AkzeABH5nIcT!(R z;z%ZzBQQQc!p%3?`<32<)}TpaObOPMOb@uN7^H=AT&iQzt7Vt= zee*s@zX_E2IY2y2iDIOn-jb;)_xdws<#4 zEyH))5_V8y$ZVm(S;IskOWq^yHi=J(;>JNcNo1x^Tbooaht+IG)AZk5qCjwpINInP z5{{dc<{2cf+AmhN7}b07AHbdg&zATSj#H11)_L~v<2iC5X6RAfqGlKwdh`q-2ZhsW z9qTh}n1+GausU;x{SF(uz!SR2>0lke7a%M3gt}W)AW$52f_V0qU0*<@M0!jM$-;na z?zgc-$k58v#vF(Vb9`X1W#+^(cTfeZ4H0s}oZ&K~a%#Sjl1X!*WnvL6Xd{U+)-OOj ziyf<#0o{Jl95_JgOQ++EUKuz`(Xj!~c|osr{F?rXic*0~jHlEyrg@msn9B|rmE|~P z0sIMG58+X@)W`BqEm11{9vO|^@Z&$^(@+V zJ$>;XWr&}5o!!~{T^Vy;WqSN9w7yki?L4EXVjyDQdghzk3RVHP7G!mFitU^(ITWie z2x4RTx7s}O<;jHI<>>KVT%At(v@N~N9^3q<>?%wBm@X?wUc#j8%f&@K_5<2rA(9P? 
zh~3~igDW*E+(O({1|(I^j_R^2H*at7plel|cV7qSbKXdza5ZS;h^Kj@@ZCUU_9GcU^-XIl)Uwsd)>X45iBedU$qL_1m%8> zWPp{#7`QHVSg*YSWCqJsDmQ3=-t5G&8?_Pa5{S|3Oe#NjS<8^$=>_}yPc57ZIEwvw0}w%39m*xkzk)vGv+E39>oa5>0L(e6p)O~8_0ak0 z;!5vG`h!xv7D-CV(;7+~3p$@d>;HT>`lt?YvY67fAA7e=KTA&d3UiUkpDZ#)bA#OM zbW8k0)>_qNFF*np-G2wCEY*?apt;nEV~DlMJa+#srn{~|i?PcnN=-rHhM~z|Xhg?; zCZ}$f%ZJ^e1gFr!=Kz#mk;hY!VWcVGFEQr3%6(^Nw&`w=$=3ll*id*s_uGE+aB0Zn zU|BiY&BdSxL)F0S3)P*TX(6xrbwu+v_&Rqx@j0)VZ+mXZE)g02Do+u)`NQOE6{Uz+X7k=cJS;IwVzs5wt(VJxQ<)u! z|Iip#tp_ykepNND_3{!2%m8hiFMd`N)q8-oMk)@ZJU}$7z!yS}0e#C0Iw@7@ILm2R z;s?xQBrHAEe&1Tqud|ysfZ$Ku1Urz;0fFv04g;@`kyu@FQe|f5#L23_7aE z(D3MGE?xdL!PYEbX9qH$|JJzWe+_qO_b=MzE3)KH=L8gR$e7;uH(C66vX{Z!^napn z+1z%-Tg}fjh3#hW50BUMZ*&qoEb>8?+1Gh__XIfQFk~kuR`xfz_n}F|>e+N5pjBNI zYn6jRwe&;koG`GB#|m9-#GLeAdDlhBAjETElx^vQCVj8SDLYoA+`h0gB3J2Hf(w@* z-%3;SkzcID0qPCy_2nCYVW|v;m4XEBcYv}4mI$UPlM3UZbsKds~_AJ0#46k#BoC@WG*!L4V7T1Wnz%*52MaGj=(oA^!3J_p>tTAnT`Q*h5n z={?MQCwW@Fin+JftLcmk6-1L;GBYLGFsK}XKt_^3bI;SR7E81SqwKLxi0GidTXd_i zD$kahsad9;Ko9go{P-`diA%&g!q{;pdz!x+SXa zp&BWY5eHjI_D4y!zuVniq@s4jmNHBl7mg7 zz{au?6{6Ww^Wz&71AmyEyz9Joy?0&k>9kLyqnaRe^Yjg%t#*}jwUL!=4K6XD<@!#_yvF+Dy>-^XzU*CZ<@LNT2s|vXAB| zM!|WUvJsy-55cF$zyLqR*<0Xp!hIXgR+I=lG)oWLJk312RIsP%CYSoJ0hLL+V_n!A zfTyJ)<#;D^z^s!!c6@3+%hRg$w%Wdz_gV2E((bo!XbfJj&FAlS4VT0M!Ts|lL=O#h zes&8J%Cbc6D_0LRY$O2!G6c&6y;Udk$pl*>YEqur$A28_$Es-lt3{S^#f9j|Xn>2k zE0XmZAjRrjFYFbex{sarMBCliws$7_|e)ChmyA^*Seol=~N_pO+f=f8rV#t!p|sxRyg! 
zm`5f+_I3*G2EB>4Hj7-Amh5Qc;i5GHP8lBSA-#C;)W?r%%WTZfer=!Mqe+{W%LW1h z&<^D?v)N`Y?AdBtzK7R1E9IVN^Xn)h9-%5HV~!l&Z)B zv>8}VmSgN9ihCHjQ;w*cgvJ!|_W3np{T{%;WN_Dxa+#TSOaKZWtYILZ9zg&ve0HcvUtoviSQO%x|woo zQ)9ClH<^$y&rwnufA+dkzLcrRwBj~U=V}pk{Hsv*k2Tc{)_7;AtrH<7REGb6a;Cc1 z>SSZm;(StLB}096NcngtZ}tY4t4gCz(7~!K8NA+yhfK1L7Do9+nDw78-OAen!ViA z;rn8GwE*9PlnFyL@~{_j!@170Lw7ONZFNy_#rk!%efJE@s!-Oo zO{pRE*xR5og)H-*{-ZY!TIpx^k5ijo_lIEh9IcGNjmc}+YSqwS*|Zpizta+%zP-CghI!FZ?$UzMYVCujAnwCGRoC}< z=?r*=+_kSbNh1A6j;$0!()6h54!H#h5bkE>YKd_1yf~hsWVWQd&iC7TINb5YcP>fycM_9O*(3Itw+V0ZhBv8egIaC7 zE>2a5#|k@CJ5))hnyN<_eDK;o*F8_}8+P$-dexz2Dv<9UX388C4uz zs#AbWtbSz2hGKrDRt7Iv8x-*Nr48IfosS#g@2 zqcd8ZFcj!aXKwzq|^cW&c;aUuA@~}?m>&vXz7$J2fbv!I)XcpLuxciQ)9_@H%(p_9-cKk@`GRbu z2Q=bUuEGPm8t<- z8H0Ig|G{L#W@cjC>4ir-(Mq6+*IonuvO(N8wisQ5LjtBH4T-9n);ReDTBrDf&m^++ zqVcJGhFP_q*ES4mg%Rs{w(*<@c=T!j@yJ4hBHm0}TZ8Bx?IQn-GkhCEUZpdNk5Txm zk3z^~xrK;m?C#lPeRA2WnHN}e#mt{}3WXe4wLWzpdo;8wL>*MWvw(>P(&)~O@eXfH zg>TbHf($MrT*80zATPaB`miv9=hrG4a!+;hP0)M@hYO#~+P`Kc9;5{cO?3HrFFYde zMU#_GK4y)Gu+WEJPnTj|n9QIY+;t$XXgl*UHSE~K{bvZ)OXmSJ!>{^hb!Qh$?&)QTqY@PTg25fT|0Dd-ao+65 z*cudH%`I66H2*D>0AwUvTT_Je7uf<02V0`C{-cupofHyF;b&!MmuzJ-3*=efZ{s!V zu4+$&JJZxuKaDJw85&R9uDy*9Qja`tmS{!XI_lp5M)eiVYC$!;b3n_1zw1q}x0b zO);1ef?+h_#;=i-&iCE$1V*B@p{pap@t*QTYR zPLW?y4_{nNm{~#yod|FbZRJK!Au9DMNkPcjsgUeFZuh4xc+wPAY~B41#i31AF=AWt zzr5-E`RQ6nxCzt&a<4>k;b9n~dg!F=&Y3tSLmkNZON;erX2F$yOME_J;x{p4tJKny zL>jNCEhz^m#GRaQl;EIr$(8FU!D7jzwO{KfXMN=NoX83A80@;GL4h^s-E%`S2r6;c z6ZneAH=L5Z>PYS<@`vYM8+&(;tX7~gn~@~SHuO;vG-etc_;j-2LKagVKN6}Boy1Wi z)+z=iQp6)jq&0_V-%Fp?G4~>YUB{nzM;uxo1-^#15?1A7KDgxQU<}05Nshi>)OrIb zP?AQGt9eR+G<3M93{*>PXN=m^xvt&pj&IwElqcQaU)Yf{vig|bd+_)gox%ANU zh(2Dg`7-GW()lTw7<()cV!Ix!fX9Pr*)U7C4y=^}&)U_r6_D;NY+Bg*+GbWxGw6$7 z2bNJPD(~()14|DJ^FW?iXfbr9@GyBjUox^J^mv^rzSg3F!LIW$5QI84$-KoRFGA01 zd@UFimM6L)^Pb(Y_&s`6WODQdj#*;0DmtCnShUHz#=|+#a4OveCMHem48ur(sVlfW}|BP&)tg0SyI`n306y%?~v8yWSIFR30GM3;%4ROD<*;!H*JBg)-`oa 
z?hl;hx~WRa?A%ax?c@hX*P$SlxY&kz9)afhJ(QgVXPK}X`&gHvHh8l+Oypm0#^Yv!6!PZ^Ol$ z2HpTYjgD7|;RLB2;=?Hq;ZBr14~t=V*>-L_C#p3J@c{X@3j{)T*+i(3eW=U z@XIrQ1v7Z|X*+RePMSGR%lq2-${lga7-mWbHgakOWSu?UKoDTUe}-hzqYiNa(IpXZ zjlOZMruIsx_NYw(+@yW=BeNbT$S^PgX8Z=otFo7~m^jNFizlMG_CU3=?VPwk^4`4G zKWWnCz*JxgZJa$R60|yD)O>eO&s&wzY;jRI;4k4fkdX_I`Qq4Qc_$d{N;@nMr%9-q z|5nuWj&?a%xO;4dl7hx{rn!Fy!05qWWfNkVzK?nBkgxl+vI!g#)lYlsv+d~Usv^YjVe1HXbI_YUv zL)*oF3Ap+&YW^Uh@GT93sX=00m2ne0e^fh1ud)n)g?V`#G*%^}U#@g$mUqrEtvu2* zoF~5sQs!U2JkiE}UT=|G1C|a&OIld?I8RvW2(hcc@BBv_>+??20*j7RV(-8+sZ>pO z{xdZMHom#-Ke+6cn`{p8Z+J%AUJV3)SnP;W{nG(h6{Is8=74_EJLLEWz88BWH%m5F zB_it;v)UVwY=3&zkYe%6AS|^5zOqa11j&eV98N@F)c}wIxrr;mEf-2xz>U92jZKoi z^f*7qwudWN{-PN0@}RQ1P&L1rv65k(s6;xO>k|5s`$yWpSAZgZ&S6$LBSAaZ&`mXi zHppjKw6PY*5M%}rs;-Mo=&~X%sa^I~(!($jbOb#xk46ba&KNgn*#_(LUVKO${I7bH zV!p90GiZEkK=Km%-i4oGOkVxyo&(&|M94-o4)f)MmAi4uXb4m{VJlQuu-+M0` zGr547C42OLtnm%t*}AFMFoi<6A>}S0Eo=_zM_@GFCvtfq9Ic3;j;B|8H)nA?0XJ7q zzCgkK9!6Yi{M${xAmKE!K=qn3d$h^^Mu0p#qA0=3{w2(trQP@!YbPv>qT0JwFvYD@ zmwQ{yZIl)KsYiNckK)Q+cKcy$k%R)U%mY4-MbKU7xeMce0|c#Y*S3Y{MZU9u7U&)J zyJKOCPo;rBou>=P9#{1d`y{_AEq?v5(k6HOrS~#uu(=vlu;zP+6RE=wxo_3ls!l{# zJFRS!JHaVMeIygITusQEYr_sz-uzl^_PEaR&H5A!Q{~)hAleM(F{a=T+m8c89~bG% zkcWc@7N5{WY)*e-EecA>Fb8B{s|%}^UJJ6xFwlAs$8pVG^=7Ky9%?(1N!ca+B-ARI zBdF79^K!cgl8MO4p@qn;yBOXM)K=OA;xSet|r zQJUJA*zug4YxqHC39Vc?LSYJ*=50A@lfxSpz87&cmzmzpDQhxuM(hA+KIX~Ba*zB4O6{z0zQQ0OzzR`;A2`Q*wdRMYV)w!FSb zHXTu^t~n>;7ZPOAHT}p6n}~0SUg|~BW>H)#CLmDNvdYFw#VKM0U@q>OO{17#pNTc9 zY)3yJgLN+BdKOWR;P||0UAVlx_P=-W26(|3D;>2PwyMr84V(4)e2^^{b1XxV9t?Es z--UneZV-M}eGKiI>`L~(6naj7LObTX2tS`d?}0J(rp%s!jH-EEDFpOL^do4LnP zz{ch1H4Yb@4k!MVC-u-yF>yt-bkVeuNWXWw8?3_570K*h2`2-APPESB@e8zvnje=0 z2PJ2ts;5Q1MCGWpG&ejhP98`t`3Ing=Q3dm{5=76(4006Ltj}NtzM38;Mhsg8N!?h zZVdK{LlX?7q1`vziBB+A3F{#&t@3U-549;)U5|{oSgCRW}k;PmA0{vu?v<%ZJQeePx?A0GzB;l_s(#xTLv{9F7;*GzNGZ4}_)-LjBSvy<;;*pDp{g=R9 z-Pv_$I?O5GmFZeCzAl_COISCk>`SRqYnK)g-<^?PW?gYwGTOByN*zl7Os04Y+hGjM z19Ek=ZJLo6<8nEA=lASk`6kSakb^pov_ 
zSDVE+DUbbnmev#4GXK6FP`=6M*j2p8vjk7+v%IzXl%|;JPR*bZM z@^SVhVhM(%V(B_A6#AR91kvwITQK|oJ$}QL1$lM zK-FxkISx`ZK7wvy<`mJlwi1mZgD-`;Pa<}a$?jk5Ji`8-u8)|l4*k9S+uGXr1DhW5 z8>LNP>(_#L3L#O`t)9Po?2j*tOEL?>YPM2@>aM@9_jThR`tNsi|MhsZ<6IMSWTAC_ z=vzE#LRaJpJ=bjxOMb^1U$vlK_C++!k?4}1C6q&i`;eSF`wj46i=3{L>x83q`;>h7UJJlZ5dse!b6XgJRUT(dsQ=j!)(o|^=z=LTf1d{OXcH5xiBU@ zg!U5|0vmRaCuJCk2--vHC@Ji z5Emz?=u+?v90`^CP}A)s^{NnTql;*A>X-NZ`=%d6VGtItEe*09jqkKET8uusxum-d zA?6!k#?0f3n8X@Y*1twS5IY5dvEqfq7@@||f8A-+f3mKEB!bHa|LgdX$(#76OtREm z9y+3uqJ>J+x;_10DN-W65dt#e$vi5Nq`N+^^QvuaKhJ>{o*E$@ZL@^Mb}Fj@cbp3% zH!36NPwbSM)#cA6k@KCKfj#f567*>UMiRJy0`d59q_|Co?hiO(NoRFN2`Ly&-<9Xs z!$11Dg|9YupV$-qcQYPZh`~MVUB$TJ3&hY9Fuq^PKzPlJcNE({zPaHQluCr=@ZOO9 zWXrww@EksuN*HH;yG7_t|7J54#4jRLoPtz%5fvTjzlhe*cL4foi z0LQN1wbv3=)Wgl9?-cG?t{6<^3cfp7e=} zCzjJ<7j~o3`A*=Jj{cw0Z3#QOKmMkp&G_;eN#Ao@+DmpdNLWUHiwk<-FbCpT&m&Eu zq(q50;#7$Vut9V^qtfmac6k-`7GMfC4k&>@Z2;3goI{FnXcTDFR>d4@YjhEcr!qf1 z2K;;Pj;|w4y}cI-yk7mpy3BN%3g^{>p9ttTwid*G)>&s)wn38$GD;)G43ueK{Z1g@ z#yN&!^aeRl(>3hZ&UEhZ#nHaluyzfGO5~cMka&X$nIW-%@QKw54m*{8##&Wmn+{4i zUr!hB%g6EYJg14>lT_-y=dG;lj_BjB+ud+!zCVk-AB@)q69$N75X5%bS3BBTxolmC z{=4G)4^Lo_O2B~hD@Jyh3+xTBMzSKi9DX81xmhk_wh+@#VM?a%n_*b3^~2O%M?r|& z-iJ)LV2i;ZmTjlp&x1}1eM)9D~-NkmKdM5fVQ0CK8$CK2)bPTr_Zk{&mRMw3@MI} zNdB^X`G>8LlvBE%=w_@0D?({b9eON;&+e@hJVQCx-!GN^%=3MxOJhgl>HSWtSqVcY zG@w=2k`-rJK}zGq7)H*zWM72+?(D6q$T5bm5rM|b7T)pkE?|YrNp3T(-jct@z%w4j z1-K^VKBL0}+jQ)fVZ$YLNucBL7y)a`0fsgeWaCzT_78*KM4&61N1a#;d~_bByWP*D z?vZsSsN+{CrlX2nkxtSKy~L>$rh;f;F{cDt3HpUvK05DOpPc~18HMXPIsU7nDtB0O z-e0PABF}^yu^+lEhPR6Gb`x90g0A8vTCvE+8pi${(Pb3HYk>2Y?kq*Jb$?$8vD&5d zqn~Hd95zrF5g@8!h(7{TAU@RFTreBywooPx$CDiemK}427)D_ug)GvpH-H5I_cm{* z{wM0v*|!-YGCwMd%mqwcRKkqc$hg0!6)gx`DGw1ioI(SY72_8s6U1S@f|;;2hBR{g3U5d)gL?{y! 
zB&0FBZpx04dYL0{TP}kmD_%Y^R!>bbRDLT|tj#GAegl99TJ#&}gr*n+JCG&M+)M87 zA8Wy|@yz+;YI4oevTAfukdz8Hlxaq|2lb-}U^S7{1{`HBd-*i{DwfSV50R9j&U&G< zRPr~yJI)1o%st^)UxkleW&P+vnhnT3*|~Rjq4MiFFI`KAa?e`7o^_or_2laNws&kT zvfD8H%B~uua(-1L6H#u~{$N7hiQLwwwwO)x4@43fwFu(oO5K`6^4uH-^)bTbR z{_fj4-pYzWy1sy@j`oK+Qf~GS;QitOlV4#+`Z$=nR5ybm%3b34LVD|TYyIDDgO?pF zb4A%2DZ3Yw;pUX8!{)QBl?N3|M5nmY=(IUCl&ES=M|^dZ{X%Siu<-`?sp^y`gjjn$ zC}=gi7dMn)P!(K*uN9My?JWi{-UU%b@_(A5tb5GN7+*vMan8R_O6(5iwg(EnyFjH) ze(Nu}B(>Dd>COj#Im!@`-MHYZ0k=Qaiuxl*!#YhHSZd{%s_~~8BGna117^34$uEcE zn2w(%7I(>d!S$;~y7VBQ0)0V31Q5SX0ET4NpKwm`A>aI_6yM74PwF01t5vfF&DO0+ z6_ct7`XdLko`K}Z0^w}gBtS@p1}ZFf=i`IycfUCypCNxK^;PZ z?&lBt{ygT|47?8!@RW+BQFcUh>WMWy(|#vDt7bkdW3C9i-e$4?{;2>*%PTzFVkdmW z&?b_7lNZ93pRAvG}EiiMkb(7 z*-Gk@Azem49dBRw|0dKMR$1bnt8fi9aGOW9&0 zvN%?K>fwT~fe82ANOjt*VM`LYF}B7EFXD;S&be+itUJ+cDC-n+~U z1tXfVmHnsmH*!=kBd&jCBFMzLh9#)VmR`7SP9;w^caoh1B`7UOmIeJP`e*Y-`FKO| z{PxA~FRjsXA;|#{aq>D`okLlH_%yh;x8@8@d9>S%TjDvD-7nj}+ z{5$iKJxF-lTG6xw!B~7e^)>aM^r-G>QT{zfC=}(=RSmaF$NX{0^LgEsD0B zK=Et*nqOinAPry3tFWAWr6Lc@<~2;VDib_B%Xs;)o8fkJe*-^^uiiZskt^d#FAVdj zO?q73Q+zl`6G5z>QQjxZaoee0e-`!ukJQh!$@t%NbXlqlnplCx|0O|49A)(CJZI>5 z;^q0Yc;fy|t!6NA!8m<{7B*h!C-%x*%G2Yc1&CyHwjPJXPOn=rVm^5zqRIEq}twB9{TLdc@Fu^>S-?J+aJ*k)zQ&m5OY3ZksbRt>2tyP?0oaU zQ@+OwQS>=O#0?!8^+;BwLT}+d(b21F9)Qz~#`l$Rhh(9;f+F zI_5cEnkj!T+>i1StJ<-6DE#P@=fW^_>dFhj?%al2BMs^XHy>`|ZJ=evqshI53-(*8 z$OtU0;gjCzmu&qMx!)Bf?D|EDH;eJ*%ys$lKiIXj%}912V(8@${Oc-`ceqxXp+(B0 z!EW)msvmXsh3GyB^`yzsXXm!6AYgoaK0cvt;v9CWVw>LXClJ)#oZix5& z`oocG#g+ai3vK6SXV0n*msq4GTGKH1>$3onW|psOOySdMOyROIXu#?)r&dW~Ks3Z4 z_m*dw%0>of#ZemD|CdG&zsUUO0rtrso_d7&)vSXC#;H@okBV1L@kbjotLcA#rJC|C zH2uP2RBL4L4@;IC*-_RMlAA)w;(cieI;d9nQ4)wV)66b0E$G&ruD!4-!E?qU?`&qu zsGf4oz``~7@E>qAWuD0Bo=pXa^s$LscMfvkpTGZ&_s4d~k#3hl*_3lqw_C0%cdV97 z<2q397F)Z95^g888kkvhK9`Fp*t;NITRFw-yb@J?)*@jg(t zsD)3NV<(*@oM&T#rpfpjsJl`gNusB=gKg5L(p9_1T@p!-sc1TzxR*NE6S2IeGs8i!kc=n)2marWj3y92r3rktnr)!!gc` 
zf#zKpgP(Yb*D5CW*>gl`Rn2LV*={O{6;Y(6=%Lu_x>@v`oalLw~iby3N7}yd#Zg`tL6D04VfJZYE*9bD3YH408h1_ z`E4!5SV`s+R?E%Ym|Uy9;mbB8M!R#QU2>@15etS3@@`zU=GsR?24Lqi4fGqm_}BLT z0C!(!PkzKxy}Reqd_Ob(TQ@hPQowYqM~{s@etkM;md-VIySzVUy8YwbJ8w#r<&g2* zi2MxxbD*REiVHG!Oswn2AvS6MU~U}Y1j)&bMsyFD_{qV3a#D@vNXpbyaJr1y2tygCf? z-NU?DZw_dM^vN4xO!tanZ)ii`Ez4 z=Bvgb$MX_g((nN67yR$~J5#rLb|2f{XP6JP27Q=4#jQJ0uC}e7g&W}>5~^a1YO9BM zjdksOyoXI)H;@jDrhNJ%wA@RC@PkAymLAekM!|@&QfX46YZ{w1^d~>AZsnm6asoA` z2#vYNwojEd4(#0p?f2J=E5uZeU1e~Sq*BDyqU+NjR~*3QbIL?`jYRp|+ox=n+;-O0 zgX_gs{{ZIx5ooA;ic`ibERHM>$XyP34qdtCTILJ_0U{3QoxGOqs~YxMXkw+3#aHvX z{ECTj>ryy8RF(m`K^ZOgVswoO$Q|9dHx_o4arw<_eYur5b>h3l(mW()+^SxKnGO*p zw7P&KL=r@45WE3_@jd$UZFGgP(zPfR<6P8)nyLVRIfdW|1PJgt3V_?YdpEmcUe2&X z<~3uUM#Dlbm43lD>m~D}S4g)71Y4yIT{CD~f3AqrBb@V>YTdiit;2xQq?4&wRYH4#F$#BEx^eSaY=g&#}sBFWNPR z>Z^7S?+tBi?hOz^$Ba?8TrO5CpmA~M2P9$h=2q;w?ZtS`<#+8Bdhge_$yTijs^6Ba zWr1po1T&NbOIZRQ2qQd>M`l*5UDL2RvRzuVqnhCQX!P6`t54DY0Q}c4_hp`YUmb?w z0d>3Xv}T&d?0C42u{>@JOdx{9$&;BUEWjiYw`1YlE^tN{y35;^k}V$4kKo2y$#F1~VE{{RtZ?wq3is|R+qOXmpWP6)=@k0;}o77f>Z)HIg=c}Ly#E>f(U>m z7yi@nt~$hyaX)DHhxn%IqD^-zNnR4K+$7V2U%p*0=~qJ8xaQtRIwC*@0Ee>1w^w$` z0+p(w3j@BggsjzNRg)7SMOlNX1Q{{4k9NYY*JE46+3ntamXM6&N zPY*EH{{RFfrgM;hMO1y(;kcue(&m(?BR-KIP2r(~(C<_-GvX$ia^{P74Yw{RZYZG} zvyhpDH960TBxevLZAIb>5N`Yc4SVyyNmdd)G|wX-#&Rk#IS~eUhZ3FDE8N`m7TwGL z03%$r{Pko}>r~rUO!|g>Ap1V7M!pAg$5_*5$Ai2;>Y4uloBT=JcO27-QR1#^;G~mu zc#3g&DI`9j1G^lqe-CcWl($Q{ylVK2^*$bde_b|0n!e6^nSI}Ue~*sNl{)n^Pp@C? 
ze%8ftUG)Cvd)=RZ+dl2SC^`PYoT%L&j_K<=D^D|3Ub=gco@f00k56XD*4VNiP7pjv z8G=rE<(8SoK=d=VQBYO(E8ov^-PC9IN5`whHl?mJKtsP!=wrjg`wxgdsn={=tXD?x z!CDO!)<%kq>4=y_2?jMWT-envtn|gwKy&hIaG^vQElz_9e32aT6wPt(}`;@0;tz zy{$aKj|K(94N`dCSAO^);kWw0cgaumAKC&R{#5R&Tz+}#^pT%V!c-m?#lkLK^3I!_^Dw3y!;IjOJDRXl9M*!IXr;iE#!S{%}!eRkL(#_a`q=X#Im#Ts&fO{5hCjnrdzyHL z^KVIEhhO~bY_pY?xHf6aDaA#;n$koWDl{Se3jry_@&Vu5I?LV#e!XxHh?9Tpxj3IOke3E`uEWE$jx|$J)Kw{{UdW zE4x-b`8(3?*=fT4miA`RY&C;BLByD``xS{@n(tL!{ujC38r@o+aqbpm?3Po!HQg~+ zpDu3O`#8Qidw=ZP8{XJmu`B-2uXt+^HDvz)1+@;AEZi}^;mBRHOJHruy=d{6If0gA zZhiSg{HFUv$1H2VUV9$JU6WvJaf~~(Tv)ud*j>8K6_;Z1P~EvSf&g~Q+nEu!6XJ|X zdSa8Mt*SqJZmt`YxXUc%BvTwwSDkd2qWmv)LDZl51R) zSDJLeBv#45wJpnsKWch{V9f5Dmk;1PKV#uu{{WvQ>zq32%80*lZ$+wH+hx51$hTR; zpQ4tzp6m?dNE<v@D%8zM3A*7J^wJ|O z?LYvNl*Zj3gs*qpF1u^y8cnJX?2g!6ZA!Rk&HH8-29DdkV6kNJyN2$Ga@+!5G=4Ra zq=$tpe>&n!RMO*#i$^?-&iJFxo6+GrR%YNhGTFOR^p}Kh5_AMfCu-E&u9qv7%F2rA zS5<-fjwo5OJgO@2vqOSbEv97Bb0m!R@6VHkw#e?^hp`LAjSEt%G}EEFUYCiGJjZrR zGxRVr15=nAJ@$mp5!x|+E4C`5Ro*#NAhXK?xh0ud!-Nn7NaeqX8HlYM_D6DE9%;>B z(Mr5rJ3@6Ls;-iAD)HqbIcqfo)DmVyMa}|xl2%F>qOTpyQ^6>#tFWmA9uI-t5>8M_ z@EcOsdtM~N_cvzcNJ{V$<<_FK(N$8BATK;|8JeQ0%5ynnSXd~ts`-1Rdx=ttkdL6@ z06>sH{{VN}mv&6s-zWasnD#;16|U=vF#Vgf%WBV>y`v9bd5Ub;18-ZLL6hK%gdF_W z{{W0<&Ucx2gpf#=pg5wb~TF&SsQ=5NV9|jkl(%nN>|ASng|-)1TiSUvV#zs-$XDeIrE*rRev4FQ)#Z zy##y5-LSPa@=m{K{xs~RYHre{G=!_)-rP|{ys3fGd}m#^Z>RVF0ChWQN+VtR^zt8m z?t2mRZ}xw7%Og!(-thjfzxn(;{v@>&n5qQ^fjflvK=C?j8GAF@V`6P=P;aS1)Q%EG zG{nf^qH6C|sm?PHd)deWZkv7$xfV6Lwb9DgR|uk@>WYKJ{{XE>$m9gaU7>e7E#0ka z(xnKC;z@B1wR$1?exMOh*FSk1S4EA*G7`Aasfq@2XDRa$-OH$)=QNRG8H}cy^x^uX zPIJ^{yB?_gW>sOrZZ3ryQtn?ATeymuh^U!^F&Sl{r0;4@-PiJ0`8eRLaoVO3!@aFu z?wE4}FimH;?-MX+ORE}508lbG;vG3Wo_Y61q!2sb5It+ydp^=TDeQw4+c9rvJ)->B z&lIq4Z$(CmHNcL|;>XLrUR*VH)yR`hq!H9c!#nv__JeEe6BT@y`#V{K4#)dMe!}b9 zKW8nLT_rErp3Q9FAPw^;EgFXs^6=4|uz&vmcY-a$cWwqPZVs%Lx*gZUqP`k{H&F$6 zD5FqX7aE3@HR}^dB%O1DVy2ZbRpX0O$0^w}O+Ff&uMIbOH)6R*K{^PWgb|j|Yr_Q^ zHu<)?w{Fx&6RNVdQX~)$i1L`IW*PZ3nyk{LDIeBuC`!qH>z&_#^6=Vh@}{aVN#(kN 
z*+f-4wDFeXDoAhxig&(@evCOh=OdW#vG9j&cl;G~qUD9|OEp+FSqkV)Wl{t(!Psu${LEm}=BBp`vaqw8djJw*m(a_0+|y5KFbJIIZdVxPv$|GZ z+)Z}Is;X;5tO`jiswdO>P&DO^lM~QR)xIq4I^lFqhU!1up%AX;41Sl5{>106_#LT- zHz)Kgrt+P_udXu@rcye2bOR}vW>%$;TDmI(RL?|aW(bH8uRwgHle4wG!zyX0t6t{f zl4wm{xkSW49?E0RGc^@ytJTla_YPz8<0ShdYc~luaP%ilV9}7>qaqa~XgJa{_J}0G z5=iMAdxniuqVTQMW+QG3!K(n!9bd&s11gD!XSgJ?CXKBPBg_hy7Hd{J9CBbiFoiT zq^QaxY7J2e@w7A0}<6@y#b=K7_$~k4!RU+6)aN)W+L2*~c z06SQ{lKhD=`qn$%I>b0J%RDVlG|LI%>Mfi^U1J4M3lsoX)pYMN4tvCIfe|(5Y2P61 zGQpMZ;fpVJyB;CKtsP$$RohUvKXBlc&|>+!Ic(Hj3?6e#fy*ttzq@>y@pIF0yI{K^ z#ElzDi`Lli_-OHpAa6*9uppBJnTQ0>En}<~aKm*vvMq&{Myg7o71UUinj<_J5Gf(o zfX{koJ>8!>3dEhDj6-t5m@|~aO5gAXIK$6exLFj6xIGk?TNSXCfNk`qz9452F}25S z`9|VwHHMVHZf&hw*Th8{%pD+lde&m=M=wg&oK9Bch*bTP$<80)ny(m zp+khTzA-C~G;|03aA&N^owzR)Ve6%;^~ltR28x{SZ*&_oe349;|c8x^}>>wV{BvUyj9L+_fux&#`1{>@%m zANT2R>vQ!VH!D%T zj};V!98in_oMd2syi9b;Pv}?Vso0-p_mAg-*sllPZ&(4hXSk`h5oKq8ZMHde+C6r4 z{{Wh+-F|Won_Vq-yV;CJ&6zKQ07%|Bjy?HBc5d;7&DzQKP{#^<)UsAe=utP8w0WoN zI7PkHWz`MCQq|L1zozChB$8n8HX*Hw>6|9-3O2*HT3_(~dO_OBNJwyjT+b>RhhjO* zjze9j`^Bp0#d_(xnXMV$C(|D;@fr1i0W$<0CeFhSmVW((H;U0sCUTkylOFRG9OGCW zB0$~S(!4i|s=$aLjhR!+ByDKcuv3M?<#_0#IgSW#%>d|S5uDCM#Sx9MvASIAb!Tw3 zYq_2wN&7ulfIfM{cmjg)> z2_%95f_8wqa9KLuTZ*MspVB}T1%Kob*Ft{D(`AvW=276$#;fh2#gytNPt_1NWOxrF zmg&;n6A9&B=MJ<%;t;P{(mKE~{jBn8ZE&fLTgmrpsZ$gT&R#yK5h5eQI1?nHcX2|d z;;O*zjumuKmPF5i2cgIf3@dK)S8r3tqUVH8u;I(%jpz-^FGMWcWI5YA30%e zD2oeK(yP)H6-$ajgEQVPAmk%Rk|Qk}X~j-1_GOqhuBy{fTHO7t)l_{UaTyMzbq6sA zAr5Z4qsE6Kiopu0!&CrCltIhYjI;Yuu33EXG-^>ARaq)4tuFdL3jYA;4xT@|S6idI zeNE_9juAgN}xs`qy*-}IlQ&Xd4GeZ$)xTRz%q^fd2(SUdS~c&hG?IsX7HHnHsv zt<XU}%~c~mT07gs!Otx6%n&W>3a0CG7VaXZy0fCN41t{?yqV~A_W8%BJ(GN# zdqM3NvnxA{Cxot-Tu0gi4$y9xYUJ6fE?ZmJV6I&i?zdM6{k7VT`iBh53t%;sKVCT>yD|n-WFkrzniYsZ(zC( z1>MVhu9p`<+cs?8bH7fi-Scs`n78^xfId#$yL^{%P7K0W;l>^9M%~)&q+Kt~J1S9p zWTprdJ>z`T`svFzZFP6^XlWlai9O>v#^0(Q&-*&;x%LUc&Ysh5qSD@)u5>eUl5Lb# z>Zn4z$W(_>%fP6D3C;~O8+m>5((LEto!XUMp!R)?>=?tc>_t+I1{=pXK|tx1!t8~` 
zx(tzIy#k6_wz{@-8X9i&({GBHrnhf~(;gUeMb%tE=`ApzQ8Vt7Juauq*acr=2OEg)6_e%@aE=g$na?I(sDc!G1vaMb4_t2#+;FQSzMq`@YL>DDtKoboa{ zjK)Vi9pMa!2BS2|>zIsy)R>V^LnNGn8GpI=<@kvF^x9AK{n~Wbd;6My^4;s_vYgdb zcehjW&+yOVp319FsYv~;eaB+yr*^LO6XWmkBjRVJmiKkHoAgZH-Fl6dP%w?PsGV0#MZAiDotWvu+M!Hi_@1mjLaY-5a zex!`%37GMnzhnOZ$s^f2d~0L*vk~@H^98or!ArgqMO7AdRlMS4cppeZmkoZCES^A( zGx6J@zm%iozi7RDl5SRuRv+wIpbf{hM7F z%NXHz38ID;ua84!sV2Q@u1ZQ0K;ukhFivx`E&;dV{8@dsSnSo8Hyfj(&4Nx{qS+-@ zR(Jtr-mclRQzB*MGtd~!Cy9mF8Z7r}0u<1x^nF#Fis(2X2Sa$7oQNaav~}flvu{NW z>Q=COwW6UMUMNd=)5qTTZ%>@;r59U*Ehr^J!=37-QA)%>BSGdQfDFMSEuC=f;>T6n ziGFVB&8f3j{l;&RYUsN8wZ|h1@Em3HmnQM%yz>~HfwbKM%2!FO(#;TX0vG5i=u;r>E;A_0xAkNu8J!LF3{gq-zBvl&xS^N7E{o^kU=x z022nO2S_vBgR^^Dmf{`WEq;!q{{Y!*UXkbbcoRDf%oW?$yI7{Q{@Rj7A!zqh0UpiT zV@*U^)myYvO4)IH;E&S$WAhWPz)^H>OFGkp$tQ?)10p0CFdz*edb={MduPKG(zsTo zB_l`G=j@Z8gr7Lb$Uitbs;WM~pv$VWR@t>gWfQ0{TDwVbI8>jR&SPN z^R;CJcLeWneKc`mbXcD3=6=lmytdIb_gAei(pEYc;(}yV8qD{iVlwjCb{8%!G!aT?{=1J4pRRBhVLskTJ33k-7o2Or$vKDoRN@TvA0|)fOg{? zbBk{F>|c))V(m+-3$wKmS?$3d6Q}K3ydk>eW;v*hLCldLfHp40+P7(VW~?`COB1MY z-WYVP!qn9zQVB3o_s!+=h67hFopQ|e(lj{}>W5?h0LnkwCM3EnpJ;fZ*L-b=iv=#U zErhv4d1}Js+3R-e@?pD}AOK4<&avmFPN%xA{{YHki0E+74R(Kv?u;-nMbl(hIN+|6 z2Ckm`HAF#I zo3>0$*AU+@-43nS98??J5q7&9$2g)_M3^e!VuR<%+)#8|JCV+EPYcu5;7F55WA4zL zzrvQ>F>zp)53dw_=A%~JReer3y+oX6aN)C7v-HP+!Gk1-2aqv;Wmwj(&)nndU4bU}H5WxTl_=!%Z+$xB z{S`Z|UGjyrya!n)rhF@j7r9&zdvm)gvdY%v)m3f^%BZZ7$MnW#CoJU18^dTqJ=Rp! 
zq(|dSe+>2+N@qTYBklbD8)@qe+P=rBUVrD;Z@bj^Zx4s&KK+%agPxpBzh6iiM^9(J zV(DF+=UAiOem*-Ag;dj7r!5bE{QZ&d^V#JUli}-MkFa=wJ!4Gvnc>GSDR_-RI!@;c zxfSP`{gbaxFT36Ei*B|AH!ER^Q%=mAhUF!L zMrb+#cgv9l(Qe`ZFb`ay8XNg)T;SU)G?#A1-m$5hmX2wnsi_Lc2^F|O!*D>|%+gHB zs!nHb=aKT0w5@8ywC(kG*c7c4qIyTGsO!_O>+1y3L$>0o zsEQNeDxfD$DnN+KC^B>2CtV0Is*1B$=}ER)v{p%0>jakctz9i8C#~WDEr};N2Qe}L zCp3JcT594~zgl~#@BWEzSFeVO*EB+VSfpM<~K;w zGF~d|(XG1}-YhJyY$(H2G*#tyy~UldO|nSNT%R!D{VV8j%#8^nF%irvnhc)PQq8%P z=_<-fD7t#0CBR)7A4M`Vn3>KtoP6TL4l1IydQvZaFAZR3ngIQ7@e*|gawj?62WXYz z`lFWCmTIVY)vXKhMEXDp&PJdTBX{hb;vf-UIKHo1+(BJ6%D#`%ALBV}+ zJa?Z+Gm$!I0L+N!>kdA>KDJx#)mxgRLaQ1ZCX%76R8?3j^nz#lRWf1>e$HoZmBE<* z01>B$(Mj|AYp%wdr5gt-tHVo;n&k-1txj7EwIspFM$!CBe>aCNxGPh*vO{w;Rg#h8 z2_rosIvDpGcHq7X?Q;P$>~9!js|ROz?*~4;V^`g$U+gb0ElUSB*2r3L-y>~+)vHp2 z+2tOY=d_7Cc)i+Emg}>3l{Kt#Ox0Pv3j(=G-9kQ^IPzLMbV&i8xdwI60d#iJG5 zZZ_o-Ys)`s;TLNngQSsJHF&V$6{-4Elhk=;XK&P}%99Me`8izs5$!wV$&C9z?AFop zn)yAJS1#5seX7`O*h3y%?6_9mw`z;pUtl|m5n*d{aPgSKZyEE?Z=^6b`-Q>|-Y8p*R+(pzAD{5qCvD-#6vPqG#(9|_5hg$;PH;gv!3n?gk9Yk$6aJ@{K)Y?n~{{WZ)-<-AH`!nvHm7{leFJFtEoGusV-LGAw z&ML7e-rMtS4$*QN@b0uZHE@-G#&PM`@W0i0UZ3K2d9=BqHxzLc8G7~gjQ;?S=S{nB zEFkno_?~nNtep+3HKf>0`6#n^e zw|1k78aiWGDIoryR>p!0AoPwr84wGz^wJ`n>KS>v{JH-CDXX=!wg;V^*>QqbTWS|Y zF6G;NG*(UD8%5i$clS>!xHB^#Oh(_DKbF1ku~l%dW0u~@T0ghkt+-v=j@mIb!PCBL z5#x8)CDvU8Fq*Cxwzaj`Euou6ZrM-Ig#Q3QAQ|&k?5(<&+tl8Q!n%y7ydzyVq9;As z(mS&m?9XdWi{D0TGy_g`rej>-2pgj_F#r%pf!bQT=S{0h*Gkl^wE%IHOn{Q2pb4D} z2r;Q6AUbU1_WHFhDB)GX;g0xBLaK2{01ybuU`}!kVo8W-Qk5UPG}fRMcccFRC`lmy z0FH(ah}aO(@_$O7^i2X_&#+|?y}6**-L$UPZ@9Wm;>hl9Ge?+n#8EVq;c=(*%Ka~| zO{P}+D0JU>6qQeL8NP2!hnLaZFh&qj! 
zs--Fg8`Y&&@%oY_YDomQ{^^lDPEk>vl6zY^S(bECchN9aq<}c8{(;7yvP@|tbQ`WX za^c4!(_6)1KuUGM;TX+exq^HE9Xfu@&lkQ;R&{{&vX!&lO(xCN`sBd$`~WoF21z>o z^sxu7DsbMrM=IvExH8x*KDZMoFI2Yvi<#*FXCRVAyIwAA#cywQ@2W=)MEJZU(@F_5 z9;q4S5#mT2C2PetmTFdu=D0QBQ6*FQM;nCK{kj5UXznk?7*>VW7D54|ij@khLM2s^ zBhaqmd5vc7KpJ?^9If7iRF4v(= z>8FW6_0%WuSVo|E_1gTw_oZpI&=#!FC_VJ@)_gZd_M#7rXO~3b^f0TgK;m1wx-F&B zNxxd(!Bx^tVDb3`6(PBH8wn-kl6A;V(d>7J?RMM6A23CMqUjXCp-4r^B>Hj25FEP% zQ#ogu9d>JPg=uSXthb>m+j5H5X;5)gNhFLeKcnRA)M7{iGw?eegT<5I6s)Q=IF2PM z5|X5%M=lpRjN)|9p*fAZQw-j)x;7t~(Ol4%4~<%z+3~mlq39w!qfUb{8(Nmjqna&> z;x=y%+~)hM@FrIq=_ED3=r(Ywd_Y|#dv1rf^T;FZZWsg{Z%fDgi>I^ zqOBZ-&_j>(er?lNF4@e%=T1UK&E2+HT3cztDp{>1!)$ETSC=+$j}G1yRmT9bT^9qI z=B=ljCzKPfh&&-iCw*%}72&r?m6%3IYD>I?cPK0^3Xx$Xzq~T`kAi-{23tcW-mTun1jEUNNk1@UR<(mukZ1wy1 z9hBRZcZe_&7gCB&xK46d@<^|6`LZ0hCzCHs{{Y?qfww*1=~2DXgq!fIQje^uvY}*^ zSAb8TDP()pf;54wl1A0WCuK@*4ZqUUl-3l;l{?XvLoommq45W5b=4cEcA2c7AG<$i zhfzBWqV$gJRWsl;`1Q&*Rj=9q0Lvfp?eI#X)+z9rh!eZ6quU>?p7zP?_=^1NA)qI{ zJ`hZdpg}S+B#yI?4V13-mwTpSccydKUuTw*W`54jY20ZY?@7O`Z)u)+Nc;T!wwK}i zr4?INwpVOS3f4>`MQL#rTqOhRfIO*!V1OX{xn;Grhb|v8Kcy!0O>`o=aKF}tRO0#v z4g%lN57I=>T*h-f)$(NRBeYJ)aaI?^*iP)~oBse7)YgZV_vIc@C+l(BvE!rkZf)6a zWITdqM8;m=cE{P4F8N*Da9$x@{sFdPiY&_9ZneBx+iy3H&+z4)dy?;1v3qb0-Mjg= z-1XzZ&V4%JkOp@A%yylN6}YvRs^ zpsT;>H^%P^(x1?Gq7w&gr-2~XGJB!B<`Jbywe1fB0WC&yQ7 zgVy4z_-iEL8WoxxdUTa?kotu|o@KX=pJ!9MJ211m*GqlsmE)9(TZ(ihM97^cCNqLg zU3!1Bzm)fCJ(^%1(t9w%H(%zq5$(+;t-D5H%CuIS(+hh1WEd{-;tm`&Y}L=x8J2PW zkj_q1O)lcQvWDeaoz5I@6DDAawaNg9Bx|JPHfBv~daSK`n)_!t{{RoaW9o3h6zHm& z`}OBf)qV5j?d!88!m3kp4GRrq0qA^ppbESIkSg%*={j5P%{RA-!ht~}KUQ@+(Uj>1 zoa0=yJ!{#dbILh1@Zm~2K*(f9WFJ()BP{xC8u+nCNfhgfWFn<}E~=&h<|8O30RkYA zSGo&!cC~JYdwzlnY4`$ZJKYTB8>U2=$VSn}qz_s_2S}aM8FbE)2RWJ9cuk@OGyuj9FI5h&FEABxq5wQ+>i}tdg$5`jEF7!E3tFx#_(b0w9tv zn+___7w( z*H#AMV_9V>P(MeVaL>^4fQGW=v(XPSISCME&$)J8+O@9=SZ>rNn`<51{pPRwA!7Uf;47LKa;YZ5c)aIQe>BHI_sLPN^ zTApyBg3Rs?3$fI^1^^jAHBCE?J6v}{wi~t=UWN$ z#!@zl{G_`_#9grC`@TQBUTt`j5aUFeU9J~i&+Mz=)()NA8C%x6plp1y9I$P2`WwPo z{Wnno$)_gve 
z9ixh?g5Hlvs$5iH{XCv!(`?`XK+Zq{VztFcq@uO2P3O{9d*iPZPF>7TR0^q;%(GyU5*QkrUV4qGmbX}V4pqcW>!X-QoNGczM1W-1VKje>aM z2_Tg8j}S8Q)Xa|qR!6%#8r9o<`UHY0)(i~iETd9-5i!;yYeig9cD*3koBEQx*4y&W`Bv@of<;HCO|5pgs}tC6O4xA?e6@V4JQixKj}>INC1x%cC_s-{ z;DAAyBS1Qv4|cnXF?I-jZ))Xg*J9N)4GA^ktIDdl<`)hNkD>x3kKd3SZ`ukAgY(zT zDJM&ZN~gG@s2AZ>Q9)(b3{pJREqyuZImdXNs8@-!;&aEpz9zLNyO{%Bce+f0KYC5~ zs;hRI((Yv(QSJ7BkU8~{rs}HO@YIR{_2@Z)2f|EzBX{+NxT#uAG;r(h*E{{+Mfm&- z3HRn0UE^wr-xO(_gCvohifPg!OaYW?3Ef+TPaGOI)g2*8l4;LPKp)xieggTVodEV%!a(qDl z01YQE9d^>1o6t9@{#Z%p8&8b~?JI6|@ z-6l0Lx6VZQi(sYqoV(mT~rm$f1sM3Cl=+AYS)Cta=o9d#kTYoA)XFR++-@tim zB~?jIyyA%-aZnQwbMFkKOboNjXEw`H+Nw0&3wP`K%1F;o3IG{ShIBeVHMyEr?$^W9 zzxd{QYmkp-(#u`xWZO|yRqjmggwpX!e=+;^xGWd-Sh7h(Yd(*Izgl@8OQ`}Wf@j+! zIIAtXcd@SB7VNVZ;oH@AVTe@2xQ>-8usB$h#|8*m5%GXxNs?fb5n`c;aP`%y&lPx* zwH^_og<5jFBvn`*Vgb)R3}{FiPS#z&w9Gk%unkBHU9Ah>J}wB})M(1Pqhy^k;Bs95 z0047sftetapc!jQJMF6QwVI__JjAP0LaI`NJU~lDN2WlplmiB1F1Zu6Rf_vXxnCMC z;_ms-E{_tk!aOO=fPs<@W+zGWA}HKhaC2x%j5^A-Xs^*cR*orA7+2FE;dSRD%Q(nY z*F2|gnfV=db&Wf6?N0`FSJ~bge~Iz)oJ#4#Ya$E_r|zvx8aTOKkYK?h4$E6|4;}y~ zGBb>iKz9Axe_|L93S)d9*so;woEO@Am99OXUueI&Mg6U%u_10<8|EEw$;H0-*BH8R z-tf)qCv4v_V#VwNH9lWm?u@TARJ)rX7q!&~ikY4l3O=F)x;mIWbI&q2L0T=~xIqJ+l@FH89QJag%=n;Tbmf9z-n-hGGL?>hqFx%72O$mLW?1jK?e z20+wkM^3J-E26Rhl*m53f4Sq9EhE>!%02#)2!7+esy`wAJ4xdvMW@%Bpuz^ z%6-3TPpm|Yy6cw9r&T-KyZed$zvnL=nS6FCw;Vdo?O<+K>TyqnUhl_gb+N<5R&8+K zT7?3qaY+zlLEb*DLUMy944K;N!kF(8Tdnom87Qp$=%DO zQ+Sr~ep%g4E=nk}l`F;HQBtC+>7pPBfK_C05cI#Y94oh?AR(6{ap9V zbo5`HAo=WSZaoz=?wqPV-6@ZL%W}!pxOX@D_4wsqmVXVCa|X?wH<+G|cPPcY%{XclUPEr^-F< z)mRk|Xv`lD^q%*L9>Y5Vi-g(&N}Nj6%Q@>OP$yDk#1WT?Aa90U415S6`)ZJPyykp0 z$)PRnYTU|X(w7(R1KHL*HZ3&`Ndvh)k`%AUDt_TTa?hs0S6??s-m5Akfsq>ObQy>P zJgNZ9>es#ApZEU&A1Ukm`fc|Xztukv{ySS8h<3M(dj;)V19s!tjn$o#wEoQTm6se~ zw>L{;8{MAScC*n+ zwv|=>6lXbkWtZMP+isGZwHkBr{QEyXAjKS0R}9lewGQXHqaAgh&qwh2N?eL*uZco5 zBivO}>3fxChJSywjP`p^6TD428$~ox_UlYh{cpG99)9_4rxR3~VMP|)uC&He=~kWI zxyPPe7^^kHLtOI*tjIDlk{}5#oMnV6 
zwAm>__UeoM3OZHNTrWf#CCh|D4kzhuT~#ILiPTPKMt57@EOSm9!@RBgnk^G@;ld~c z*tHKEqsJ8;=Pg>Z<^@kVPq<`jp3!aT;7nV8veR<2;sotiS2oHo7I$ktu-y?!Vvj#+ zeGQztYVJoTpRJy{_D{&Wn@P!-(UyVU^bS z$DD9}Yq|?3HqOs@sJ_zWBQQ4P`|H)8sPg+-#dVfdWmIzF0=|>eI0kZnH0hWI1WY!E z9JMrX=-epBaAgxd&}Jt+caHa6v|8m;1b20y4*ePvQlED4Aozd`_d&|PxUEW+ET`M< z^%$r;o^Ip}9)X07e)qx0qN z*Uw#-QEHa&RqxV&{hXuo(`4$WcgO0upNHL^+SJ!@Zq-F}SM`VA{ocRBO*SP9l}Y`L z{{Zi6<03Wt<7d|RopDHo6z?7l8BC{~34xws1X0u0*j{R-o9|U$ekl6S9oqf?gOY6+N^*yqG z%f$F>ElP8=@904AA_z14Qb?=MUWZI?RlB|GpKN9A+j^G!G|uPW(_a{mEu1)&0p82t zXI}tz8O3|dOGBX?5Mn&Di1Ppl*Q^O09s=g7*GG#$NnYVtr3$K)H<&u=b%@tw6jPqu zs;rOuIh>3Wp7fZj%t7(lO~*d1OVc&l$r}l-=)o@#=KxI(7JXY=JH*cUoyEtpUj4teR^dNHdJc)QQ$a z#$;8JX;qbfvAeF2cfWD_hQ&3ij_-H$W1#(=lnahhgO;PO?UTK)t2t|?bJK6C-KwPN zt6r7-b?NN#5wYmgN_+JB`g&6P%lC{ucxx6j+7chB4T^3<)ubfr;J^wC9s zZfnom-^)EGGuLA+?VMFs?ImjJtL$*jnlJVvEp_~MneH}ua;~dAVKhpomQ6*{Cxw#0 zIsIwtp@JtcHfviDTdDfBJNzJ>8K0{5Z)xM_<+DjK)#@-y@fggzaoJ zt=*dqSZ`K(olv!Eq^b`SDc?auc90J}WA7Nyaoev@-OBVT6s-#%M>;pGE*iSAb*p1nj< z@Tf^1VCz=G@jSl&0Px!tRZk9zX`^z#N^7r+lCUx!C$Gcbv1#*J`rSWQ&ty}nq$jx& z2$_u_p8o(sU`XnJwIgCE2{>-DOq(F3YjDCV1y|xVDSt=}&_qo<%+8}sh$Nkx$=saI zv6jT}gl_L=uAk%K)8DqWFS|(OV4k358VCljc#>nKB*y7Nnzx4Y3hDI^3>`lI0JZ9C z(^Hw-TB?}u{{YSYTMtcHS5qm}6Z+9UT_#8Nz2{>0DWx7YE2YNb@cCi&yej{vQ0cE@}S9>Uu9PyTry@3Y8U&_C!enJ;%KUem&#*qN(q} zOFmgTlRNu|*39GwQchE$8jUCWNmIWcUH#49j&tqSe+`yFwNKQdoVTmafe0DvsdQ#$Ua3bRG(?usQ zD_OG?QEtsHCR&$E|x`8yrsq}E5(%SOv0#%js|WAKs7o~ zT%`9$*SA5lyVJE+trn>>Y+fp=il=XF=ulvX9OTic%aS=|mp~b=6u78&0Is<7bP0uZ zoVtkuWM`l3Y|X?B>8`KR4EhBr0+HjrB<~$&eWD9x$kDrB0k8>A_N1#%UPp(J^Zezs zNkt7*R~`AN_v3^PL-F=()m2qhROUDQ{vL6zn>I+N*7#;WiP^w+yZfHYAxxf`#MO_ z4L}-o6XXV7X(wLy*w6b@@W>9D?4>Ov4t5Bj9HSs4jDY}Wq@5sUbl9Sj4dyG(29P7C zc#~FVE|OqRL$}TMc$_sW+@im?oTaahpT9p0zEL`<;#1?MzW)FZ-=u7w_jh(bGdm`> z^&pyio&Nw}`(8T|w=E)}Gv_FofG(_=Nm>+JX|-PhOsf5)%q+qP+{hjnj3GoOhBo|=v6KJk`gVV4`bo4R+q z^Wz_p(_$3i?+TOR_4?1PK8N4s@Q(qo?c9pMf_ed#e#*1<0UpwHJ7Hl%u^ry-yU;yv z_oqWM8BMV!lG1`sqyiIUp(2Rn!n7L5z+@M0-)-Q618Jvk~_Vi%m`1{{U`&69PJT 
z{{V*5+&yZ*@9a2<_9Mg(@TJe@qW1|~8``=1ej{|AYp{H)rc)zY$MG>U%u?J{vu?P31J! zr90d7M{!Nl2_!u~1H-3|T2D+lR)sDelA*c9nHq46lfcYQMyS^?B$eRN>koZPg9Bw8^3f4eg6QH4BX{3@ad=a z=fryP@cZ-GiQ|UK8D$e6W_r{>j+@y>fj<4CD^o>V=4#7bJ)RlQ@Y8H1&HcS{jWvNA zla}mEM|(-zM<`Jg(%BnW;{Xrqy6cf?|0ya+IMUA$FPsjylm#B zMd9le{k*dg*YSmemCq5=mtnty4R$GpHTJKYY0NCQ!zf(Xha1N?uH{t&o8so zbnCX2bAM-t`S$EXt=-xwrjOR~Jpl06Vb+-c07u8!^}I5O-pj@#_-(QuZ~MRVY?`i& z42@$@4y5%YiGiM(%=`m#;M6;&L5V4iPlQHM8Iuzkd8PYw>G*!={67f=J=(vk{eN-K zU3O~YrByxJYWNBFl7D`}Q&FO=@9OoB&z$GeXZOx2by77ZiYV?Rl73!`T&K={8tl%8 zsDHG70kJg0L!1&uQztTiy993e2p%!E{{UnAzq@4VR;EL`Gx8t>uOf?46wv<@Dk2*Y+##{69Y57?odfZQB$+ZE?i!mATvywOXwdx>XdBRXbgp z+J&Cd#By!hKxuAn&6qiL%4cTO-dmM(TH@Z;){StGZuJ^erbzXUTEGN}nK2U^KBS!# zEq11~lDVv4RI_($qC6v)RJbEa*C2c{E{WWz-L9HNR3S&Bpg{istHb#0D)*LXIaBr1 z^n{QEC)Zdq0WkwpCsIozhm|Ycr`OBRKgK-Ap3U6SHPAwfaC^&d*TUlFDW~xK`A3{7 z%M-m{60db2jEVl>1I`XvbR4{^LQ@{Y{{YDS+c(y|5HhP@0pNW0Ax8suLmR|^btVLo zbBLcgpMspg{{Ut8f7P(xwN{?&{{RRXXZbs8$Ni7^zxeF)#yqsgS^oE{dk*Pbm05gc6IiG^aa9)MM{lB~Zow8A(sj8Fj%fRRkxtN0_7zScC(}r11PqpZEIS#Wr05t%Z z$mg~rv0HdFE$jHWHYM{k{JH4=?cR?%Bk8pQ)#R z?;3UfKG|=ot>0@y?&|^VlvTT}429zv>$}RL;7p-r=Sr z)ck)|J`?;#rIV^@^*f&5hwl7#EjoCK{Xgs(6^P!~_MDG_{(H&_qQPiO!kO zgWlFaBRKNP9C?F9X0;-!zzLEE2oC@TWRpEf0$_PRqPFcOs!sZJCO5n|z1Yq?BjYl@f11ZZmjPepr zc}~hwy%ecTitE#$pZ(i7T^F@1<;u*PD$Sb8ETd*>x-OMTDEfyhmhH=`XLx0hpJu~y c^Xml2_C$I5qhvTqPPI;xB0mqlKJCB%*=mNSH2?qr literal 0 HcmV?d00001 diff --git a/docs/io/fits/images/Green.jpg b/docs/io/fits/images/Green.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ed73ad5effe3c0b2f235c7028a7733a8f4e54e9d GIT binary patch literal 33315 zcmbrFWmlX{)2^>!kiiFcmke%$y99?|2@>3bdvJog26uN0?(XhEf;$9<1P#f1Kl?B2 z>ec>E#TiSK={_v+7bX%Q~+iG03iSE|Los7 zApJkD|1bO32cScM6Ho#OL<{^I0x16rjPk#T|1ZV=4g@eF5;6(|1fZh*cdr8l07Nhn zA`%1z2@wSe1qlFw5pfWZXaHn9E=gQ`brTd?I_E$FdTySt1^w3$xRiAL{4RrxhUN__ zp_b`eGZ$C4pkN~N#KHzfUOtPYrs#BeuVYGv zgy_uqlWK^rwjEs{&NoDF03!4?fD;j%^gHKKymPc$0*7#{_lGkInw1F42I+M}8){6$ 
zB&P%)-3%6EnD_TKfj<&^<3ZG{rY13$oP~2YuZ*R#t^RePf5Ute+ z%Q4$YaIy$jBV5X_OC(5B=I^|hhtnm}mIM%aJcqkPST23f%#RZV0?9??&PNcMaHD3r zmPYgon&_P zf5J=1;gp!8xgQW^$bzHmmv|by8@1!4_jnzV@6aK9I}t9xDt5fj%gZ_OB$Sxx^kpFB z8^@SSy}xbklkZ`fdk-Icx-tOU@SCjhM264DHl%7DsqYaV*PdT^>f8&$YZ|Eglm2F$ zW_@h>2XOrLA+}snNB;avG~sTy&$W&Yd*42sLU>59^BRkEdEiAa8mnimDuusq92sD)Qy+17G1J~-*FPqxb2}afYjMB-!?Zsde|gKICXLgzHm)N={QOfrM8xxxwzso!y;|MC=8hQOFA{A3K2`y~ zllfqAh?lFsMYluClG;#N&BD3geB_ zrw3PAxAm-b^Kx}xc&gST3pjj2u+GQv6v6v1^DH&ndC)p3`kS|uV<Tc)61BG z5RvX%Jd{F)=T&lKLh5P+I*Q7%77=cHmj02?CKi#qEH+b1VTSdH8<_`ETSf?)Blbel z(HGDSSQ<%npd74oPp}j^RQpqr5?rqqZuTvIg?SE<+-+ZCk+beYsZOM6@)QRI!muo~|NEyE@d&_! zp9QgRD2gqA9YE^^jF0!o{ZbL7sY)7Anryc>uTAPqeQBx%d(3K#IuFlvkck(XmNO&P zHre*Fs1s-CVc(6%uTUErKSAo|v%0e0@JcR}iX5y@7<`vX2|h9fm42{Wv$Utk%k^b7 z=Nu9o8K5$jYX5?p`PQDknOnNVs!w{2ZML$?yD))AfhsGp z6%c1IJ@73Dp{cR?FtZ?oU8TJUe>|y#iI2)G!n*g1-)BDjb!{WM&K8EDQ{;MPYUNi0L&sI0&s-{?u2vp%EzG$8Nu0c^B}Y69_9H-XagN zt%5jXtcMy2`#}7B=}v=Bhr_d8o^-8c+D3^b|K91j+KUF=JE^9^lp%$|4j3R$maI?_ zPv^RP`FTLbDQ>jvM7xjqA0YZl->|EuEXT;lZ5;j$voia>;)H+K)y}a<$xQWe&4P&KE*sY~!ODIyH=~NO^hFp#2E%uYFg5-^q$lbX~9- zSOPEs@O>idhZ~8tg)~1*QSEK8jV&f&e}GYnP=B%IG=YP6;^Bf{~41OL&&JzxaCT6w!uYtOb~?gY3?>bCZdV=n~dM z)cXHA-oq$GzkeTHHy&)n{y=>hAMfcau0(*rfQb-5b^fP2_MDa?!e<}lTN~AXAMbXJ z83G=9xQ)0C-0)eKpKWfrslH~K&S8Aq0W;j zu~7oOT3&!Nt4b>7V@S?gFQiEz$t8qA z6UbyCM{Js?P!#2aw+eL%3Z(_|WpkI&>FjgAVf)x#&E^SX2+f`U+#*>O7U&ZejJDH4 z6c zhhjp^>+}!s-ROMMF~Lx#W^3r1UpfA?y+No3MP?c9f1K~mXeEDK&+|c^Eu|K+P9yqq zZA$@sZYhEe%>l$@9r+usC|E~2x+uqCqW&$j-<&*buN#6ANf3Q>gL}C!wi7I$!1Doi zZOOfz)tAiPrN!ulcTN)F?f zjVt+?#jp}~oPZf4DVBs-C-V|QzzznfPpHR(m1?+o!rd6EU*Pvno0rDfFe#8E$GV0k z9Qj`0qZ7fI42mj=fLL&U^?^6HMtLB&%+Y}HUKWlez~30%CJHAi|6xnV0WGIt?T(-W zf-quT{m4(tn}uI4l`7cZA#|5mh5V69J}+h6kAiDutD>SDQE5HzgnD9Y^KhK=`?i}P zmg;f|nH)=#Ffua0yHaf&5ty0#CCU%&T6t25b;F|~_)wbHy<(rl@DlRa zRK%DW5Kgi`@Tuyr>E4gWg<%;5b>wN90G7>VD%S^;;5FRDBA5x`RDM1`BdHon(vqAo zKLsQro!TX6N6p%Dum3D|5N_k`?NxphpM!{qXr~YwA!{XN6I8Fv&B;w`%BnETiHWPS 
z=)UCD4Zx`JxPc&QXHZPvagFG{sWK?UI1DxsZznM}(4F=id=q*C9vGh@#0i1pJUy)B z#35X#k9BiJxKo!|s9{lsw<;m7=7(`b5fAV0s$%@$asKACT7a1z%vO(l0ANsV1ogmM zi{h;Sq*0ST&l8c?Wk^JvJHFb5miK<@I~AkjG^a0HbHRY&sWFKN2;73?&QUe;@SM^4c)sX@L+IBQhDr29=(~%knVkdqzWXQ{1X_1}_ zO8!a$O$6Vl#mg#uMbsc`G`3XR@+w1IUl9-~G!}5iikba@m?*HvTZODUpOKrI@GEIP zboXS<+xqjaVh+7rG9#MH>U(;=+E5n{)e7bg0D(6Ww$Xpay%&NR{wMBvrDjV!C605B zI6#8jtXz;*jqhFBq?O=T?84uU^?Pg+dC-NCVSHh=&wOH@C^Py@C+-E&mN0QNpD#Y? zcD^zh5TZH|o|u_>(OsE;!^nW4K#>gQLN1 zH`mJn*w!-I9*sjh_|9lBUWB@4py&Yf_&sdKD(MY7fdy#-es=LUUNhD6*e~04<86yv zz(lHF7=(C9h{hl3VtYbf1_|Wx8L&L|V9%?dRLsr5J(A0n`36NX9 zovfU+3-sWANjzHUTys%lI+@YaTJT`bey5N!d+gHwL&R+6D@_KPKQl9%8e)zO+|aKV z-eV}(&Gv;|uec&sP$bC`)e_gbVoMILLcgscQKS1(Svyes3#AIF5;4tgY)LKx++r5} zDKvV63w$i;L0$=Ou+Y|7Vj9*sw}J%BL9V;4-=?fn{#}923W-V^xDkn@ZJf3*{=P-T z+sQUyS$8jz6iJy>e$5a|@`6WJQo$ydyqIm7)$$38bmC`wpJE%e6&gI%t71%pSGao0 z#Z$64f3gC*N*bE@)Hbd(VJPaMtK>-hq0;T`yjE-=oot$Q)(w-GzjI@}Od+4=J`?Q` zEX>~}o{vD&Emub_!T>A+WTJ^?H0OMsU?sOwA`FURXt)uxak#PE?B^_6R@1fBU{jc= z`0w}S&Q7&oj(ogv%>?D&u=4F6md0SK6Xh?Fdxm~3S2S_9%OoR1dZtORP%qLHmvP0k zwk*0Ir#Epf=TWoS{V+$;u4c8YY>tH=OmjVX91m@9RVMDneG$UuA9H!c^y|xIt1S<3 zgCxjJHH;^`BcuPp}Dk5!G?1( zMMe{}qmkgd#?nNepo2v(_qyAEKf2LI5zFzrPgzUUM1;5p81%kNYc>5+#Mk~0m>uYk zBi^%mXX!hremupJW%aD%r(;@8k=G`7Tl6Klp7gg-8rA3IVaN~%vKI!z_r*Sc3j6se zOrt+>e@T2O{@8DbeYslhg9y#A@8Or|%W+-b*o_LyAMlKar0CmfkTYarm@@CZ-L1 z72<~y&2O#^!Io_06;kl6eWX!zSKIDzD2YjIXWK7Wr9GwuVH9X)ssVW_+FI3Yrp8me zCU$oiHM}X4>dcBIf(b&87*-~rbC)G)E*wf~A;=M7YH@p~DSa5!KABO=nWfKW4Y`nJ zBx78@y$C~iz;hZW9#zECf~(YDd}&D|BEV|RBPiWlSYR;e%@7mX`!KS)Zdq9J2UF2N zdYH!ak@o??l5fz-#BLV@wO;w6g&Zjz$#Al0yeTnRABq!XkqtS}mht)@;IvqqTGr8k z_aR?}I-^VjEv1@Cec~t_#(;{FD|<=Ru*lIwMx#`*fshO#E$&#pR8#BB{z>XLRfTLbO%;l>=LT2!}aQ z^&9J^%1I#ahJ;qCHe))6ZV%M~Tz*JXaiK)>H{7VL4#^%TWveIgin-$Z5AJ6}9{E!= z?^IXsXifN5(9LY7Nn35^UQq0*T17SsMmorCP!ID$^du0oj|QV%>#Noqr73m;0ToUb zcZ0)hw(C94cXxRV+Eb~PBmtNOHwNQFe}NPQO=6e-ius7-VKq&Hb*gmF<1K1z`{B9Q zg_BzO|R>raCB&!X%uZfX&-$u9g=OIG#By?KJ;EU 
ztp-X;f`PEXz2;nXZGsjN9{W{>>;9`f)>d!ypTn9CH(^i^fbkD7md-(H!uY*I${*V& zufRxhR&;^Rkd0n`KN=q~-|TKn6EZV578^r1lpmQ1>R9RAEHMhrHHD3HrVr7@6h(7_ z!W;PO)d>CpFdsP~q{Kn2IeRV8bDj_&;G@Iqh_#5``eyoxuUEu=H1Llv_js?Wqw(M} zaAkEi+Z%uEh@s9C0J$C$IAYr_3+>{vUy?Qn>LI7%Kb{TkY|y8xI!l}N`NFp&w!GT& zerL_c4Y{b7t6qzp4G|FD>Y+*zT(czS97O#1ImSD|<=ffi4TwIwM1ejDnL8<-2@WMX zQ-RO-)8!pp%uF8LR)pkQH7E!w+n^>_cvpD;vVqyWKULjWDHAsbeJ|0#A^ugFwNaG}2k6z0F8So&IT-l*Ck3urFNdNlq0 z$Uv+_Y)W6g6WzGZSq88EN$Trsxcq*jeetw#p?IO`D6N-N!xxXMEm%kO}Mv71y`=L!N-&ij#0uR%?3hZ}FFCJ`(?yqSQWE@U2X zd<_aN@2uKPp=du%ClBpm5`H60i9vntJsc$^N|gou9wn{`lcjvl?u=8tyGSR2+G_v8 zn7Ge(xd{zZDoBjsO*(8ljvj$Hk>tm8q3# zZdT(0b6Vw_+nyqUCY$e>fo+er@{JlvLnafZ!vmOB*LO|n6c#S!row<~cz*NlVtw`EzV+M z?{g9!@FO{t@_~35d0;+Yt+#?s+?fZ=PIL?xw1TL`&t1kAJ+vze9~haUWZu zfZ$pr+qBDUtXPCDjDKv$=Y+927>)d{k4_1ZmYb6qGmxB>s|YwZ`JfX@=V8`IX` zu|xO6oF>Pw7nG0G;Z8EtXM%GP4CZ+q!^YYLNJFKS7@!CRKS1`>lr$1{r*bF_cqX0! zG$>QBeMx|7r1{t(l8a_lR7T9j`J>fBI%Rp;@8d@BqBZ50pS0=?tCk|pd#GQ@Eem%O zTrO?t;C$g$U)R?un0c`bpX-hXhDp!|0TlmM@sZ)Tkib~k%lYs=CRtmbrs(vs()Weq z+kaD~Zw{^|`Xbs2BWOMKvfU_{c5|W~rDgxPK4OM8F8v(F(yDR&7Uz%d)<_e997h^v zCoVcb6%hn8L1t(XO`bw%8bb)IZTZDHr0+!e@iQp@)4+J}=5+I?R&bMNfCx&Pbg=VN zO)M05Ck%=bI4X_;{U$1#zme^n>hN9YVsnaEf*3#vSW#Qv3?rt2j&kx9Eqz9Tl7@A; z`J<@ma=rZ?hS?n}x=q!8-UzNj^l`8pi;x0DYnDB;?D$H zo0ncumZ#Y42GA+74iGT%?3!j`<~>$wU(YNfHeAw#gp?5$^KC3*#h^jqa9sF`qUF^` z-NZ-|oVU1U&c=*fZ>X@plZB9EZ=9CD9~HJTjFj)ts5*2%zN`4}X~k8@?r}!!uuAhu z`F5+jz6<$KAbh6qo=H`BUyHj0dAa+!7xh>>)x-0Ei@;sIJa1Z?E@|9@TQMc3-!vsi zibgiqsH0TAM7nrm9&3MM?bfB;O>rhxVRe4QLmw_MKPJIV)dy;vAGk0qY2cNsW#kUk zvR0MFNmobC*3h)_{2Kocz`%F9uE{MW%C!~YH&I`WQiT`M#mo$!T*AjB2jAo$fu$3- zN)9Hq_eQv;@@beX+{+-DADCl&agoF>rq_mac$}M+M6+ZntWZag;X0k4FC z(SC?nSDM7tFTwq1Bi{DGz!FOf@6#n6q33dxIDrv5aczC{^W`K0{KE3`F(*SwXkfDu zDmgl-;Ihs)rSFtf%nyg`)Y6(bDFA{_S)-aNO~po0zn0Yew8Aik@%5P^xU!zJWnf!r z*6mZQZbP-bC+>$WY;wY1Q ztK_W`>pM1H9j48gS}yx`L7I+UKJ|(HmvZuzYsS#=myK4Os_ydWmp6=R6*6tB{ix%y zxcB=+EU4jEW0p|ljay9jh8EKtRAb{Zh{PC|oCDyXd*@VM$JyKwrxS~dnye`$lG=dS 
zd0vZD=xnI><`SA9_Z^O8*5icsJuuF_>tO5{G9b?sC8iOe5^JJUnHVU?sB_{!&=Inv z`xmL?IbbbJBLndukwH`7fuSv5zu7A|owUD=oiy&Oh8!A-h-hu~>#t%B>Zp>Igd3X1 zt|R>jtr|Wxc<1z0!8uw{u7P%(ZdSKt&4z&73Njv;d@`X&1c{4 zAVyR0wPmgSyXbEFv8+!tXU`^0_}CU=BCHhUEkQ8TtHs^Tef?^&m4(23s9ywVn{!Ua zmj?^jci0^~86@1~wmF`1`wyj}W1HDfn*MtHBK>OTgepLTR|g1i-aD$_eEIuaw>l*h z$_K;T5Q?WD30L{AwKmv{Pw}PAWB&9m?WhAx3z>?TGKe@4Fp-_e3)BBh)KEM=C5A&B z^AkM-_&(Z35uVT^0k&?+)dS;;O2)pLBVcZhZh>}FYVb8QXE?0V`SP(J;8z&E$j53; zKQ&oI8phSuozDYCkuhBz`ULz9!PX(%zhwqN)&=}x^Fz9Qa##NVOrMc_Hzh)999+%a zQIt>QhUFhuJg>`zI{|YRi$#kt(oDLYQrN}FFG31L-E1Z~k2$0Z6FF+y&-j&szD>K_ zpN{rqiE7f#C7`tDzpz9CLue#EfB%Rh%$WZkfSQzK>mz16u%F`tk4F~{MG4B$BJT+m z>Z6Q@T)D{&sCxNuW;sdqe3;stXBciW{%akBA~AGIymQBJchO>QhGf~q%64c4KTQt8 zj99gtC)7o3aT~0rV0!_Xd_umM8*)FNQ0l;ZxZ^>vxM<@%Er$(cC}SLv`uqmlG(x42!^uEd|UE0@diSF?Mp1R-Zru_XAY1dYYY&Xlc}Ns-mrJ{m9!66@kRa zJ0}xv?(4T1i+4MtV`egOWI8|*2ul2^_%Wo9+ zXZ_6YP?>N85Pi}!b9p9(@d-#3hr#iIxsnrc`#A+~rc`L2e+NGQZLjmWJ-(&E%bdo} zC9EVa;)DDYHud|Dvl^60e zOug9kc+?S+7~g>F-g4<;cWv?V)eJ*v zw)u!w4M$FimbU%6F*@3Nrer$3NK?)YZ5pqTE{Z;!f57VdYb~x?_V?Uz`UZT%pw@l)jA37^du z#$Wu;H!?ou8Rq?^Mos{{q@whjwfgmKwW_!j?!ey1va4HdGN7P9S)wIZQuJ2ibiLdz zppP<~(-EcC*R>>}94sS^fl~f^Ie_|UjyqpWKQM``5rVa1RL!>c$M)h+b-Il(0&hML z;5;Y!lG#^mMr_B4vRWhg>zAizcWe~h2-lAi(dMd^UN-LKAt@t!M6S<#FWEv}V%8>N zW4$`mCRHn)>$3yHZyWCmKwl7^oF_&qLvegsF0Ha!lFOIsKM{mDZhuYxbHv-Ag8&9w zk|K{mY|FKT|G3uI`+dvXX)OpN)@><&3OM@*u(dZ(jZj|A3c}Qw+i@#id8JfXq>J%L*`%e#7^{=Ksw6o>^lJ2oU&` z)8zF^x{MB9=yVtJ4ZO)m2gvR#Mb{^kkzQ0zNK_#VlNd;Y}qs5laF zf~gkG`g8;Xu@qKqUYgAG4{#Qg3jegRfcAsk(%9D4A>2GqHooZhR_`ME>)cRq>1oQ( zHoOdiRmNN9Y+RM)92MXL(j|55y1O=C3ZK-;S~?X+-a~7_=XwLz!qkbQgR8Ueq_N!G zk(`BzSzV-xxJ;!t$H^1bf^keeX~;HebRh?5k+G7&b&3O>`bHX{^_HI?wKAu_DZ5ahC5Cgx${OW*2;i09bjH`8zP~gR%`EdSB#vlM zv9ed-?4@S4d^7dL^*5G(D(wxGtT8tzq!H8`19qd$h{pm(Z|D#upW3X%2osAoEmlem z;5vcym$J&M&3H_>!78j3m2%*CDfi<}CdA507Jf0o50Q)`BydfxY?)ZZrqHt(9p$>> zkP<1iY^Oj7v;e!ZM5WK*gc@wq;De|WH5TGdd)~5RH&k{B%@6jpypJnql5(44iNgE= zew8uPx1+vuUSh^nZXlc^RTjvwt58NxOl5W4>Zc=>?{#2x>donfkpxErA`Iqz&FZW) 
zvV_m{$$G{Y;XW(W*fsMZ0z5zHb-5C``aZ@O3tLK<`Cb#DVA_y0vTNzi<6)?Ys1bxQ z`aPKc?ZY7tAJOarAQ@;l;0tPKfYqy0a%&ZnOG!DiYh z$c@%;m1cQ5)10abT(4-0A-hJ59j6aTy&XwBs;WFpB?}5&Y#-kUww@wfaZ=1&O(7d` zY;7w!o|m*4y%mc~Fsdq`X?K0%Mo6yF3v!A1jq74*h4_J%WGY`*laID)Lk0zG&3>5PFc{@q;RALehRf2R(b#uW)DfIP z(ohkvHCb}C=x9;#pT~C-GsT2+63T`l4u5_MneRefeH7+sMy7Ile+O4Ja;kkXXh7FV z)HSBSQA-B5Qhh?i0H~gAM1tc(o73iJPyPY^f4_>t4A>)ts5>pUwi3P_%#J-cD0C$T zdus>Bcpc}~OSz3}lql!<>eU}&Q^yBSZJf#&J zXP)y?RIZn)cfoyA{*YW5q|Hs!H684{Wb2sEVAQ$j{Y)iXWIdEBC@x3jVx=~AgxgT+ zif~ZMa`BD$o;ruSxV6w+y6QvY!!D`U+dSUEd3B!C2Bzu}FCL8y#ge0Qle|vcqmCeG zZlm|6#^v}tRR8q~b-1=q4x(kH%T+q6G9L6e8?SQc5Ri?Fn;?uQhUuI{jTl@fVl&^;mOJlP?Wvk<6V zEE-)hYLA^lVwXlI%_!!k^r0%|cFMPLiq*WCH;IEsD^aStVk-Q0?>g7l8rtuV%0OuN zuvzuBE84LmB}7mFbHDZHl}@6{`;E`sWa}>9DcqvrYqXP_+=>bnv0aT6JEzuX$>cO0 z722KgiA=`>KQtC7qrEwm7~d|04hbUuyeXtLq2rcuJ7l`kC^1i#An($w)s62cv1{eB z2Yj=sAHz#$0(QAQ;IuFw*mwLI^?YmDZ@f%09e3k+;o`xVERLBJjQZn|sAX;D*%^y~tqccoQe@Dgm$uE9JAvM2wQsXxMNMOxy;vCev-QO>C06vl;)89H?dueqFX*E); z>nyB05`|PpX_C^6CSnHnw~*ubRQ+4K8eL_^sKi>o4cmfjxT#cLw%^8Xa5#3UfL?dhN7d{sFnX`MP?6yw=wsqmK~-(Qgr4 zCD1t+U8KDJ0eb;9(KpVvf<6`M%C@&nq;q#Pz?G(t>&8 ziKy3wK=%Z+&jBklcrNjfUORAs5Un{p!g-C=d zXqWH~-&TK3$FKpb3K&sravt{3>3p55k$#afQv68DgoGn?1to-g$92S#$<|19vCfmL z4P5u}P7syP3Hl!HC7hT<$iln>_S3Q6$(0cvJlUG^MVCr!SYI5ieR02ES&J?gM9yqP zLC3*&zAzmg$(f!^i)X<@UiGFZY6;@lyi)-SdydIMzf~RHZiujorv3^IZ{N-}GhgRS zLAL~J*Pv{Va-2DYd{pP`wWOt`KEI^f0eG?8g@y)Z@CB9AZcP6#Jm1SS*9rmV z>VE)}^5U0S`#V=`F)H!uK0Axw-BIj$a(CzfwZ9Z++ma@y8FN%Ixm9CIXtn3_Y3b9EzvKe8A~$K%bs#SID)F+L04 zONsr8Z@eji2&SH3Ru3X9pXY|5_9ILU|0Fl7?+#NI3~&5!uu%REN$0xWhgyrmw_e2* zjzi&z<*o|bB?vD8a|QA-IcI@ix3?i}5w1j4deZc%B%~qEc`~crvdFM`dh%P_GEC5_ z=lZ4iTv0)!$HI?a`aO}RozU`Ac9&Xh>cp)_e$jykCA2q42xN!qubniN!mVBur4avn zjcb%-i(A3armOp~e}Fl`?`M1khvB2#BzNqj>_nu*1Fp*3y37s@S@^unlq$vcxJ$Ee z-nqAzyZB!t_SRz)gOl!t^+V56n>R{FJS1(ET;9Kt&6o|XtGDJQK4tFG5Ct|4>!$U&1JF`oy4=>YJ$-VL(z6(6c`9;=|kUo9MlK^Aw;l@HdI z2l=VSpxw<}ce6o=w2+6E+%E+zmd?Tf9#7y7f_xTt2oJ 
z0phoJ@xgqV5ia{5y{3P-6HZc3uI*no$op}=xZPm={7aP%*(Gi%3Qtuj!jAr9ZGqTl z5}Iu;Ko=yRZb?(TkK%yjP6|Q4tJV6%$kF(B%-7BwDr2F;JxN3530{m=V@m04U>t!; z^=+gJYR0#6?V2DWOXJDFxeIBuFMS(pxG|~)0^B?D1vQ~ay?ndhDC~TZFw~mGuLpB% z%f$HfbUErk6ZyPzZY9z^*(0^6i&>4JU__4QrR&L~TwZHt?%fz__!!!M5Pg4^+S__p z)yx+8TrowNjuGFgI-=_s^?9MC~(5wW|?o^aj|(X$_NnG)^uEg1l%tO#}ImZods4bknoz zujC!>bxk7_?5f73(Z+?L>cf6!w5SC5S|y`$M8;?z(Of)Kxs54V1sce}(+tX5cKT$Y<79iKfgKVpEi8>fg@~uf?5>jWs<*_PwBGc4AU7#negeTv&?AO6l&ph zCWP#2WPL`Q0MsFEw3Bb_tQRPr6x*E(Fnj>hvgsSwn<^(qj9r<@E7l_eQEokDTU?od zMFK`qCgv0ejJ#GUuOS$2D97vc9KEF7+k?ws%+NGKKT~*Eug!I7t zfx%!5Yl)r98IUSX!kk@-sneZRJu-qBXzWK@G_p!=E&?B#uChAvfB7U_Vn5l?r*hnpUCp; zW8mc*=vF9A!+t@dZ&v~0L7HPouha)5PqpotoHK?>U>G_2V#eklHAv!ixF(P2o}^AG z50vL%S}CNO3c93?vfL`Ly%c(QnJG57Jmo|F)o=h(jdBuQX7TDr0cD&NP`5k=4!;+? zP`b_|pZU{qj=iq(;W4qvNS~5DYPZHJI!jvcajpZYo|vBJ3RI@)0dJCuPv=twOh!ju zJjei$DwhpR$vWbM2hXx<1;%CK3 z>SDjnLd&IegX<%)oYM`^u2w`@YNckl8dcUO%P(Iy5SnI>TIh&PqOHTQNWd?rX){4Y z5qxqq56ADU;;F{4@zwgXq2|BFKn;t%@eNyHFx|afiT~iOnd%o-5j)x3@bLD>-f;f9 ziWKh2G!JwibO)RtP>*Y4z|peHbJgj5K=cFYZ3vb2N-xb-{i?dXMGL|1qqrZKjn$V^ z8pnBPmdc`j^V`^&SwGy=kTca$U#QKmD)F3V%IZSWUH}J!^E|-LH7?`78iifNuF8@q z`dhxQ%OAULax%`>G71+$M_}hat!&wfwX?=y0!%-qBxHyhN5U-SEOa!S5L80;p{bHY z&pKon&JE*2jhqTYqH-gk5}>LAPCo$U%PqYf9Gz0W6UwH%jG1l27Q_k+3|j#zm>Y>m zg5uo>J=8PgncG^IQh0|vYoVuEN@?Sai89Ew8Tc6hHUC6>kAs*Z;(CL@cU7wgt}kAC zzux?v?9l3LZ7j|j^eO8mHAHW9Ejipk?=r>vCX-d%ldO{chS7&t-?J<&bS9=D~@9*^I{xC{Y!YI>?)2`#ntv;|V9uZm9w<_K-Hu5n{w%-JO@bquJ8drx&& z3K?`iX-ZA*6$tj3M1Ll4j_Yx!P9&$W-ioJ`Wo;+67D~vZpdjXpLb3H=|?4m!T5*b3eJu)JbiXrLlNs)E7@_w*b5hP*nw-xzB{ZeQ5$5}&phfWm7! 
zMQbrNVaRYejQ2V*@UL%n@VNR>buyvXCfvQGNAs2WSubfCJIP?k6}PvO-xBsHexVRXTFu2r~)t-1xFEH3n=17-jOs+H%Hq7D3 z9CKKZREl%ndzba)TFi54I`qei#e7F)2X|_9Mqze(rb9PLNrf<(`&LwDL<2*`;F&@t zEoK#F`QK!YDRUAMruzJV*M6y#J8&egmtqwCZ89`eQn=@gMhm_%OliDCX7NZM?fGU- z#=0+qJB^(u#?02pC`gbAG@Fjj$=eh53sR`&C@acPtk!T^tPx3`VMpX{mCR)sp09J@ zMGX>)y(oD1qNrBuOnW=VZtWAM(^w{4Suc9qg@@YJ%=;te;=2s47syaC#l86XR{HP5 z?|!IFa6oWu1-PVS5bc&mvqx!>{mw!n%N;()-*Pbya<<$2U8-Wi?=6vP+=5UFd5TvbJvGqaa7L$_s)`sy6G6tV-PYrh4 zPqk#*nKa=QpF+ts-hN!Ztko+~l}eRL;m<9aolKrFq_NHX#nR3=$e`=|wJdwvcFhB~ zx|>HZ&@o4B5tc&0hdZA@}s!njFD{G52Vi}k;~JG2D(qfN@jxU4NqV1 z=7p3~(2D%s^}UK=8YvnE8%#UwgpJ>K}^Nd3X=(G9Kq=I^bHclRYL{OkG9>+L;xYi$lC3# zn?D*`GY;-}95{vrS}=e?Wt6wvgiKh{ICZaO*EU$BfV=@EYspWmXvP9pMW~l=5ghzA zH!b-OIos74pD&VsvO!Gy6MXhtjmVPn`3HW1Vi3MWDohqKc77j1)_`O88|_dIdPxSg z`OOS78B9p4vrt4SMG*z=Vgw=<+eK6yM5HQBYZ zm9Z;Sjzdc-UtVi2+gR$$L5Y;#DT^pGiIxr<=tb0h_L$A5>88Qcy~XR~kC@T($5JV1 z&)3ImEZ<)XO|7Pb(anRGlwKBUN3%1d(#`K22Kjf5vdw;c7rS3mIU9Dl z$)$G2Y$eP6^_IJkgMjKbG(Nr{?$8Pq8DAq6M7UGEWq%{S-@7JOc6PxPvExTTS6vB< z2`)284DyXC`W+S|^wPM{!rEp-Yl|e%Z9Rm-+HkL{?j$oaxE9fja`3sa(wX>PiV-NC zdua>4zQkmElSYVQCy9QMQHA0s*<9*{SsR@~4Nglse8;P7CZ}1a09acPVMb zoJcm%YPcmdz;{hv#3!qLC7)JD22DrE&<5Wmn8IU;4QtO@2`|xFy9MF zRB*Y5RB9<%TB1%QPrt_p#dr<;HyISEA-r>KT(mA~r1V$`i<8e4T}=cE#{-3=CzREx zIaU*#v}atM{X8+t1!H$by-OHSOOsI&nU-X93uL~nfBkT$zj>O5E?V3LD@;{9WGG_s z+8*{#1wNpaSgSAy(kl)nB|G$`a3^1~fG9r1!u|6NAnyn^rRYvEac&#Sv`rmZxZkm5 zP0UZ@R(??53JMu1r5au8h&vAA)YjL9=AOwU5fY|Xd5R?wh7?=|7HDp%dms3O$`S)O zKLD$}DGrdjU6WnZ=WlBeE||gqoa3b`i6qmCQUYlh{&D86?i_*hdtz>r`c;Tyaq z1wI0Bbim=C5XrRp^?onngv!qnQDLh&g-IAh`GKVd2p$KM=Dv087jO#FuaQV5*NSHf zq>@EP0jmwinV7{THNV%M!K1KoZ~z?jRo7cFxbijV6kd(N36>Rhzs?N`Bi#)w5W@f<| zk(y&=5ERB^|OB>{M&K~SLW+yzjFHVzpLwns(X zEw1RuyU#0{$nNdvyYs?{KJv?ErxH|il(M9A6l4)2z9dWL6B!KCd6@5@2!+IK7$xT9)7^6Z*Y&*{Ml}`D&?tWpogL1V@ohqqf^1%tlKyPXVECuA09;5;P0Cy4O z`8!YqY%9xrF9hXyO|LZJda0=drPz$Ku#H}*q<9tDOaN&vc5c7{nACF|$1%#;=JLtR7laAejy?Ne1aiv( zsP)swMhj<|^qUSWLI-PYf;cgIHxSE)PC5wi7YjkAQ3}x6GpHrD!OcKt3<^IH@TUig 
znpuz^6g3cle(~6A4-u%a1#=xh2Fb=gy90*MgLu)!hud!`Jz4ORiloB;l8R1FA);;( zMF*%OP1e5(yIC|b2UBUi^~`wl_v@9ELl@bu#Xa8k{dm}jfO?c2bRY9uj}Czk`6@lt z>vz9}xG_P*AQz+#AB&E!Wf0-T7pK%})-UVGbndda1NZ@-#B_IX)I>CWC>CCvov~nm zHrRG4z;gnvD>t+k~iY02M?oN33O3>FHE9oF(D$UZP1hM242hTvQo5U_gyJV6NsL@e(t3> zI4IwDArbQipw+Cuq=!m`Pv+9wsU#HvbS?rSWHQ;38Bs8Qc-N+47fpBMY14UCEYV2$ zn#E|ra!DVP#$inIE#mSF;!8S+t@bJEWCX02?&TvoR#H^UKWHAP_D?c@CaCj0%fvt) z)S=*93E)u~hd^{?J_rwhi?BAi4s?BNn(N+QCruJguL41zK_@ja1j*(qmK2$K1gFwh zj*UWfDN?CQF3B80yX~;LAL$3ui%qoSxKN~Q+fVojUMXFgu?YKe5Fc7|&qm?xQCv=?nM zfmn?m`MA9){8N)CKNm^v}PSN)-zxmm>Sl&_xRS)jU9{DmBssf~4)zW`=r*H-zaW zd%ote8pL71LezIk-%14mcui0IU# z1+iW9=|=J<@`=PozHEGcZ-a<&s>&#?+PEo!k^l&5A(dz%6_kkU-G5@2%va4)$>fKw zB(tVQ8@(w&QAZDN8dq)Fjlslpu2Z|Pz0B?6rC1j%P9F_!0j5MI#%>s&2X~y>CZsf4I2s=|=U^x!^bj>uK zf{0YDHUM$h*j6G_lS zVQC3pBvyiBDm_9}OVc1;m11>>58PHKP?=BylMl5lScg{kttERF7s}|GGDB$qrPKM5D=^kK@#b-$q^@#WN8s(M zlMY!Cq9RY<9(e>%eyji>01OoXqX5R&^iwh*kyS8{sR6j50b)U>swr1&2lhZB3;-@U zhM6!{)2@#e7!a0#lNBDMaPUN z7kYZQ?;}R70m8JDD?roJsq#>W`QknHs4CA;b)U@)={je7x=D)fNv^VKz2g;dNiMXm zTP?e?XjiPp6i8%bsr{EKOS}p>KlWH3N! zU7z0e-^kEI#68voL)x`tqt_;GeC#5Rz=Y^v0*u>nfWt~`zyX^v$OYU7-(W-bzu6y! 
zxeP_;x2O4o->th2)pmSx_}M~eVlW;Y->=2|d_mrV{9IgMEC;RVY<-P6Q-=;dG?qz` zrCxR+kR1SF6NX-+gh&gB16RODkOi&@HP4-iG0YP|yGCXh6BtwkvuXbT-2;YM3k3nj z(HY&@nnGdls(`KNg0Thv0F)vgamAjVIt8HSqcFI)ZPkXG0bgLh77$^~TzH836q_zA zFh&9Bj(>wSbJRHHXrWM|#kw zKuM_$OD0w`i5QT$;2Ek_DU^T(I0(J1Vm^}>-tBZfczOuAXTpP{j*(ZOyd5Ik@(wtX zXVVERut`qy%Bf11d3AGyX#B-+2b070Z~$8+z_MzpT5Kxg*)BXIJlxU^0*NcO5I&;G zcW?)w4s@VBe|zB|$d;&0HB|&2t^q9F)C(zBs6u#?O1N;y2Y_0wnn4@}07PcNa5mjU zO#wJ+1Nhfau2w~`QUj%NDlNhB0s|c4vv)jn;=ud{mYagoi%SH;Uk2?NCbSS@oCh2A zfdjyPtoySf7YYk;8(Mcmrt8Ue7NeU=CPybbb}>9q7IDR5k{-DU*dQ*KH>-|f+1l^B ze@dIpBvi6=k$Ob41JiZWP6_g)tx=!>fCrGtyw!j}QLxvBx6ejjONkBPymC~XAxKlA zM5QY6yf>XZ)Z-vtX*IYquJORnJO4f zo!z_YvS~7∓Xo;~+vry<&wD)S)$1`!~xdgEj!l-!wW&>1yk*h@h3rCJAJcRHG|Q zx@u?%ijbfj6eM|;?wfi`+%y_!HqFQMpjnWwTQ5v8%TpS5*&<+pWg2NB=ds`L^zR(1 z^xg33MqxDAPGY@?2}Ewd6fQWzZE8#A{{W^S7T-M%0M^deSLl~~jv-X?V@oMI5KAsePjGe`R4(ZB#j?ee1Z zbO|@|jP79}haDPW#X2P8g@jfpkO5EKGNyjlwr9O}O+>O4QuUJKNsbIqDB}(6QcFXz zr6{7jKLHCjbZ;@e5zHo1Yq+5-wRBf?!77DP!H}zE>ZUdM*wt9fsi=`fn<~LUFU7O5& z>P2{oyrDD*3z5wMbkag<1BfRA@*ot+f>x$znLOuj0^)7oP-2w@0;@)@;F3mN(bj+ON$_@g-4h_^s>at#Ux;&5)%Mn zm|nuPq^V3erC|sGCjjtzh_JH&fHjdAOjPT43y;swwAeba*U|_>@do9cl)xJUu zLzD5QWA3jPXcC)%ce1sfX@@9I}04-?TsWfIxlHsohP?N8DF0HLd=I4yD4l2Ec^c2iub(tD)}CGy!?rClPMCsE`R=Akl1dTFGt zS39NTM2t7>%CBtu(E|LU6DmbYeP#923%HF02C?tug<(w(^QA(vd`Q=0e z03SPIS=vctDaPzfCFx6=z`P3?+I2@up9#rx+s(INQosutovBB)Cc0jHr1K#+&Y~nc zPrV*5UkyJ1dw%DtI%yIBTNx#ild~of8n|lUJbyQj{ z&4X-Bf+Gtym}Y>3XdA1zk8t=l#eCGlrU;^p!y^+$04@L;iO%85qT2V4R#`rQJWJzM z5~EH}01^epUNJ=-m}8JZ{%{|k3bZL|hE(S%IxsT5qlme=i@1iGwyYGe%}i>t$r4}_ zqslTw(8A&X4gdpCdgk@0*x)`i@p-ST!UE$YT;A4<&ECQ+EEEw6+Bg$9v38(Mu^uRf z&STw^am-os3n&S~g3b&>s|RNk5H}WQ8t3|OvZ604vfw1b$uM$D(*Sk)sv5~zkaBQ)g>?b{ve)MU?0 z^%kH#qa!hbENlfK1b((WGek7P9Y-+YBK6A1))q`glM17Yf;h|VN#JYif*Mw#u2jrZ zNTq2_EaWmGr7*@(MUxtWfmSXN9lDO(1uC51URnUc%_J5m!!HP9fTMOI>|e6ej!mnu z*OM@&Gz>NfqS?a$G0d(uySje{4Ky1*q=>u}`&qE{!Le~or#II+_XVu0y0tt&mJ)1QR+N<^eu30k8>DhY6v z73;x=_pdhK0Sy2h#@a)nyV|ZpcqE}LP9*_&Db6JGKvK?4#_f+k{8sAXJCN3_?U&S> 
zjdmAJR2_Kk460QDCd$&yCkkci&Op3UDm65QYAWTEg$cJmDom8`i4r4PD}4NA{!j9C zW&Z$L`Zeg@_w+G1x{^t}=23lD-&tA-ocSHiIOXSDj3t72%9hQ1mYp=wP|G(q890jo zu(R9r1<_wdeHGpdv3G9`?3+m7yUByfqKxv0OlhjkC6r_12J+jpRm4Pn{{TNelpP-Q z8-0$wG6{^jZFf`{YEip$St+~P353w8iegZZfamj6vqb7QFWUjQgEx2Q^7UQ2kLWEXagned0hseBTPw>F-mTPu}}=4fH53dY<>m2V06l~X_*x6Q}~l?vLP zRWms;hQefFuwR>Ds3>WQ?^gk+Y7}t`L8ns5eAKjQ4mx;Z{{Sl#WC@^3a-xj`yCJA3 zpe3y+4NS(=Rrg&myF|^di7OJXO*I6nBRITrjX-KpJ><9y3ErXQfNfY_wxb>@N0y@i z`zrh$vd92NuIcuICxpiU8p0pcGVtd1BA@s@^!!1-bQ{O$Y=(ct9sbRD{ub;7#4i@W zgZoFil;^A4rNRynklu*1duy&u&c;IsMm=aLu6pqhZ9&ER``igHYte&YfIqL~BF|e+ z?&@P#1Ss&f;Kjs%k@YynxjcQ$4PF-5oef;{>Sw40N>buRS*&3(xp=+m1ajBeoK6<~ zL4Rl8K`>t#aDv;a5X(qHWaTMD8hWudaJ-xj+<{Ihol7H}GXOuC_uhX^pQDNsevZnn zv=@7#GVeb{H^EK(S2!&KJFn6OglFd zu}tb_dV`KkLWRrJK`$^z2`Q#AJ(ff9(^y`Dh^c`@phlo3P)gLCt2_(^WAj`iAf?e76j=_DA>$5vNS+k570uj z$XkF?$*yV%#pRw$uIz*XOv-Ajb3jU!L?tNGPW1wL>l7zUlzA=Aj676NlFW%d1k-3R zwhgIn*tPZK9s=q@weTJe*0A_dnE)p+*uY1wQG?p!0Zt>1*+0A4V8=c%*kBNks`!!i zy{?~_&{zjIaT_?@+0=;X-YnrVr81<+m-P`ZAu_0xgtbQF+k$lf-csDvtj~W!AS1vN zOQ&TkGBAoEftHvMMhsRFtOo!-q0g3dvC2t|$VB*8Y;DsJum%%<*RTU*I%>`%n<|u% zogOqXl^Z=ENP`a20n`FIq3RjA2J~{!vYLgyF4*(RY3-3XRdj$4vzLsy+oBv zK?#EdaQ(9YGc}t&S#cUt&GF0x*kK4x8XeaG42{dU1V=`GbrG^y<^upkSYc!axa;7; z4bPW#nZ|6ziGX%7k#>6l*$8Fte5efW<7B>9T@+Auy8!{XuysSVug~jer0dsBw=_hS ztz^{YTe&0`gMw%(I{<3160X#$QbJ>9b5d3uu_y^ClbUfxB{-w-K!y0* zy%SZ#O6DV4=4iLhzu35cF z=Np;O3=#EUXAGNbK+FZiKr^cd4YXy;SrVSy^NV6iHAE4pFiK%cP@KRV9BZJpHMW|& zJva21c4Tu)vmm{bnFONpNM^TQcPd6_n$ND}{#YFPIMt zWh>)&S4!tsthr*tW~e60jV$GJ0&rC?N|k3|cA>n3j!YTQLYCyJ`dg5$Sz%#}-i_#2A{7q$!OitwhI&gE(*38i;{ z6GTe0P);|)3Q|%7lpBh@c{3F1H&rwRfQ?`-0sjC<1C6l!gg_NDdOJ51aD^EFeBFm7BUU z=^h)OGn)}JkR`qer_za}xdL-0YSSAiTQDR^OFl$;Bh#r=szRgv46qi+)}VhVOWu~d zr9BDWPLbsEiS@{m%&(MaQH0uXbd4g#M#vB(0uG%`50y((s8W?ItM|p3>zG=Fyxkq% z7%qaF#0X0wOiHXYQAVmQ?SE{gf>I;rDz{zT)-wkX5n&uZdPKz$EIC|fOb>}yS~I}o zO*!g&AFjQ_u2!s0>clZ+m*G##*<|KKBUT0)4}Z7itX)H^6FdRbj=X#Z!RtfAXg!}T z@37%2D$*=UX`=>A8FzTwu)q&c7(55tlh{SY3^5Ee*RSp4+}=FxC`=GxfQ1^o7&w3i 
zgIzo=J-blt%p!^r5yJw9VZxzT4)G3REc)g4=leW5BbW4!uJ+$mklfjdevFV4c`qb~ zqzUx4=^>v;MI^xWsBX0NWZ^oi!O}49+vVH2$kKWGA(+fEmw;RLv;C3RPonF?B$>w< zBLrv+D7hgj65$)Vq$x*kxb_eQTdJ1|gAGO4ZApJ>>U*Cpm`#tBBS5RoRU$VJ?uEdC zI{;LXtN6dAui>NVF)WvtcqU4dz*fSQX2wY9X;lpJ7v$mtK@n#R1voSk7-?r0uuNw5 zd-!KOU%h9iZT6Ts5AuDSDV;I0ipISalA*1R7 zBWot?eO@Dpj8mN~G!#HyCMf!k_1bf$py6g4$9twT^~)Dc6yqyD(2TCgtS4X+6R?8= zIIsx&Eg4%kHDPe5i0}3=wQq3OM`{DqYWP1_ZtGy8qn*>WWXCoBL{9UMm%YICs8b5g1`_jTlJ=C0KT8aM`r!!l5X zSwkFqalqy6g$p=CEb`W%^wcdVGGi<;DKQkhO*){a2+BB0Du5nuL8`3iAUyN4tc2;Z zsFp`6gUQDeC^!yMoZKK3qIz-<^}r(H3IH4h+Krgn5i~$~N_I?yNf^obG6VCwOIWV{f>#I2>Q;!hE2s!W<_ojpL z7h!#B`#KLd6w0Ab6UW!UtNn07Oq?jCj-_wx}Ab#iaXsqXnD%}xU&iAtkWK%mpGi^cW(xWAik zNvImrlUgNuuW-%GQXBY*XA}t_^(K#G#mP>3q3T>z^@|g1HK5l7)!> z0K01uZ_CO3iRAg@#fIMz*566SGHkhA1M0M=IC9!JI4+2(tkH0I0$~>TB<@&=gM1 z2d|4>uwh|*_-C!9suoGiMJ@7d3W-t)aOS0{Y$I1=s2p@SU%GC(X4+;paFnhB%!3j! zr>12&hZc7h%PQM^eII&CRda;3qO{UeLT>K88D^h^X+?HEe4T@NW_;LMnz!t;`z!vL zJrt2EOLdP{y*=l=@u@~&VG}S7)Z$Z|Lh7j0e|#T4;ZUX~{?yN<9L_$Fr?*~NH!5o_ zB=ch56R5>4Rx@_Q9{&L0lvb_FD}I0el=qV4Ok~|3Be{~Lsiu>hqhkT_@{6cDad3kWA-jVuYFI1M@g?c*#vBSSH*JVvfGV6B96@UOZkY&Z)!|ermuw((i zEg+Oj<5vRHiNXjG6TB(KFp|u|#&Gqmz3SQPOHQSd#Y)rTkU^(FK9R*1Y1kn6L<#wW z&~@+TQRtAAUhfGZj%HBQtRN||!2L7OY62Rt*`Tw_gkjo@n83ge#k*X9Nd0royZE5g z@XN7F`r%L4E}f~M*a{he=tpy;_qJ; zuNI)%I;Lhpq7g$F1aGhMgWIQ%Dp`=jP9MA)@{982tJ%_CxX54~xPi0|IRg(N@or)Q z(n1tN38lttc#IFQCh8o(J?sn>cNZBnrfVDqZ7||M9+`pgI9o6ddfP_b3‹ZTud z0KZNE&^2ald&T%YH3XI!EZDhvg}|yAY)XnPdGh=dRg{z;9AHj!Cf@g9rue`wn9|Jm zs_CO9Vp%f508KF~53@8dRgHUgH~|gMu*NWB1me ziF)-nHd<=6GBaj^NlLJldiFp{iGWJ{;XP6Bx`e-Kd$XosHVBp&TU-D{8lj+zYR&k7 z>2E>-2<(W9XRBrT=+5JlS!oeLSb~wzS-6d9Ou6&rXy#OvR=Q*`>AWr6Ual3Q108(tPGX#3J3+hnqSF8`{@R@OEDR}+ahdynKZ>p zb%#q(50Eb8p+wB}(o{<(8?vQ7O_wZKtT)d;Uro;67J5j&=cx(O2G}^2SWp^Z)3G}k zha)`$Ag*t%vV`7XYPE5yRrdQqj06CY#UHuUGxlez5i4Jsc)E}erv1$Aqm2XvZ7m^G zU?~G@uS!vwvT^0FFmbC{di4;h7H${NM*-M4 zTM@pzKU%hLZ5MEQdA|1+>EC;icP6{6i=;A=roIyJN{rtvAV798C=Xt-I)y1xrBml< 
z_d0LmT>{lA^_#}>d2#gwpbfe;LYaFVfCnI88#N|V9S+7eJ&eY@hQ7cWQJXijoJtZU zE(oEIv@)>guMrw)%yYDHOv)0}_GgB9`kq28Jc6(K3FxN5wX`Q}1Ofm(+3Q!%qx}wo ze-tHwK&KB6w18ftz#WFX-i@UhB<}Q>r8uRFsk-4u9u7cpSs%ayB1l(`0?aE4PpX4pxA_2 zzQA+hG#>!~0Bdk#G&+=cGlH3D;?5115apGJ0lP6k!woo(fHicZ_vxzSY9Tf#F@R$j z4eN$0;)Cbm!$9M!`gw#=^`VX1O$GbF8lha7-S}AddUX^SQ9~G2{{Z?`%s;OiXnU*D z97H2bhYMyj*t=>=G~$g?^-qo`AG-OfegViSZ4)KF(%Gb}Wzd{aP(W570p%Q18|#3M z#kmDQbO6%Qi@cyzM7cs(0tnzhX=$J^(t9OFoSZvi463g+rBhoS+Jsrdm`{ zk5U5)aWLBO8uQajN=vxYBh>^m1B#Z*X2<}^8K;^DZ^a752>jo3a7Y`{b&b9N*MNY9 zr*kWjeI@bQ6H2FLVw$kCkdNEEF?*c6zI5R`8C1o?^R<_b5YjN`sA05(ANq;H2x2NQ)1ykSoS zvWa0vl#0Wf=or-K&e#0 z6u7Ce9z#f+jnop%hjd%!LJOA%lt0n{(jJFqWJs><^FDlx`JC}x+)XFB@E3xZ#0Je4 zOV3)hX84^>=Py+x6_RjmS8xhuyVgfO!{m2(Ovl?*SZMSL+SE&VX7WX6N$0WoE-_3_3NsRsjB!{YGeUXT4gUZb4Vwc1 zi`zrUdPSRF8=i7Bs!cHIG@zv80clbiXeBhz-V|ej?35oSpa@$s-?&wfZvOy=R1hgt zX?rFbKrRzYB{9dJi;u4Z>LM0(DtL5sICf1@q zBKo%rDO{wILwFeLmI&vB<6aEed)zG%QnZ78?+0n^2C{feyp*2K=&man8Vm41hMF5cRz2?(Xrls_fm_#p|3VlwFIz zI-Jhd0G66fC`@$0dZR(uP@ze@DOL;>4D=kV*E*BTq)Np!%7U14htRCp!dKM`KXVoz)i(UGx#ja$}*#fROYt zT*#_-7XY9(RIBo!T0)X)B*oQ?AQ$xX2L}-s(=cFe>sZo>@xY=8qmK{xK(e^I_*RYa zwXZ3>)g;1W4$Ml#jIamoYpCUy0r3zUXr7HaVmj%}rPC;g-=jjW5;*2(IbE0U9gev$ z2nK!wY$dKQAi0Ym@?5NiVLV+h zCUjy{M$HnGtr(HGitXRdlVzyr+^dDXG$=Y>8O5wva!5@#0GwwOrcmVg^sDl0fGd@$ z3)O!nQ=$$~eBy9B$l^xj4j5PWcBrs0jbDI(09vEHa|MY+xoAdo5%w!hgBKK`#dFi1 zdEgcEbu@hP3b+Nc^m)3s!=%gq0C#R}*t=`z*P0cIlqQl!gL;iztpX$FQ&JG2AQ@1m zmZn;;9fvVzYgyeYbfM9W*t?VI2S=ABZQs7CW=rm@`WwKgmGY#rvb>&g6U(}CIjKaY zohnkEaE)3;h}R%Og>u1I+T%FXmoZNog$YCi(?oHL2f`BAL;mG4YiDm@ zzvKC*9VzXzu}g%r9tgqU4?_?@0om(NdU1-pEiad=$2lqSL}_v2xn?$_m1r8}`P%G| zUsckSQIRb$<~(z*R7jOnL@UIEeF!_8GWmaKR?cTHFxbGuD;bJgZv+wTiu*zBn#vuqe$L%VdMZ2{T&pOyU>00w#2zLrok7~p~!Vl!;A8(>@1l=R)g zx`<}t59|?(HeLFZ7am%(xIS+DtT?778dk{08Gy4e3(}#L8;uxr7P3QBLU<2fZaC}k z9v0}mv2*4D@op9%!v-SXV^%UgKcGIe zu9gW#?1t1)`VQbd{{Wb9%q`gBtMlzWf;^vJjfaaL@Q<_q0P}16=*9Go=^N;m(_crw zNbZSDK$oQsil>m?JA*ZxOA$|`(z*B&rd_`-l+E{qvp^KeJk1>5nPd%UUrjwNUT&TG zO21t$UxkQP-&^~5=9jYdFO?ukCPHlfa 
z+7@8w%sws|SM7ImcDG_WE&<94y*W~z@bIyv36PvH>znvy@1RqQ>!>*rY%v58Lj?w# zF5UcdaUUG>XY|t}3WQ;XJVMC=oU;w}uaJ@K2kvmz(nR;1(cGcT-$6lpL;R10A>QHn_doUlq>?}37&_oYgN9cLX_94y~1p3JWT zqomR#$yKl>P^n6iNWzz<-%}Hf{v|3zyU(5iHVns2k%M2D3%fU(eIUm%&3QNTzpl`ak4LZqsL>QAEp=WR#yVfsK{ucHCCyQG`2sGywQdxJXY?KH_k z1ap(F**EbX=QNn1-xPk!ngo-4M<|>KuBViBwsV}UIdO{SF&&I?#~Ng66ngH{wm!55 z8fZE&@n=M6oeTpJ*lJ+=zlhWWI=DON;uTh+Wf?GxA7?1b5ej0D9KT)=W82pWh+G0J*~)wg1XCKh=PsY@=&xCyU4DzvZlGog7gyaR!6<{{SWR%lcRlo-oG%VhD|L%9W-= ztF~hF8V!wv=XfILrnL;{1MbV(0Z|dXL4uS3ci;lz-xt(C0+#efF*{Xf;2CH0t+}-p z&ex_r?N0j$#To;8bQZH36K>T@b4X&&_Hl8qTt$VK^;5>#hnnae~@bY{Ao{P1&WF&9D2HrfyT## zJ$NDJ9?hX2YkTeJwql65V$VqZx#``=8p~sb5{twhzQ74%pApUI;@Nu!#*lPm8Mgz+ z{BUQ^xl)h#wx)GViK31uAdl^^*S?glPf^y-GQ~>Zk7YhAQAPK7HQ;ul97JncP-$sM zZYHZTBt{GfF++EN!-)X`s(+kikW@mn*Cz^z!7$XQnmiyb0cv366byri(!7SvhD}wd zJ7ye*w19O6<@{6{cu)q@nL&!>QleENjZ6o|X-1T=usEWLR8~X~cJJpwK?p&qo}pw3 z6Pr3xFsh8zWMc0N6qZrOnv|?pIQ#()LOFayX7hw(r%r^e@o^_B5{!*9vv z_?TQUfCxl2{61*>GpJa+Fo&w+PU^O~bI+eUdt^#4XlguicK8K3)%CGZ#}>;s{Wqi4 zwC#ZaUv|D0E&-2dVGs?601?b;EuOS&hmCA(BNZG#w}zYmXd{=; zzQV@pvsWzi_IP{j4XC2aXTV|n+QNi3L?LaBSUA?BK;Q$Id@Nu?vdkeE*H6{`yvE)U zh$=L97S6d2dGgbRg7HFFM8f+m9}MalQ!jUYx@ntG1W8$f*dPE!-QAm>Ug2sBwO-ygsT-MB;NOEc&Qz$%4WQbL!4Q6~k5I|A z$*Y*>T6QRm7VG~2>5KYUFq`WlG(a*J&B2E{jebVm*oG2up**Z?Ts_!5ZWaws{{V&3 z!EuL#dU_-B4c+ky9cp@+c#e(itiLsHR@0)R|2JI z&fFK0ys2;|-NY~tMk*63N;|{21AABzubs@MMG{h}v8N|MQAyDgjoBn5uTVw@aL_z+ zdp}*(ish={T96III~TKznixBTc>>d8U+sFkq)d zK%C>dCI0{fIk5LxA%a|H2*4PT*O! 
zF=>*MaAOU>`MGz1ql9KIGaZa=cnZHQ<;o^-F_TLI!00Igo5uYP|bHc+W4iQ|aAutr$nnV*J# z*OhEbkMpDV+pz2aFuuUV&+HKw_wj3T0Db^GMx)7tEP}c6w*m+osKc=DQSr-wfHinq zgAt|+fsP@aAG_@Uac|Pc!Lx`ZnI6Ou!|Bz^^tk2muZN5np~QvZu37l%9~_#*?R9or z<=)+gY8M!Kp5XWV`>oh^>lAGK-l0x83xzr7#@^eYxH$09zJDKKum&LQS1@cG#r5|F zUF~Aw5r#P7in?kJ!wr3+!}!tcGUE-Nxncq8^rZkAg6)u3yNd^_2z5hVzvr%6_j?L3 zxBv(33SnPF1ufRRkY@6T8&N4}) zGP%sQWfC;Am?frxf{tQPnvmxHdUna#hbPjG4W`5ZVUU*Cf5*?lx#h<$17YopbwVCl zx43L^YBpobuKaR#2FN)6mc2SSx_uVjo2!15Ut9FP>fX5=tX5q)X%ep}A-qaT4&4gn z8^V7jim$1oSoK^Xt35WN0%17pIChD}>|ik5Tq2sd_#*1FeLn}}5tejO?)gXQfrj=~ zr6}2majhw5v_zUrCL0QFycF0rtXW@J-SQBB7Y;fIU?+GD<}la;Tn8pAKp~mg>rVax zuux#kV@#hGG!gcRV=$mB(-P1~QGnTdIKE&rvJodO3I)V2-dCIl7;$Ce73A`~;GiS^ zC`Fcem;f;9Aw?kr85|}+Y;i;U#`UJS4!N*UIlEE8BH=KWEX?4=4Tlb3EC4)!L%dn4 z(e>-$f#HX|c(~*p!2ax*EfHFv)eI*1*DzrQsDl6iFuq}E!O@i#*sxPN4|j|HSy++T zc%8Ece4eb&uj#AXgR@9<(X{@*uR9l5hlq{2V0B~fZs8A~5VDA)h)@3jX@hTE;?0?= zHOt1uj4^Fʺc5r_r*huN)cVCF{83O{iCgZS8}c6N4R9X&d0^W9o<6cOz(0OT#~ z0dOC6xFYnVdN>Ym{{S!tgO3Xr5us5R_j>}i97aUdgCSsz{hvJSE~I8b7ugRA_qAef zL!OUs13LgvYR=g^kK?VqwY3jc4C)>J0Nvg7rJNQ-f<4YDd@B=`1iX@$0m{K~hRjJM zK~jRk1jqp_wNjBC%^IkpSco%Q5bVSapf!D8=jU4)wp=l08$ucA1NpFUde~6n?QFsz z&HQZtVuoTkZaU+HlKT4+efG&s{ip@&u_p%^UlUnD0-kC+W3fv#@>b@ zf-SG*=HEQtx>)sb0nm{VWRy<9MHiJjEW~TiF6!tABg{L#BV!5>h+^7ofC2K?KiBfN zKF9pO3mRF`IP8FiOaL#(m(47o=AJq6BBGEQ3P&mjUx zijTA1mJ^Hb9^o2l;^Op{C0i#sIc6CsDihp0bn1;I^8MW1=7Rj)^2ltoiGb|K3=IJV z^&kN7_;cpgMk``8xboXCuI?bki&Kwy0E=)7GAvmUu>>&sY#i$2;@MO=*?~gRN2+P@ z6GKKgz`R=fhM$*&bwPt1L$w%~KKB+8=knHQ5i=nH4&Wcx-EKI9ATAkzQ@Ch8@5h~m z23|h^u`C<|*cL1N(PiOZ!Ge|f8!K+R@_&ViYQHA1KpYq&%a((~Uw(oe!Wg!MzTx}= z-5BDC7_kR&IbW+ z#v48#7kvXP?C_j6;t>cl?8~NH_3rS}mU~Jf5p9?O984wu04e_fEkiynV-GuIxT1u1 z%Yb^^F>2Lr4WAdXbW;+`i1?0A)mllVxsZ=yZU_!Rs|)1e!purni|lAv36Mux_&>A6 zE1jceAGHq7AiFb;dGK(^DVTG7teztt9ihlO{CMQnDiH`p>NISF^ROZi*#U*bN7x@c zd#pW?10Z*eyTEJQ{48>_)10YE()7loMscSYof)ckqY5wi2c9X@??d=IWm_WK5%h#a VJK4GEW85DoA17gNo^9^8|JlBRL2Cd2 literal 0 HcmV?d00001 diff --git a/docs/io/fits/images/Hs-2009-14-a-web.jpg 
b/docs/io/fits/images/Hs-2009-14-a-web.jpg new file mode 100644 index 0000000000000000000000000000000000000000..42277a0dbdaaef6c68a0e04a59788ddf3d6a3ab8 GIT binary patch literal 23490 zcmY&<1yCH(vi9Qcmf*6uCAho023QCLNpN=v5Zv7z5*!u+VR47x7F-rzoQ0sl`MLM~ z^{U=CRdc4g=hW$`>1jJXFH0|*0DKh%Wd#5NA^>3ZdH`Nd00eTLmiE2?1OPGs08o7; zodR&Bt(+}B1FU*pDF`pCfG_|O;(zdZMnXYCLP9}7MnXbHK}GowP|;D*UV(~^j)94Z ziGhy&U*P@+`2Pz8BxGb%R8(wqbZkOgY+OR(|8R&2{>vx$p8@~>5C1#o|6IKE00__l z2M9?>2m}B`0t6%igqHz8(W`tY2(Ne*^nZW=K>UwnXy_OS0HpuJdA;?&EPMdM>mwi` zA|oTABOsx?k_ixz0CdRj2~niAQO${5gXz(di+PCadu9Iq**e!@AVIg_l_hlxu>_L6 zE{%wYfP{#G{NH6?mwcT}hxA^W5Lw&&uWK?rQE+j6@9Qic859c!;+GWwHqxs^1V{t` zNx)+ZT`<_)q@)VM2M+?xR@S1XDCFB<5Ym+mV7eiokl5%Tc0~kZaH!K=5s-d&!MC%7 zxr7Ff4i*wBUZkiJ{2nKS$FStIuN>Sw$uGz>Rg6c^kB?-Q8ux0`Ys}3ODY0l3%%voa zq7CK&wv{!aWnr|)9u;po(Llvg9`eNRV;!$iOsZX2v>&T9k_8S_aa5 zLTg27n1RGbJxSp)r4=bD=v02x>F<*T@K?FI8mE=KY8x|)whco)uc}W+-U;!6fvh96 zw&)_%1rRJo!7QL9RKj&AGD%k zoX;3FfDI5W8L(mN%@Pn7S>!>Xkx5iPT~(T285Z#kw7JINs2V{u9T%p5r(br5k3PGQ z@(C}VmH=&EJw!(1>J#eypJAywDJwe7i%QJUTYb%JIN=lDLg~+M0y)G9cQf|1AsA#Th-g&nD=G1e_*6>A>Tv`S zX9zcT@k3Qw%sn;A8aE}&T0?wAtbcFkn(&d?baypS!=@uO)3N2ITlh#@*b+MN{B&lB z0b#ncooo#+fMB@e8CWkY0UOzLp~?<>l?GmZ^L#XknIcUddo}xLlTfleh=m>MBCYe6 z>qKcMY@Y?b0TOLs+D~D1EqtqpRRlqAwvl(6s9h-}P?VXKRl;>+JRA!#5n#2Vm16g)E9`TXV!28H zGGDNU6)mz|b8!t~59U!ij}W1*^jm^*24ALFNiJ-TICQJk#0*Pef*%??1dN$PSsBtaaV72#)>NMz8yfxw^kxpINXf1V53uC#N#Ywbw)LHm4RZt zx|*Php5MnS!A!J%S`7lkh*9Jy@on=!CiWQ{LNc(1v@FTQdzdSRQ)i`yA|~pAPRM6u z1i5(3tSmp<^KyC{x*r;itz3v+r;KeLA8lZUYUn!Agp}kZb0g9#VW!o{DgNs3PjQi7 z0A*#E$zbx89ONahn_~_9{i2(Cz4F0S85&AC5;_@Moe^nlS=X}092)PRH~htLZgWoZ zV2eWX08DgS7B;`w>Fnf|l~gSq#uzg3omPbhotJH|)iWa^Sc?j;0PP*~b%{^YgxLG{4QVz3RDxwz| zmUxBYoQNJMe}vU$C6|Or$L+`N*(EE%;q82x)nE%sQ)3%44#Q(59z8lo5V=+Sy$$Qn zB2MYcBCSnYM@&z+Iu?=&+jXao8xyS*GJ&!f%!XNycvDMoSLTh#jC8QyJSzcR6eKS2 zCjZDQ^qL=&4t{7fqQ$FuRwb9CN|dh)xKtsz~l&f#_}2Lp!x%JDns?Os_jF1H_ZYMSAmVkv|A5QeKaT zu1S;r)JB+VaE}xFLgWc$qJtB>4y3~(yZY7!q@l=$ 
zk&PAVY*{y$Mk36FP#j!BLX)nXDud4z@&brAV{x+bcZF_a1_kI$Dml80L{gg)!O!h-(w1)yRPHS#GpdBFn`$+zA9v>VG%UREwPz?8YDJ_1ZL_o7|WbsRw=TzY@*lfYIY?IK|NFCimIyW zp0Gz5#Q>CCiGXolrHW)>8d*w-fu3H4FCkcg+&dqHX&W6=h04f}kJtk6IMbWPZNR_&QbFNVs^3zGjaU#+ zP|!<}6XfJN(cMDO{LE+zwR@{))+#bE(I7=Fg5Up)QgBQWNq_pJ1AqqDed4%a*#s`J zoTiCkEkPdW5yM+(kjrLNu+yQN1?Zq&S`U9{q5MRmu~vO>|Ne9Xl7^^-gLJ#oz9&$- zJ_cfPO~EB_91(`3xAA= zuxZQb=_Xx#(R_WSV!JcS=eX> z@I$MfNy5CFidnt>c;{%H-F)jPp`~hI$TXM8VSZ>d=MlqOR|V+w z99(*e2-lC zw>Uyt$&H}&uqcn5(u_5APy-t0&5-NDPm+FJ&2lZ`>EyR!UmP^@*qwb*`2uhsheAfQ9+iKZZba2BFUEV@?0othwiU5&yjaL=MEx2U7IzL3sd?xo(Rj6`M4s zHC#e!ZM1WWXvRl$j@tCWCt)f1qSHDK5NckpDzoAj0Op(25SS3~myfxN zQ{5jnV=EQ4>EFMj&woVD;Y6zFY-U9i%A9*oglUgjoQ^T~)P~H0cgeC8X%~SXd|S4LHCcAc9G2re zay#$3z_or=>AsKZpUR_$^lauDXELV`S~c-r06PtVW6W>=jK#_MVkbEUwd6+0dYsr# zSAlJEZmm%_M(!Mq&Fgj^)97}7@#a4`Fp-I+aoRj5yPLH8q&mK-KPV6!yj`GlEX?P2 zUGwQzH9^v%^|om9s8j8)EoragX3pMsSlReYuGKwt_>_m1B^Wb2mSZAFigV^p2TX-H zNbrv|Q4cN}JP(=0uYFVr>>ind-8PGTwSE3H$;A$;^8k)y3oOom&c?zcpI;|DJ3 z(#WXA+1B#_2S&|fY^V=*<5wVXQ4_ohK8_f$Tydh)qNBFc4;8}$y`N^CdLwjjZ4L`I z&qr2Kyl)8`={g8-8+A_Ixw+r$iS3u(w#@#4;H z+wJywZ;s(#8(0Z?zoVe^%lqs;HMWv+X4?!mvB+wt^w+|$Hvw4@2u$}r=W!F`f z#3zW0Ymk}r0ei}VI^V>M%8b48=$@P5+0%lQRmHwgv1p#Q(uoG7(`&iHVYeyfpNgSo z-O!1YWtdGL?qVtq-~XmxcX@qUyn zU4(g)LQ5no9OQY!v~-}K1`SguOwZ$0IZr|Dz&Bp(WE~*Rkf*cnaaVZ(gcBX%r7&1@ zhhwB%3=;TjGt$fSCQRRp3lN1d9B=zB` zT{jJ=PU$UGYG?e`oeWi(+zP>PVBMef*<=2*a%u-(r@D!g1nq+3!tQVGzcm{us#&3y zcqqk|ZV_qF9O5%0gKusLbpw%agD1={-fA_NM>8)IdO96AkdZTnT2DXf+b2oF=I>h$%PX3UFGahY2XUvF&;K` zuJ6Kf_m(1_Yx@`C^lEN6TQFg5E;(;gRd;1#e$pW$^zxd-^w?w}0|wU*({saa<>v|0BGf`KWh)kUlW=B+dwo@9)-K9Pf27>zMu}9jm!KI9_sS^{evQ6 zmAVKs^}xxmewOjU@aH0BbcNW3LU*%D%+!%ppAGIEq~8r+?Xz}_UI2{g$)pKzDmRaQaLw3iVv_bz(r7?X>3U(`&jz^9 zW;;u(HVw~_7>6)DL>RSipQ*AG z-7By(8S9-PbAEe?kD-MiLJ+vW*?b{Ay#V_VB5L_g+rPv=?_W#nWGfSFeu2Q5fimp7 ztvW~iLvk*2DU&c#Fm;;Jl1Pq{3{3@L< zo2?Z+vd8VQh77{IJYfh@>D39#&bJ|LV`)EW`Xa;_HYox57XVvHZ#hFmex(54Um+Cw ze?KblEJ9S%@$6)ja)d?%XSFq3{mAggZnKW$Gh1p_Q`Dp0!QAzO6-SGCa5&Tq8u{=4 
zVLYH=M3egW8*%7b*lE~}iG5xxzYo}s_X(iI1JSjxBn!ghOM*HinYSz?nwS!bY|jeiuXe^#`|(GDDQrB#NT5eGgrvL zwX;V`32myDqPbZ3*wBr&p;R^BAQNmce3)hP?s2I&!d$NH1)IGtZ8Mtmb_CPEW2{n_ zac+|e2g~kiRh5~kRG|*SMy&*{E|8DF;5hdrfQeKqtDUN4)N39|=2>FR97};@;rIlx zbzyIbq$t&MMg8EioXI2|XLz{yk*(@HRUlJVlACc^-3J)Dp|5e>N%6ixpSOW$FBk8b zN3uBHC%w1&dYGb7sP2=>S9^nBCvX+wVNSCkk1l#lXoiM0-AUY{vzM4!aDtqI0zS7} z8oQ_)UQ->T@Rw9t;3y2?$4}yYB#!g4M(TQc{>YSAM zCfKRpJTFArR<5BJnSSoR{ng4OQuKL)E(5v48u#YM04^p^IocIuB+A5C`RM+w{Ugx)=9I^g0YrR4Dc#vmv6h-FT> zIbPN`R-ns`pOlB!*?lI~Q?xW)YaP*EH9v<*}$< zb2qZ8y#0v$=p`UQO_X|)MH@B~EE32w z66Au0tZle_E*zxkE8b0Vyr_uD$^PAawpon%TFf}-@7GG7AxZ~t!8QLlhfm0;>2yEN zJH4vACh_vSn(aNr7W2+)hNGy_*zw;U#O5jN$}D@&i;7NF+)1@Sj*lwV%nNLU+W!tN*@{M$d6 zTn?#n2ihu%-!u{W@!{X#6a zrSJqz<@{<4c6|mpQJL7U8^09fKVh4Wa{TDx*7R=F_|ZULqH(5Yi-0uS#8eTMw!y417(G%1VSV4HH= z>Y4$p{WQ{l25A;|b zVC~54PwdCabj_%>)La+S`~rx@khv^t&zn{FRLaxnMF$?%-xJ=#F}DY8cxwB#&yG#;%nH_oCyB>cVC#(3^xW>&|dIO}OAngTq}eUdyY+z`hR-cM3VjGf=8TlpRVAEJ=n` zYAhx$gxJpU)#8SaJ+>hGQWI|wFjpWIUi0`>8Ju&9^yYVBpZIbdLF1T!-Y5K>icipE z+(^*gvR@_KEn(g$F>^XM!I}B09rw)|{S(gn3do6lvQxtrD?dmVcG9v=#?C8Bb?577 z8SuTm{@YG+a>K!n{@q$lJNV7j@w513o6nUhMyAVG=d9fp#T_>j|88@l1hKr> z6)d$$9jxf-w0r;g+jYB!M{n({%%ky+#K%?UJC@t4E0dJlp?KNgp-VB|?NnB?w?iHi zJm@``F!yw{{#%3Q?L%$9&o$D!POId{U)hCI$G%SWU^VPQ14B0dwMzTi&}d_s7+0^> zkM(9$dZ?XivTXhR`Q2st>Rp!%y5b2m9+v?Og7_0Ea<&%?P6D^k@Q1^p8iTlnLmPJ~?ikbMb zQtVd8asg$vq6&N4B4XCaor&|)-nCU)mz)li&991D(;zHZ$>_i#!4J_5aTs^V%j~E9 z^0X?S6v9QO{bBCCWiyINT)A+R-wVLNIh>W41elflYyX^v+*Xwjm_W9tMi=`6&`V-K zUWUB@l*J$POwY|M8g0*-?e;e+`AoRfYL(yY2@$&p(8f!Xhp@7jb$xnXFNGSwHh1eu zB8L2x5l|?|H@nHLxquIv&*cGETY-P3@YqL1j9SV1_FZpvB<-=xYAzZ3wY?IL7F9O- zF4?Z0DH=1qg-R3oDOZS~7 zUE+hcr6%ztp(T}}^6~5(fdv=)??2X`IsP>rs|vs_Go2ZNR1VY3&2-49LwA)`IpbV_ z6%b>>)ZO8# zY=}z69#XuN*u~!Po-5BzUuI&Q z(%PB-=cJa1VBE1xl#eo4vtZE%<2L2Si$t%#iA~SS6X$Qm8o$~M5A_G0)EZszH#ldP zya2SJTUSRp$6ybyb(+tgxgUt?%)W{)j~$G4)CqDE?X*&JL9>P^QaN;BLIZ#36}*>k zXt^rEKB6DtrVHmIXNPUNpbwOat-sw~-4Frq&d|;WOSU$H!I?RRCk9O!kJ^BQlpnK< 
z`o=t_o40BO226_4>!3M@chBXH2V|8shl*b|*G|Gx6-#c1Rub&fM<~W}{sh_E2@y&n{Y+3%%H{SQp4WKwM=VF(znR8qh30l(`nbPAPhJQFZ z|C4{i9JXco^xmRAnU?VYD}*|R9emrU8IJ0?`gxHNIz_hC@DsLU9oB_b^p(pqYEH<8 z^r_0rZuZ^cP`&4B8>7(ek?+3@SK%(+9thM!XON(uf|4QbPva@$YQm<#6Y#Lq?fK&h z+46fQvZ@yV6%f8!t+Dd-hxPNLrx?h#;n&a4Q)4tUhX_dapiay=o8>S0HNFxW>1GgW zhFw#kXM<#}ypzFo5Kj~o7<9A(U?}zWSr=XQ{WVm%?JFU(sAN(bNi%XTW{pSug>X@g zV8T~~TfGy#7CiT#nkNm5lj2)D2rqn?)tM%&gq-iBYMxh6q4S^Vv>s>*y=Pa&< z!r3PDu}R(s!;uP2K`#IXKFe2^AHdau+G&P0cz#&6(EXq)VeNMDSV&)Otisq@AUSn8 zRe4Z&!~R_0Zf5b*)%R5Sk0vqL8BS{hon@D4o#kw5{4RK(SX3+dTj@dXlKWP%)#`e3 z%&2KgpApl{ILLH=WO7fK(n*oc$ogyQM9Ua^OzW#5%iOZNtSrl~WW=COfwt3K7+>It zOim7jpee~(dYi5y>0E<7^>?PHx=qXZYLs8KdpxhiReD&||2!AI{aNJH;nNCpzN+HY z3!r=pWWSK==*izSUmq~W9{D7BFq&e}w-4H0yas~~+_k+~7a=sO0c z|8T*ZGacS5n zfm#yEKVRnIUTZCsg)s+0^x-BKZbr?$XX~uL&!IG=lqMsEkUzhh?PXy;$RvLBT#!J; z^p7HQvcJq|WIDKUSDT(@ml~eMCj{?K&2ah)5L)G11|FvDUpQ6%%_nx54{YO!#Ab&lTax~SW7yrGQUPurH;@-YaZz`oRxb{Qv? zgu?g$oJtjmqj`NUN@_=@QI56p;saChcv7{JNGD$`hR{$I7P2FCpLVgBl}Xb8A8|N{ zljw~&!=Mk#SU{A8`T&}+lT`WlPQox-$bCnCf@Vx_WDH}$yLK5Qw}>`4s8&N|eJXV7 zuH}0uo5H)SCH*MWZT0`6TL8=Okj}mb zz)oG{E|=U%0_srXpfWe&z8hO%`p48S9=67*hm!YO`;spJ zyo*}y@yjhQIkGxric8>;WQwY%3oOK=RuIND)~|SY_+cXSi?Q# z>SK9pl|8tc)M#i84nOMDjOYlB7!EOj#BnCIR4dUcfs zNBAiSm73kx2PybrInJjsF!@mL3;%8dM}%aTnpGOrKWxgK%}L5dN*@GBhKp*DbILU- zCI?GM&hrWoIhKQ5w(9@Y9xx=(uBL=NCqFKL{gdrkd&VmY-w0^W#j`o&d_Hcd*Guw% ze95ko)eC9UO_owkV+4*mU}8|A5OOH#aURy9-cih|*dRHwq9e|$5q*ny)XmU>b^E!5 zGxVU8TsLUbiuJ&AhXwy|+hzA^`~YB-4`xMvBLcn2M6L&*FfIMHb4}^RE`lilZRaTL zjm+Y3tLm^j4&Lt#tI>i55!NKc*+r&w>g*~-g2;BfcP-l(JxY5Z#qD*U@I66ZJ4~tt zc+Q)yG|`3JbeRQTSIX)yAh;H}pB@tISUOx85n|*`?8lSyensuTo!;#%GO(^}E962Z>0pHNSaVm8*vaUrEvN&a;%Q zRehyrRYR|ooM_<_>>pgZR4I#ZMsU*z~?pX$#nd*f5*>&I{->lwpkwa|>v*l!rLKC#RHnAhN^vH!K}0I3V4L?Tg6H^M@m?Fo{nmEu9tkEr5As7An%VvtOldljw>1gECCPNXs7vM`Qnu`TSLn(CkNxNAe)oA(-$_E9r^ z#}0|!_tIuFe0A(oOHD@lk`RVwS%}@q3jj8i_gFr&2dte#-q~w>pSt%iOiqKhrx=47LVZLx=kYziy+78132zW?9&= zvO2Xb%rDwc*=6aBgZ<{!+d95ZOp20fQVyn=_>M89;s4`W>s_N1i8-CEoMqjUAlN&* 
zNPk`$cnz&Ij7RR)YB|ECQY{NP!pI^4(l~SkkqsZ9u5|n3=ZdzFi!Vinc zpZRXzIk`;O>2d(;MfjVfw=Xpj zFat2iCE0Y4$1!6si+_fkkV*0LV>|YC4w$2jZv-8fcU34A%Vn7^^&Xl~)4av9$c;|; zAi>#nnBG(UJG}((EuWn;rZ$U8G~>4zb<)r!{@)is7mR$9hkRf5y@7}#jI!Y47&L9H{3~k&W|d>`ZF{ZXepqsL?kkI4 zUc=gt5JO?~RboSMob$$M;LZ_rF9~2l7==F14-TCFkl+ceW3GGZg3%1X_boh}3JLz? z8Cu0R@K2&{s6rOc>qHh$0*=!M3hEMZswBu?y+`E03fUh&dfGA3>Pf*LgPcDBapQrG9|?mQ5&{kpyD1BupgG2w zlx@|Qy#QQ$6dM!+n~Lty8!8~j`rhw;T0p8fvX+E|h3`;5$n3M7W-XWR z*{g%#EZlx2yH_F|*pCTVi5W=Q@ONl$E$|qi_TPRY4j>>klN?$+v9iP(Payj<7nR>&;Tb_}V4 z8=2_8KSz!)6=E&@>6ECgn4R5DZzx#$ z;$>@UAm2NCTTbv$nfxb`5$ac7#CLs}(2?z6Uv)sT{vc?b^3E@XRfHf9;5T|?MQx(`{(czZtsr$>QevE6$^W>M$;qA??&t) ztA*$%wN+K;g+a2ILcoC}QlZEVhuogSi_(wrEhcFP>0f$acLlNn-g|DXco}ZwVZ_`n zc22J3ttgX_=_b%ku2pYI>ObD)v0Z;lSH{};K{#+a?}x|&<@Ym2)P+z|tr1J@L7O@L z2{-1W7I>-SN5^uj{kd3E?v({>v9m9V;x$kePqLcy95nrYVF^X_knjqP!qF!1tPyh) z&Ga6xt9UbQ+Y#v;f9m(9I9>tg3Wk~-(L0@x>YH>EEIqF?%~t8E&A8aPdMT?6BP8#R z{38A;eZYY@OGwEl#4!~B?bV?Y%1V`#@~c_tLvfBQ#UCJW2Uo&mR|s>pM_7{MO*0R5 z8LTFPFAN3t@|4X)}LuAh&b>O#8dKVdU~RVl-ZDXEI7_(|SyW|+qtnkYe% zodf$KkxP$kU0nwchuSb~o|NlG?0_7lycOF*ht}DkA=G$Na=!zJfa5Qbl1%1?r|!Q6 z*7-w)Y;V@^sG@)FUnE*~=6OY8wGJ!lYs^nHomTnz4dR|r3_dmuX}W)ZsM|HXgGLCV z4HfUS7)EgL%D=Jhx>=rrP79(@QOdyOYZ-ra{&Op=JiPM+J5>CDYsY>4dSBp~=W~*W z{r${iw?4U>!p1g^zxD;-(y_czvdnf;bMhlJB>_@>nmrKY=6310gaH&voCTGl@BPX| zdi^qgTK&!*mVYQt04?kc@1u6cOBREq0SWW0{TjG&{Ak5K&Z*|+!#wWvxJxepFG^2t zd#`i^k>G{aI(^;)N7WT`mjL~nvD1HVVNmWD0O{q2z!yNMl~Zzu#qe?fJp8qc)u32fH||$;S&&*dyRJ6=0^qSuxEr91jE$5HCFK&Oio~9^ z>WqMMG3mR9h7Y68@A*f2baDdq)}V1C8SlD_>F? 
zQpCzH=5&?5ByL`~8LP8%n9L>7iG1f@nlH$^PGg}+gG5rYo>J{E2H+pCe~MhPtjf0n0!%=4cUu$mwc6m3xv|vrnPwpGr2oyitm3N;^s@bLqByvMj1!5ykV%X)uu@1q7nH=1YH{Pnmx- z8`&!``cK4^SCko5!P)|~$;6sRiEvq$SIb<>6EQxf{aZl?MYKtsv){IfSp z+6|O8vCwwoHMgWLtHdZ!{pXU<>sOPl>SSiLP$R`QkOUg%g8nqAk5V)e)jBkdguFZ)hI%D zq_5CD^c!@#o9p_m8Lyo~^-f4~JX}=i_QSjTe7iL(^+cGzP84}#&YL`k6gbYW=wIr< z@;&{!(9XY!#5)%n3qJWcnfZzUWd+BJTf zhC-51m}){w8VcTwv=n!cTrKxdn8OV#kFWv$9ndNPXPRy$c!jUjz`?bbq%(E&y_+D{ zj5hz7-=76yPZCE4f@BGvt)qyYeeD%n-gh>NNrr#8gqA=K#FYzAjS$g!KI__r524;s zmj$vhX;$WHhVar7M41FI>&$n$&DmC2Mcv;4tFWI;!pW^&2ae0{{xP^7d|$XKHSlXy z82jv!lO;0s_Mvmg?d@NS%cU|>@w%r|dn?97I@9T;l>YCWwjVneR6l?ANnXOWv7-Lf3AwOO&RAwF*Q_mo(sm=Ld|zZbTgLg zq=aB&M1|&CJ+N(dpel=wJG40+_L)!N5T9RLw64@1x!Uqtn4?RUUNK$ZqrKe%_O7)0 zg;+soRW=3gYt!`gsaCv9u3~L9Xfc5qxH2O4Y1BeBXFsCIKW{HY zs{3;#*gucVUu41P@cDh()UBY#0_yM>YCWQ;jxMC?P_zEwjhX$2s*f5|zl9!a4c;|7 zx7=oT$$NIT3yf9PQGDixJ|$m)IPyt_#l0zdil=%_@U0k&QD>A^=Yo$FMyo0XXUn@r zxD#a-$!;uj{HgNOV-7YcvWffnag)MTulc)Tx)#6G|9MEXPE2ci-qTEfN*;1}EBy8} zkz9d)wsp$x%#X!B)Ird@Sf@HO&GMJ3+iJhW*WC^>9~bEN_Dlc8FpxJYI26{PnVm)eOcP%Q(BHs zgnR3YB(pmwZH)$mIZq;4cjT3JPe*#?$Lam|Dg7{CA-LGrpOEk%ZHlPp9n+8tFv#mM zs2H~T8(dRYc*QH0W#Oiy@aP3khchV>%j|Z(CNizfMy5+5kDhk(Dq=5yw7wLwkL?OD z)A=zR5S~g?PLeNKf|aX@7ajwX-dmjUe}pW+4IpC}a(ab8CL)&SqkTZmgI!+Hp9T}x z6Zf8#*Mh%Exz=i|?cs-9+&3%P$XscgG)UWNeXk(~e_cKLO{BmUA74(AVc-w^c|`N$ zV@2AbW(Y_p9C#Adib>2gw2*+GCdO+V)9+sXl3aAk@biDVt06Zrs+* z#V9-pioWH~;QlMldYOB@c*6b#U`VF&4^oOtikDq(4GT&!r6z;V!8*!+?>VH1ed3|{ z6opRrCw>ZumZFWH02zS^1)YaHnvxgp-wBB=eXa>quamDw? z;Csltd(RkR2&7jxt^*B@T#I9Nomtw)au0i$9wO8)e~+Hj@ShP9XGTgDv&_v!U)pdW zV`G{}Jc#u>(}Z|a>sQp>NX}KJjy*KIweaN%tSp@}Hw|3mMsv#XN`3*1Oi(BxhifT` zzZNc4f6bKYD$!gLW=5na>q#FQiolMHv?w>MH@LXDKFQHZNfc2mBqUiHb72w=r&Q0? z1%VQ942Ip8PaT&8slmoNuuA3VsrQBDdNOOy>OlF<(NNkDaQ#QN48-@tLB0$owO$#t zx~3)dJ@B%jO}_!>{l32`*4|>vvqfzS({!g~LUWjxup# zZ$Ax4Z?V^JPnrg z!oIS&7l-e{Lnx7F1ft9=`&h}*!t|cmb?A1_)EN~Rim1H%n$(ZvOz*vJ`*Hh{BLp~> z`UgtJ{`zd-ERgkt$Kah^ZR~WbpSW?SmJ z_-@&!A?t^()o-?3^heDzU9oUwwjYCQj-$HSV*RJiakqF9! 
zBw0w|x+ylP`fV-Oa{slDqk9=7~8jFJ#?%vry+M`@tQKY9A ze|$aGoTuS-bpCLx%TqW&%|9~6NA&uq-L!+&vEL;}O@ki^1Z4Zy-e-zz$n6Et8pN4) zVzU)~>7T54I;<)K$JQ#{Ag(i$*FV_(n=7^kHn7OVXv_{~Qti*93@pC-5$U)2E3~Y- zeb0JtyEpkw{Hd?krYw?bu(9`{3fLGmUZm_C-`!+vuh#+U4s@tVwT}%6>6^dT)r`0I zUddNfVTS~1M!x{iBn%$eNshcErP!7pD~F181n;zz>Q)lnLft*S%PoC7RF52|rE4iN zA$nh3q~b_yV=njEC0dUy;!4*pV*^z{Sva-x9G6`9(!z2H9ThHMmap#R)sbzQKT5{D z(tUjl&kK}}Z{|jGc=vOHNSVvXgm8=ShX2e*LXE{UQFHF{<^`}1&g&8n=wkRMO4Hfx zaLyy~XQJ;HQH(X~&RxP(D((B0yUX7~E0)}zyLQ3F&31*Hn$U(RS$%eAf?3Rw=Wd!U zn*1)~aPHGRASs{tDFGF^I@pw?Nfb*1B9GzXE~i0n&!|l_HFKJwZ2;X6Bj2VN$T3k% zl4kI3efz}KJUN(_G4ZS_imCv~QgpGDa(%U^Rf`{S#r^7de)sQuaTA{2+B)`o-}iODsLA20xU0p_CCn|QsHs>$pSyObURi~QEEof0 z^d8qg@4Q2ptm5ogW&T_3$bl^1=fMFA#E-6Se5TiIM%A{TDz=|~6EBOi9jT@%o|2}7 zqaOv>UPMRBIj0pNtT~?-l^fD|{4Q!*J1i@jm`{2>aFl`%Eb~=5k~2|RR>p5h0Z-Nx#pRxeR!M0G#)l}7I9_$bAmkBYb7y>i+l0edOZe{6>zy|l# z)(!?P-^eyq^1|B<+uvA}B6v=r_EuDxDP z+9g`N9M{uFEvYzuXEJPj+}6P%o+;czq>v+?*1E^zSUbMvZTOQ)NU`~DR~*HQEXUfy zagqiC5rzAA7WX9uYhBl@L&42`G`~aCJTvasC6AQm=FJ&n_8Gx$-*v2ba0hB1!r^Pu zetq`;0L{$%9NN4}`0jPqK25>ydV)}md|$~i_)MIy-SO?@*_8y4M9CzQGX$>) z$(myzlEeKaT1KLtyV-DbHLXi^oJM6s=)z5N&JbG(NI$Q~*Di&z_q*Cb@S>V41X>QOIV zb+y~Kxhx))Hm7Kl&p9pcbdHdhIJ)J^N$+~bTT;&2sBo}ky4R{)8jNtDAR2@{G&F;e zTzqMO0{rRVnH&K4QjtM3&eF+EPnKmmENuq;yMCmP2VIeK-FATKp3q%OvEv z*^Vrc@QfKbSiFSH#*SzG9D0BmV%X zoHvT+aQP1nLlc0;Ynq2^LA#H1HPfcfwQjzpx-Z6Y-}c)Rg~ys_;l{`PE@P9YMMV{_ zld>wL^4^5_KQVsf;$y|hOz6NzxR&~g@qXl0Qce#t$~zq9qBNhWzYj{$9SKhX#tk2& zr+0U!B64a$-)gN-Bb6q$0SI2hvCKPwRq5+mRVXsp{_mLvZ!M9PDVHhxq#6-mo`SKq za&N76B)tk#WtmzDcflz&AfaH1v%Hc4QnwWQa!au*T`2^M1SjSxL>DCZ(?ZXtRub0& z*CD%Hba41oT~1N0Em@0jCeG-jJBpEI=xWynP@I>2{C{bsuH5hI1QZ_+jTjP5bR@-o zOr}TU4{gn9-1e0ER0FTZwXdqdhRExjlrkWDUXB}7d`0_KO=Re+!CA4&^R=Snr)$s+ zr+|OQd#`AFwg(U#U0?yV>q;zm3ZqGTdLeR`$J2oe}Cil5hG)>I12&iDoYf38Ir>Q+>Kp%N!e=S0lCP zZMhcppBw~A$=70JMKmo9)C3J){O)-*ae!x+SS znp&l|?(5b5R}Dr8{Apz~y5o8L+2e{rTHBOh2Wvqqe~n`Jm^vNXeutv)UPn9}ds{rJ zKyhGkC>K=hqPZHg{SNx=b>Z|gd7f+jMn~**oGqc!^`&VBJ2rQ`FLI|fm&N%X7axuD 
zIgsFHW4>9X=CTW=ZpSbNMYWm$heOuAja62!Y{OaL_N?sFN1s}{xG->}lM|4KAP~)uYnvefzz|m8>sj{a2Sm%FXfY4=dvKP0CNm#T0pxMFUP@W3gO5fZ|5N6zGK4zp+}aOP1F?K0mUrN>-`q zPKnBotUOG7JRdH}%Vzry!-{tZNhV0|u(=UH_*aVV_*glsudB(&^3kwJ;?k{Bgw|;5 zt1%wae!`kIj)ZQoK~1cH&Xg`h)&udQpfPAc6hHxg&aFU5=AwG9$teV~?2=+SIDc#$ z*X>Eev@cV3r$na+h;mPfGg~0sekO=35_(sGc|6=Sk>p1)YYE#dV3Z(R>K$mM+EAY- z9?$tEkI|3%oA=YiF|kh>os#%-b6Vyb2mL4rcE6sr?)a3g>XQEem-rQY#jV-wv?x5U z{?Ps3n={BV_`EEUJS~uqakma$p!^MC{{X3}>$j%q!b-9lk&Exo7 zZUYhVO%6Mq%;w1FZl5jhB!JROLs--RkJZ||t<Wr^g`5yIepl2CUQDJE#;Gf($^++l&+oIg<-8kU^F5S~XrDdD4U(we5FYMaRD zh)m9PiNErt+bpWv12;LpPikGSo`CI=&MZ%rvOS2Ajn7g6MfKLM8Jn@8j}+3ztZ8uS z;0aEFXzfN%W8e}-$l8GYN2Z9Htt8~8?6V{+dqOD%f2XgdOGTm1ln7P2x2W!V8kUy^ zm9?kXb}mNK?U@bqJv~;cxxh8lznPTR!biB$W3yP(sOpsNj(SWG?`(20(d~J1eGAvI z(@L3dgKDI*%Gr4mTufEk5U2pFma@pIR^VVL+wf{w3;0^Re+1|J6Xk~Q$cDz}nJg1vBpuxe_zD$VcOia8@G~FDXXCjDa(KL!Lz5QrXL1HHrKLbyDFlz5 zZQo|pavI-bo(^xz<%&6R-51v;Q%mQL$i*c#kZ!HMmFU}4B?}DkyPLah`q%DnQR?=wMq}=)$!;O65(y`&uhahg|i3);24Jq`fLTfiqy9m;^ zZmrpeljL$t5a0=3{1{7)BV|D3cE3n2+i{}$*In+~RDbd17%C?Fm;AVcI!8Iu21bO9 zOG<)MqE`jfGp}24V)E<_X%`*Wy)?51^_ZT91feW1LAS_--KogN+L{0<=}s_Az*eMf z15mZPa)_LWD79xUO6W@00e-amftN9!3~p?;D-_cLv5T z0dDP7t%+SVr`rs|?o=J;igUb|3zZIDJnj!z+9ZwXv^=P4iz!aE)#BRv?45Z$-!ERh zwTtC_Pgl;x&HIOwis-6@_BjUasj7ka+Kvfdxh_DC z5_Y&fpxO5Y!=dge_ehjMUAYbpQtjJO4J7bB#RpZwdABjbaeR*r$ItC!^1-dni)&B1 z#uW`Na3lb36RmAs_pM!46}dUKc=WZEpDTkJe1}Ty#Mw2hlAN7A&QfJ|ynm>apjJzQ zQzKZ=LAlb56ZbN-nIW=$F8)+HnkE?X?z@pGC??VooJE}UONyuJOlKr{e3PYa(`^Y( z$D&l5jy~IrWId`}(o0GL2c@fO()@gT9Q&bBU8N6<{Y*|<6}S+= zzGJ=N&X72_U^|oPEpcCsb=Om(OyR!eXZGfPKBqIv`Bb5djxCHB{{WydpgHaxTy&G>soHTw%fV# z{RgX6G^I*^#c|KisV|Ch5MbeQF3W@7$esqnf&w(Kz0EFh+zpErzdG*mQmt&<&rjFa z*x>SZr*x{6K7UV7R&sg!@w43>q+~=nqp&~HQ}eGoYVEWhhKqMoIC&-y4|>GFz@^&- z$Wi{SNUUzt;X$^#wU?Y3oZ)MnUbi~@YAU_WsRutU!ib^BRGk;nsT(*e8`B0f+SJg% zE&cS#5+0O?kdH*YP`^@np=@zQEN%omeM75UH3o{gV|XDN_NQhfy~!HlqJ+?zd>X?x zY2#!zRubnH52%#tdY~0<&GVC)Qnk%+Yek)tizBh}j>EKVjF6-vw3Pg7PU|NO<@|NB 
z?yi4d(CIL_oSe=&Q4`5@O$|WRJ4v}!wRNr5wP5pY<5rs;yqiB0q_Y-D53x^L2#IE#^jc?$k67d-N|uVR+X)N$iDl#rP2|N zljc*t0{;MTxB_;ye@WCWVw;Re?N>R8InGa;$;sU$vb*cKwZ2uOw5*P_?Y9WUL|R&oBsdU302=Mz zx|CL}D-Vy3vU&P+J&u>Po5cKKWBnlEi^T0RNZ+Z)^$v;Ky?A%BeKLEst51@|05GrW zZP1^EX54f(N>)r>UQN=LgQb#%STv!FBaV)$g2?9H_j`}k{{T<|!t}xtSR?5`z-+xK zh*z# zcqwJ7?F9WqUUfejP6p(6?i@e_sBWHv(xo8eUh;ZXsU-x?<6>!Y5e#9FcT#CA6~N8Z zxCMp{!F@jkrvS#e%GFPat3-jWvXiAt$iTN>#-tFp8bUi?wBK`lgY55`@t<-0S77G& zX$<^x=8dyO9EGL3oX042je)KoE5=*v+cA{>j!##^@>Yi1Qz@psn)d#TpF8s|OV7NY zlJgF1u<{(V@@3~3nAn(RmBsIKSil<4Nze;b_`W*Ptx<4}eR&2 z2%qw#5EeNKQxvEs5&}U3^;cCzW_qmjFIrLU?}?@Y~Vx6KLst`Uj59N_R=UevbG zh=-{?YLl!iJ4xEUN=6FH5P zz%v`Q@;7n%U9m%5ezmToiMCn*O9W&^#J^3Z9PMt@_r+`^T9%I9acUrV)uxCgqns7woPsW%GKZOjCt&{Wb zKsF(Jnj0uUq8g-|BuZ6jfag5E=lgyYBLNBcEdh{_MTkSKI08uJE%l`!(kK1wfNFaW zg;KE{xxo{qFS$4MrnoDYEQwQEOcKDAbg6DeJyfZo3_FVbC;NP=U=X(y7phqXupP@| zO$ng~^dgo@Ngx*}T3m~$l%nfEKuh`!C>lcCdesDETa+pIQhY$0;f0bhq?sGIA+n{& zwM&%=tvF+q)B)e&Qv#8|u@2x@`ebI6ycl;hv&dhT(fUo%2g0+IXp?(HWI%Im>Uw>fk3ZLS{w;(no{IV zg;Um)goklAq*TaO>-f`@$J7pW)KS2?gc3d!@^0z+@^p%115CYc^GCmqNxAvSYkT#aAte)Ou@0 zTUjGwO+zhC{E2o;G=;^j3}*643~hr#^c||3eN5_DG{XrA?ZO_5j5E%8d{Wul4DFU z@)FTvM*9ZCZ6B zE0V-bAQufsL)dDR@=Iy;2?5XgjhFKDq~1rCuu1Igkg_!`H(?3F8N%gz$n-*6XVv^X(+*iZefx(${Yzkz0F07tH8-O3%jD)rb?=umF1WV3K3}( zW{|q(s~rm9ruQn*1C}uBdTK<#Q%V{TdQ;$Hq%c_00nQ|!k6OM&_y?h?g#jWyl_*cB zf+8Qrf`l$Jn5!2ACp{tlVpgIbnLxBxY zQjoy4`BV1+Dp8S%ZGN1rOL#1)%^zKN?MosIGMUs#_Sn zMF5b6k8bs1La`R9kbwe)dr|^fz@_Bjlvm-K7G_8&}L&yM#;$V%9`asd33T@9Mx`rI4GE(sqT+?lj z3t6bm20Pnfi`OpEYV?FNzUpocW^z${#7o? 
zGi6e=lNVTPKq9l{VY8jLRVcMca8r@WZdQ%}h?7oB8fi%_2vcl9K}44WeN6%lOou^M zvw;X)Sk$I5cBGICO=+kIjzqnv2^BaPN-9wh095`|L~(YQuM4?HcfS0{LkWNAH z{AdUYgrxx@RiVhlzbaAaITy`pnIibo$P1+qP^b`ewF3r}gvDS|oDK!KXh==Sg7o}p zTarQ?n%lBc$O^|wAaaF5K*d0UI{tKrBI(=Kn6e^i22v5KCjqePov20N2s+Y`96=>h z;Z6lIAfS3u?P4To+&&#>`Glfq!ks+@CWtJ@Vy?N}0co(2FKS6-sSvY*-kg$$%=n)x-vbPB axRS`rw-7rLfoZq`HSa_KR4phRpa0ppok@@Y literal 0 HcmV?d00001 diff --git a/docs/io/fits/images/Red.jpg b/docs/io/fits/images/Red.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8a4e74c0a3c83298cc9d54294effd0406efa0a8d GIT binary patch literal 34623 zcmbqag-;w#us)<{f#NPd9EukAQuHW?A9A?6yOjdP-QBGm?(XjH4u!*|xWB%r*%zQiZzWlxo_#`I{lm@`T!2x9d4Z!;jfI!N_(hL9qfdI?^008CR{D=27 zfW-ew{lD@VdNCB zgGxXrsTzM}?3`cQJ4?u;<`TF|p8z%~`1QUDK>2r9fEeIG7@8@XIHSx1t!&_1vp9!d z>u-BLj4X-1156rJ3lz&5-QEFnC#EaLp8ExDq&)7ZO*@a2w=)&w#T8i{m6Kcb-XV2u zvTyaL^IRDZK5lLbzfQe)%=@4DlArB-(_K}6l%v(Y=D^No|IX;Dl&D?x?tKDmaS&8(JACfs@7kUHK->2;Y3$|nQlo9}=aw-^n@0$(paKeFAIzgHO+bAoEW(Gv0? zcT8v+Z-+zn2Ghld>DILdN^sN%AFeY;5)g!5!;}>liMg2=j5dt#q-c6)sVr`(E3(=& zo_#t}F7GWaPzRdnt8h@Whr9SQTe?3X9x__WYmgcn_X%7+JTHO3j%F*j-WO^E6*=ZJ zPc%Ttab*f#ZGdWJ!Tcn+i?Q?DM%}8JWjBtjBETZ^vb^k zn6KAJAB#(SYC|{W7FHOxAKSR(D1394&po?WA>?b{oN#5a?@P;RtL3smqN7sFtW?8{ zhnscdRPP=kPJnH|4du6%ONuO^Qjjzg|76Eb-a_#Qtdcm>YeODBc*4nroC!k~E=klE zKPfS{w|)?6gEeiNaZ9-Xb7M+QtC#)N2+Q7Doqd=-rA<0$8`FFs)9OYVO`tw^@_J zKqSXy=Jxh0_Wy8Q!wc1&J^wf)+H1bmbT?(o@bx6W@%ASQb;QVN#1tb{ELB2UtkaQ` z4q7*|JFYA?ws&gi{^Ss~g4rBW>J{=@3FGj?8-h5Q{L|k2DS3nYp<95XHjh^_wr^F_ z!hOumTy*K`@>7<5-%6r~88=(Hby?LrU})`ZjOEeyiLZXwdPTG=qiX*AA$5NT)0a)_ zv2*<*haJlzPr z0XTc!u!>I|eia*16asfq>X0#O$$*qJe7lGDtq zs^)D*chw@VT}a%vf^Jr~o`Xa7Lo0dZb9sW+Ai2*~w(N^PN|m23)O~0k4^7oh?U}pc ztMLE6G#Z~j&EzpHsV_k~yBG_7ueLS?CjCq6z&vQtL+RY}#w`ALVXr%mh7Lwga&{BK zAuy5QY>iRp#;lgyg0H5UDir(fP!)~K;;r#kjUFag2sQD5FvX$8Y%Xm z-#1I&+ka!@hpKZOP}>M0%A+D?*WeNK#V@P;{JboiuE;N3kgS~e2X*Z0?9Oi`TGe*) zeI(}414)Ebf8((VUz+vVg}seXQlnn_4Cm@et_UzM^`MXzaw=B|KiB+%xQWl|#5^J* 
zKGHV;Le{TUYWCP!8k4!B`BJd9&-1Ik$GMP3-1=ySP!8WxjDRlLth%3)-`!s!k#suo z`T@N5#Q}WgBK+c%EHKzA;=$Xyb_SZT3JrUrlwj$I+sZ0lh10fjT1$P+?8uGr&u1(N z;z@t`(=WAW-somT>(xF~{kM2%ak=WWD@G$_2U+34syXfaRFqV}YyE8)R#55hs79(u z=~~NhmDc6@fM4G`09>S`6fRVE%q(jb7q3>{;zx!kTxgfOUV@)zg5U3S=Ip6&4)56x-)ZQQ%Z?4G1N~^Y^K&I;3 z4@=vn=f4;oEBgg%3}9D7wL2A>WdXm!Y={D5eKDsg8u<912Iyi*lh(qwrw zNv5Gg@;Ub@R`}u!0^{6@ zT((RT+VA>Zt)Vr_bbbl)n`vN1OaFMZGVYpWamZn(=^c>NNNAzRi6}0plr?8OpWSuB znp)QD{p?+DDqR}hn&EBS_qM5ipD?8WEt+~3TW`L5(9g{Iky&DvT_MrixrP1U$4a7Q zA~hbbcnTD|i*RbF$oDWkHm#F?n$Zu|{({&)XeQD)mnVlh*PvfTk+*h6{@OV_ZLUq~ zt*>Zk^A3pEN*SVSgr-}nr~iKHvRiu6XBAL2Zomr{{*qJs<;CS+oTlVGw4a-8Z2!<@ z-y~jj;tt!&PQW)T;?`yz_)j0dL)9t0Al$lD+qqf448#06)3;UXsmor!tC8z;l0C0- zx~PGhgekn=c5^!;lqL;(s?26E(njEn^702nQ!A78fUM*Do)pgcMtQTNzJXIll9) znL3;5>Jkspnw;Xl^GYYKB@L{jVU6g4=G~pa3+1{wSq_y+zB;cY?RwW{QLvFdEl_l9 z>pd>(Ur#9<<@!==ExKX(?d1W@6b21o**8PxfA1^?xa;>&pl+fadtz?u<K%76+#e zZD0uVJD_ai{!jz!a9&xl68H|_tGG2c@0Yi@1c;n6ku>Myoywkfb;*z-IfPgpt0Ub&Mnj{@tB41GEYB2? z7N>JFfYEK04V)RX4gPXb40Da@DMzGd-ZBb}jepBG0g}}bTP5ri>;S+k1iKVmJ( z2tTgFtV72w`M1zq8iY`W5Yv^b0jpivge$zGvOJOqo-JrpS5TgdLv`%+Bt}J;d1`aC zv{aQx`{8O2NkX2hBidorgQ|vK{@Fre-mTc)1MHXM+2Jfa?-(i!fN` zdKfRYx%^6*CV~duc9Mwspl#t{@d}SRNRLU)9i`mq=REw)Uw~|Cqu|iO=PsY{^6Fca z|E4SON%H4#WMl4Np*RMbf9&Klj8=bF(5aU;kkVJ2uS9PiHn;G_3e|lo(@^C(+R0TO ztran#5=WwcH1!P2`oK6JyvAo>dmjC_``pB-<2RRgEHhy0%W_4TJN0O+h=XjqcztGf zSZWKG1qlf*L`r9Vh96Am1R{W zT4VA=?adpeVxkb}nlg1?(taa?cPVetcJv1r2SI(6tO9I2Q<*bzE)V~xsbIZ!*+nSF z7S4dc;djF-HTFDjJ-(M22rDIEvQlEKn4zr#Os4fLXoqiLjm(2yQ>S;wldF`%(Qj~* z)mua4+tTc5*z6)BvZdmxyjs+E0H;WodzUmSg9v~4+83+z z{)wNo6w@ZM9B|E{O&(A)=CHu|n3!tu`z)auZCAI~z`t`@R+jlWc@$mMz~@e`ILEpg z$;r)?w9B%yv;DIf*GB=QPGXM9ik&+uLRImDt5R)Fmjg z;OEY0VYqcb{_$^x3zt;1^_~dHeiWoRK@+x|PKDLmzkY)m2IN%72O`kr+0qMi9Ulr- zXz@O0_S<3@65PDtaD^UAq+9~a? 
zXU&Q81j!qVNSG_3*x&s$ta=yuqEZowahJJm+i<~BS=?Y^91JAIpX{P%PU`Q zUE7)IozGoTDR38$hia!qOH*&b(>YY=_~g&qX$M`&b(8)_liq$tmF7=<@JAYAE1mn2 z;+ms#>FYIcS+1{4Bsd?5FRVe@TNCL?#zkQtlsjBr`_$ZrgE7f?R#tTyQoizi`{7Xx zD~qGD*^2)2Pf@Lp`iRQ|m%ej8V0$Lk4k`UKnV4QGhsqvXDxxstI6-5t>edujEj79D zp5|I{!xlwrZweM@N zjTYLEQd!*4uEHAPDCg7itG$guA+jPW!(uWV%VfvxU2kDqX*c6P*HfD0?$RMqYFnUO zl1yrcO_(1TWS@V-5*Xz!5Hy?k%d8x#;t`)n-FVDI_Cjk;E5Gk7Xzc3n8lP`v?{*p2 zq>F3PVj`etED-V2r83=X$gZQK6L=4ieA7)i1p_BsOF$G(LCE@_?K%yyVlYVTviw$O zqz7Cmx}HCyyREIPBZWI`7AavSoTxLf``KI4lGsUm&SA8&&i$UvTVDP?fAaOBtBY;Y z3r-QGE-Uqvujv=5DV2V?g6=q)<*JChe^Xoc>yvbG_W0>EuYr!K>?!=L?E~hevZF>u z;k^o#J2t*}7qd9U9KDOktTC4SViBN1Yc1b@lb$f{w3_x0qQ1D-D40ij!>{}uK-haF z;HKRs{-A1!Kj2$X`wq~k`){S-bI}n`xT4L#Px!^`ZR(8#!Wm2`G~PKYbbM{G^Vh7K zf9jh)Za(yhV(Y2il*9_kWLYOX@h9{h!1GJ8dn()Ct7T^X`HfnXMnd4$yshbUTKHA} zFVR`x&f_5nEclwUeCwN&IMpO&{|+#IV}EOPmD`M1L^3uPbu_SF$q}i9nEr@<0uHwF z4OB0!E{B{pt?(QgMJt63U-?Zw1J`oWmX9z)zQ!a&^3OC{uvLD|v41RSytJ}_ zpFptYH&k`?qS}b!uoD49y4Gw1h_nGH7t?caEMB?ZlRG8hg ziZjseXtw;gH3<*nNrc7B+~}?5=sxY^eeStddzBi!qYm%-=v%EX+j8{$S}>%=w8rG( zr_|a|9^0^;!y20_( z#_L|Sw9j3A=N|YN-A4jL5$u5PaDY*51Fnm`nyb%*p5SWrepGU3{SVzRDp-Ey$$@ee z`M=`Pmy{=~!?D~B%LNgyMfQZn5|@c6t8DHI0vWi)yNK5LNU*u=BL+f6CbkVslVr!Q zsK#vK1c&QwB-W)zgzly&^rT6)wP#k8P@1sR`_yx@UTIBznJ&;!lj~G`+oPq2h>MJu zV4)ywJ00#&0NoOCO7lr!iA8hgW6i74BHzY;#ubhs6Db1{dF3AWcOmf4_*;(RbCnE) zo~x$U4^&&SdWALN* zNL2IWrwqf8$X}qPuyO{2js4k}%eaGdF1McjZ2peI{Uoiq<)`;}HOeurQV7 z(gX3F?Ti!%xz&3Wjt?W=ufB0C*Ljtel86g+%<4*{itKJA*dvTe>f54K*BGf5V!??d zXE-(aw@0WiWtJUXKg0IeypX@H-1K@*WdtV9o&RCD1By0g{a$c8p@#mvI1yU?w9+bJ z^7L(+cvQw-*#oLZ5$qtNn*{>~Iv!BK~DgC;aoW3htHqKavQ*&ThYvmf&t zEAKP|COtDere$C(*$A&lH%g38Ko>e;Y9KL;Agx_~f7D#2v2>>-6ZYaM^YeL_G8mqgQo-Au^Ii1__pjdNlgY|tqR7khTuV`D+P>DW#7I-l;Vl3b1R+f&sPAUU7 z;4xf7p%vPNLjN-D=vNBm^i+wE3LRc>FkO%lW~!Lkc0irRg!;#=+-7XH^cEy(_45-) zQ!@JEQ#-+sC57}ReFND2kZE7w?a*U%I84U>Ir!H?{nldp>&JYdr#T_jQ(WgfmsicJ zeB+_kp>J~vSvm5#Hrn@21Sb)^v@$|)sVq>JDCpA>eI%49#f8#?AAtY{2ze&I?Zw5b3d61$+5@e2?AeDX#P2d;fz zKt7PzGPY(ioZMR3Wti2Itb@XJ 
zH#(l+=NDUdjF;pSbk>zVzVaR)N@)s7eM;*KS^nbYr83N#NZX*#H=aK^7esWFgDHb| ze2@rw_JWp!CP_BB@GeH#*dv9pvSZ!>u{ca~1fqRv%I8dwq>HS1=yF|FP5PKQ`k748 zL)`Epi;LY0mAmS+8v-el1JXhYXy&`Wo5!|4dM?ujQv*HPnXU)79cys^4r z!x)`k7a^$GV~^6wp)o2#-9x}lF;Bnm!}imFI_BeZ@muI;myGcBaj0LnG%DF=RRbZE zbORV?xO!);Binjqf*;*(WhP58OZXU}Lh?@2NmI*?ZfiEzhzd&d5zz_ca-0vnd)g`R zR!2;0tcYx`C^dC*Pu?=xC8dggvhcV|W_huB5C6uuR6)%- z&}!8qf5b`~4Bkb%_{WV|AU)UQ`LCs_rd|Z$E?Uo<1J2H@|VIxbP3S+#l+hlf2~mz0I1xK_TCgbpJG0JY&zfd zY(ipa{PfOk>$AS>5QXpJ@}$CPJT;ZOCbl;!Vk&aFa!G|I!f}iV?)~PIW+v6r2lH>0 zxWeB?E{Gddxf7;*wlgRO-Dne>J!cd5!d?XwI=ufDeJ`Ed*1R`en-c$>%ln(qB?&F~ zfOS$*kG30zbRbi1lsLf+w|6mhF)bN`x2$A4`nU|S_ay!UJm8wi;nXOTv0JxYy*it^ zoXL8QskpZ|W3*YQ>uB&(G~XD{_HD`fP*Mi={oz}4F8X%9GvRq#oAVAB%h}uD+Q_#; zf(EHjZyE02o9A5}JH>Q~bssK$;uUNguSkUX6=nMcN3(>Mqfo7~$C33A1~8m;xs#r@ zdPsEhxo7BnGC^OZ=4yCm5h_-#47NPGW&KLYa~sl>Qr0jgEup(l|M`AyZ)=x3!RE6g z2q4`bml_#XgTQhi`A6u+l6K8@^>cveL%4Y8@3d+d;?}nhY)2HKc_*1iq0}g!WM-yL zqn*ClmX(!(s5_!HE>AC>OB7ZL6nb54Z|`6RSZSkMVRuJt8(rFFS!dpUAz=iQTkzr$ z3~=heIq5ZXQ|g2E3DSk}P((!>si3G5mcP8D0cc>G31P7+7#PyAjD}e?ZkodqVsaGk zFpi;~VVk&F9FUQ9Dd)~?cL9U3t^efm<%*HAGK3UVrni)1It&s;33i0-JPZ(-F-{Bh zZauOZT_ItmmT|XQ9Q8R-V5!>c0yflEERhdV!% zJodN6Ly)$xc1RkWwlBpq#KcqUvp6U?b|9H7Q0t_V3gh-+dvFAfR4L-ui0HS)%S`op zkY)frL$CUVv{?g=xT%U&p?%W_!(l#xh)@_UZm3!|Z6Q_VTGG6f+`NiPN4esUDT_Y} zaZr{8dCt4l438wDX9zawLi@2cjLx?guErK=H`nMfAvgCp@{BuqgT94@5 zsvQepOqTwp2&C6S`*F~q`->#S0~P5wE@A*NzcMnehDz%_8hpOY9lZ}pEBP9Bcon?oGrvn%@%=hDqCxO$g;W%f38<> z!W0ipSeFOBZ9qL*r#o78(kTw8jRNxBasA8yDO0Y^H#u(#ggvgy0*33ehu1p3ExiRT z9kT0tF)zLX4gb-EBzxr#ytP*PqvG-sI2w4}4(+5E4l`ZS^3}AK_90~K5Kd$=GxkVh ze36eH%*9tE;L3?53$hS!rj*(7`CZb_R9QE+Xh)7=aV|mxd4#a}2GE0}^xr6$Uf@|+ z4yJolrI*HMeREu!o_ryg3(p><$|W?e=)Vtbc%MWgzp&Ho#MzXHCzyvGUzZzZ%=8&W zuh(p;ov~gQ7tyV?(fJ==CLznZ>Y)USt;|p*kV}Mk)csQFwu81*T3ys^D}pCkMGB&o^A8p zZ=UMsnrw<6@1yn2+rGKz4K9cr*gpJ_d3CW>Q%AQok*YNO4|~%V@nLyUI23j+oIRu75e*_nfu;Jlz zC-30IUN>9*48aqkxxK2K8-HIvsZT3se_8Bdx zwl|vRNBy@lVRK-`iN17&Wb$_x1FvtS@xM0OBcxTiNW>X}1))XIjiPJ0v+1J$R9i3O 
zF1Z6r>lPK7&!Sps=~I=U4cfUFEoq#*e4NP%%V=mSNhouRxn(B_Qx-a*mt(aUJ()Wp z+ryDE!#DXpd5*f4znlY+eUVtqX@0DcvpqFIABr1JFe&}HB-QK2l?iN999yNh)m`db zgr9?&D8dMz&+QQ^Sx9%z$s6R)E;ANyE8{a}k0i7CllmM$#`t+Sw?D%1Orx}Wn6ieQ(86je&=lSV70FN9Tv^7mOTf&-JPeg2C^TyaW1qzl;r z;(bETxv&dVg*R`GcIDGk?k8OF3}deBW#Y^4bBk9Na06Wj*k^|tjh7c;Af}gi>H>x$ zvrogQaA`4}YjiPgJWD&+tTC%@EF(_7>qR%zk!Xx~rOdK19W;O|`C9*Lqe!HjoQid= zn6ERhnDz-)dFj}{l-ez^&0cg^>o;Z_eIMosx;aSN-Yv6mK?FJs-bPS>y^P}?^lWxiAlA>|J4XAO>PZXUa1&5SrCJDL#z6KjLc-JJ}H{!;&ZTf6&5mwdKr8F}sL zg@XA*k1)_^(WuaN^H%`Ix;U2y?BgI~QugJL6NeUMNv#=extCd%# z?cm>5HGqfsZ56$wX|h|iU4Yem628(07n*A82_@yUbNc&uB+mmk=R6&ZWV)zkulx+u zi?kA%bmsG14}mAnW$(gIKVtj}5DGB7AN_6$ev{+g1cvchaA4>zs*~i*{%16#$w@{7 zzSV42uZ$K1cW2RNyeNWe^yv0BLvJJJDQ|q95<;+8wuwR#G`$91Y5b<+pc^vH4arbO>)GpBx9*#_lWvNs_y@% zKpFB5u<#LiX>`mq*LwmDHtcCN)J#^N=!wL1_49O*x}A8v1Mn8XDM4HOkT1MnEVonr z`F&95rjUjHKzU}Q2S*D`ezN@-&wp)!TTM^0$-1k!&hQiJ)Oc-PjzO2%DpWaxR>&<> zMv$(sZn_bQ*_iGLx)fMJCCt2Xali6*nMj^_Xks5FM2ay}h05Equ9d+PrJ0ZLC=b%IUpfX(Z!K0}8#()HBC{^&w>gv= z((izFA72o~@sFvMLe13(Q@+SYZ{LmU!XWN59wCM*UEWQfrK%0Tz`R2~bYISu8j1-L z>2%d_u~n~^Q2!jxowXE4)P!*aLV@+?H<~U<>c;t>si_%%WGvD`Gm9Ui9lZ6(RC^+Y zEJ_GCC%I_a>N=@R>T#jzBb-8FZ)-0e781<-B`(6{7~yZxGiliHd&v@YpCc@7?Sl7i zNhNVUIGLFQI@pL@i=^pR`(TVN(13WNT3js%q4{U?+cxC}z;*Ft>A98dn-3PQl0`W5>0p2x0Jt@s<56jywt} zev1uvqHN$&g5K9v`87CAbh{D z6KTAF?*?{BI6EKduo#UHf-F!zZsO=g1hA86x$3LeERhBg?=(ZI;Ikv+oAs`!v9G(S zy10D&;Rk6gn_H8nk2Rwi0?V&)d=?M_Sw8Rb7>BE(EsA101QM`7_?>l?OTX5dT;eS2a8O3oap5#DZH#t2fKS1ikYABv0z#9T5QmDu1EBSEHcf zgR3P_mRsY34XeIIgNzQtft=>bB*)|kqf$d4ZS?B}=HtaDlE$3Z|FCJXPOnR{v*=t9 z&(8kaZFu6boAo%dJx1%&%4ZzgpWcdbqy-9JQ^?&km7Z0f8WTGAd2yLi?lRLuwM%w> zdbSt^-cHBJB`kx~sU)*_+#2!9RQDk}PWdOyq`Y#%3sffCP6;dTfRCHIjfjuZF6!IN zPhPi+)Zbbhp0gb=C4rm_BJ>;tpo|8S@S{@gswIsMRP;|kzQ9oh>1}sS&7IRMgQ+_$ z3O=v->3x~+w5_0csUCexObWxT(=~b3@(DHDfN~5G$2KKLN~oNeY$#s~Jz$cG?DkrF z(*~N)nteql1y<5HQ27tfbF_D5^_s^Vez^~)KT!H8e_}+FIz(w2%3Q=;Nbq>W?kJ=? 
z?KImKDxxguW>v}i4$!R=%A@>!f)#N+E6}$JEpc9hv5R`L-@gMwVj5LdZ>4OVhAbA= znf=P~p-CSP2N$jdN13 z$!TtaW*ZxfR`<(Wdrwq3qVx_!^3Fx_iP*X$k0wwWST6d>u`_Gk0*$WW<=Le3Ii)j!2vMg zs|U_9g0MATWC;9i%v~THfgrWN9@M80g=sAfxwktG2ep}x({m=o)vJC6@iquIX9jj9 z@&wQ1uDn`c71`pnUq?+N+*U7lE7-a4^R&sl?55q*n;YHwaA95mQ)d_1EJ!U%xfje; zXRl80UBNY0mm?yi)u9Y;MRf;6j1Y$bn}oUp%v_GE)Za?P_Q&d01hM*uDQ(=LV& zMTcEz7~5gA?|@_)b2dM_fZ3FSGv|Eyz#Aa7uh!~azjwX!wQAFNYa_U{NQy>F~GJK)lKOk^NHOtQ?)Fq3?5An0`Akb{x%|LQE$p)s84UQaEn^ z!`yen@UvcpcpI=ZEx*)Q;N)|Iy5p7iby z4(Kl-EIUObJ<$=Q=Nq^rH{nnsl(@cANj551_@5f35pJ^``S(D(kC}U{JYyw#QJ22D zha+?(hC2zTp8+oEE``c&l_Eh6+K~=*+qXlB>B9UbhFE$(0_lEr$CX4rz5Z(R^zYJI zp!ND(a-*K^^Vuv&MS8zmPZpP{_F4ZiNT5G( zqB*g{kS;CLR{sc5j{~dht0hUz%0a)z$VvkOSA0&*To%q;`k{^;PM_6(frN_6ntTUk zM3p{Nnz<}y84Rd<#1EDWP!-=`Yd-e7_o9hCTVPw@Q>+cp%}!`9d?U?Ga%G*@5~tIY zVC-r;%pPf%ld_E_iL1uhpm%Q!0$&pc(&|&g#d?~}q6*M~?|Sn(Zv__>sgx5sDFlG~ zyaCe5FD@z93a)PHRq@nT=IxZVLkEX7xx8=g`YS96pj zRtBODtCM6gJ$421^3`PKJbL-tc@k!P>jU`gIdYvvP8Gi;0<&K{!IeTEb@zYd_{qX< z1W%@Jyti2eEtYJpN+9oma?q{whr=l_k2e5=#~0-6lzjhbks|57u#w2@(;+uWS0FC` zX6>-p%7bHxBBte$3`w8PULXE|zR;qMtRVRL{6mLJilOaXDj5yD10v?(PFKjQl=U_r=MC5TJ)Mf#CUC=%A}e6f*Wtfpf%t}N%E1}TwWZ}}`Nnp!s!1q>>OlCF==I_`WfEzU z-fvaBvH1;NX^za?|GhFU>MrCCKts*JgCT@oY zD1Q=;#_gdg05m*_LN6=m+JzURven__H{`hNszDSk4=3fJkZ}8AJ!qww< z071Wui_DOXMKsMUW6VZXlc;XKZ=0C3i+8wvgOK`Iwm#uAw_T2ouaLJ)(A&j0_7acn zjH0i4w64C*8}AGc7np=VC2J{7c1cTFjjv0jwaii8{eJy!?HlR-i(inS@LTY$Dg7dR ze;VhDfYe=w-7DG3>ThLGN9LqD+1}O6FD6^iSB)(9uBN3Y>MaAY?-EwGtsTjmrjJAB zqMKeK65Pzi>=y!?kI`=%bZH6m&`{a{dag4>&A%B=S0{4R8;uiaHcUAhY^qmdYaZEV zS**Ow&|gsMm!_{WKc;EVOw;1Vf$fI!Iz~j{cr2M8zAop&gDxC}1SH2d*>Q_EUBEL{ z#HxZ8hD7m`vq~*n__!8JPKL-R1a~R)9&^}aXB6Epsa|+}3EWJP%qKVtHk@@u$*g1a zVMas+H`>gBQFpZ34nZ*NC-WfPbUTG6m-)uZ<35V0R1q*4WrQw7Dsw|IsRz<=G~ji+3l4=}`Ej&v^XpFG&EsfEj=W9Bn~6vqMks6aTUDH4$W1`Md)&Hg z}c0KSIT4W~~;P+xI{j)ToW0b?kMN8RjbACB@H zTispxT4Kf*RSYwHp!xyXr3v{45S6wV@KOl0VIYSfm zaV6AfbtwE%BqLy90+qI`U74Vs%l$&;utJ;n)744y@SW@k=(bJ6OStFq*nve1 
zU4x>+Mm)R4C`Lv6sWQnW;uo`c8{M(fasx)@6VcnQjbs-0XT95z278?@g&OgSb$<&z zMDy`=A{v%mm*3muUru=OXJA0Dvf!dJdme9{<>S%xq!VWzT-K18BB1L=n|g>VX6|=aarwxJZ4=Zu zZyYlkCR-MMwK41}dtv6Z_%x3?FE|eZO01ADK#_?Rkd0m;j%D~h z_`AlTZqNo-jYgY?K)nf=|84Y2yz-^I1$MY6luDH<=*|90oWr<8IWzV*s>jRZ2tk!6 zfk>Gm#v$dE^0+LL_PFPWxVU8x^OJq_p2t1b0M|m4nnMW5ND{vi{~zop#9agru_E&) zJ>7p^zp?yyNlGwfX0quSN`syl|@7Y{L_wtQ**U z!XvrOAJ%uw;)s1sQw6EriKgFog+K#G*P+bflIp-QFpd~Ui{=we8-$vccs!D+oONBq z^s8>=oE1KOI9`xPk%}~e=b39#4|W{kY@W91e1+E*wc!3Y#(X)ZxZ&%TjzUykBom~G zj85IVo5y3SMvqd`L~kV>&C7x>tg5(ixU@dv@y$PI`r2*#7IcbQ)E?I>&CERSKy|Ye z`7;bcpu}b6@6^d|#i-h$xT$3MgK(iFYzY!kbu|r=t|#)N8XWR^yH_bDujC(qf>r^7 zX=;eiCOM7(3MCmNfS)c>*7f@Kn+JE;k^_wMXzG&2< zDV5AI=JqBQa<{632^v1|e%>j5jaYVGd}H%>z6oohlaDj%qbsHxJ1e;rNju^h;(X(Q zafn3Z?G@f`#0tKeY0sWhds7Pb!Ao=H`n$aCHedGh=4U{_B{k3wYvaD09$4O_1DH?f z=}@EGC?IY#YlJs}n5?_YvGeB~NQlbDeaYtSUvEp3%$lkqx}tv51+nkzs%%415%*hF zva(o;N!AyF;x(EUvvQHo7;Yx}H$IAT} zHY^wL9q%SN&!b!u)SuW4(n;Ej1uN?fHt24$D>*jEjRy4Dw5#8WckHTV;CZ#WZcedA zc0u*mZMv?TLBBmNZ0X#EHd+r)ITZeUCf-yqi3Q~z{krsZ6klXqZ3i)czm&RUiYa1g z5rjk2b@=uDbz zS%{8sC`{cRk5Gy}k}jp~Ey4yWvucg*dS=?YEJ0e7_=d4^cH=q;kfckawGQfinl`Bz;Gm^xHxRYgyNK&gvuk; z?+xkAvb2pyG7$nA2at=R6FeR2KMu9a?Qq-KbMV~P5&IZ(BFvm1-0*xgqXl)>Dpa+h zk8FN7jI8}Ay=%l~A5k;I=3uaW^f`Qz!F`|4$Xeh}hKIJvAkE#T&u<;QJ3W|D{Q>6n zarHhoL#w-0I}74KFMMSZM$#43xMq5@Kl`zWq(8as!MCIoi}V+c{g-?XP?r|>aFti2 z&M1GCc^>p;eJ2kiUz(dvSJ33Iit?@n7S25u5wNz}KN1oW^4%U?fr(yPIQJ3t!sn99{DhW=bR_y0c#834D|u?Zo7N;RfS8ff_Zuecs^) zHssNi!|@A4_^+M6_Asj>^$}S6woV^-iR%89f5@N>Ed49p;E#`RsKj-L2t(dniO_Qy z_>>2M#H(hQi9SZ@OrQb>Sd>;J#^I!g9ya(}$PctbRL*s&kpvE;Upvjdn#mJzbA{Li zGk43y;boY91yJq&L76)l?WAk8`4k?yePr1ml{fT>_FtP$3O_!MkHFh;uPQ`&iOg4g zH`#;K2-8(~oZOXiE`YS8@Z*5fF52z=Jav#WRtJJ8aj$WmJ%fHv{huR0d@kcK(zL9O3Qqv1OhGx{1FjVQ@y&AfT-23JROuQ`;tmcfD|J*zv4 zS=;Q@^{&iCSxg~D-;IN8$cyogWRuyKwCX}w$r>;s1K?6qd1v(tb#>?YDlW%mNS$DK zN2t<5Ll1!>cU*z$-Gu=Us)Wl0!s701HD8x)g?U$~K7Jbl5iCsrd-=}Q`!45G8CrfR zgUHupo(s&kW|;5j|(zsY+XMk5->xfulQvqbgjeKLIV*gl#Gy4|F*N5 
zMMsWa$iYtyGCiFO}oq~s(+d1yDoTQldj&lP?X<5UTWGJX4^)_qyt6L$=i5%`l z@*o}Psr>?(qK)S7ZcKLVXX|pGZ1U44^s&)vzKZdD69vU_CK#1i)~;Z>%8ht)w4Cs% zVulRSI^JCpfSi;E+J?gP$XojJMjQO8TEe-+F(PiT44d+c%h< zD5#A!orvU=Rm+_}N4K2vi|8E-b&m&>ctDVE5HB+T^5>q1Crzf_Y?%1x&qGmcDirN) z-_|z!_I?Lumedy9Y*R>uDv&u|NUGw|9|D;o`++c}YS7ySvudGH=R!&(^FAK-DeeSO@#u~R_!tu&tuF+Jg`60LqF>s0v%T}(O*_3lOB6Rex2}@d8 z;2-#ttxnzI6yptDxWjQJ3eMePj**@E>lKJ3aZU+&07T3Z4wx%nHP)xk6=;QgDo6yA zN`~!LMWShmfPxD_vyfy#8I0y|zA>$?Yjoz8i>!wEAcKmNR8I;_xJJkHP;$%KWN8_N zigeLbTO@`RxLI=7hG^xt>0y~XaD!t5*Kd-Buv4o}aExN5 zu%(n_!watf!x6&TVvQWSNnyB#F2|(Vz$Y+Y0w*m#i!~ci^_vgq_fq;$);&shs);Vq zC!AvLUMi!XEVB<7W1K_Q91oCX#~49+3-bDK`|0RkMOyWTKf&2hS89ToduFY7jqyFw zs=BRrFBV;VD{(wiS1s)~;7nE8a)Z5ZNw7gAsZ9qCU8I#Yq^aRrjnT~S+c|F{BiaCi zrXJT0l2=K()V_Ad4oN>pSyf%6lcf@JcgU5;5lt;@V$lL^rl0h^(?%JBwZtoGrXE$o zTCK)BP4dem{J$IY3+Cm!d2-PEye@oL>9?lp-y>V5HThM_DxH(1jvp@p3-y;(xwNf zn5t>(zeXCn(pI4B3Q0KDtxLrfM^?3$8f%5K#2Ghlfop~SXm1@wcjrdg+ThUQqj%5Hr;Xwz}c>!ZI>5SaFQJeQPq}8n@rVNqt zbwX7sv|IHMXVPhJdvg}NoWIfbtG2}+5dQ!INo}6K8m0HeE0efEe)1$$mB(hArG+Q0 zx~Hi4rmV4Ns>fD(MB!98bH*%{#R|HX^NeANhs&TbEmdmU7G4Qv?s36k{hc~-{s`u* z4azmQbajnvKq#yIVseY-%IbqwToV)}Xm!OjamH{(q%kZx8|jQGz(xYzhuDg$(Bc-U z7oJc?i-cs0mgJjA^+@L@D*m=>W-*AZjpG{Ai{cOQTwNBs6{fE3NA8knQRAO3iS(>c zHf~GTpQlG`;yS*&@)_r7K_1R1q}Irl&S5mGSD zotlooO!XF+7a~xfqsj;-@g0P1nv-o^A5b5hp31)5y}9WhQ);%LY+uLz-)@As#GJUL z9qX4B?ovs*JH>aql#(AXf!dB1{_hyO?V~(~j(=RM*A}cbIp(Ku$um<5MWbhvu5LB4 z$D`s?$O0TC`vxVL2O_v9K}B0Wc881%SMh{Pw1IQw;{XT1$4UG2*hYv>jaj=n!oa<8H4Xt ze$}aJuK5HwfA39NUyQeD5H$Kp`6qn8@GWEL>k;U;rk5Yq-j5pLZm}E>QL$xlhnUZo zyqx0>!l+}5U^s(pNxl|~R{`5Ld|w56s+nNJNe4eGolkGlHWiBMGv`{fsH$M>*{`Ug ztz*{5>=yIPmwdX|{{Rt*;*D`e-k9-|H*&@;Y_7PM%P&|NVJ=;*tGe-xFv3-S{{S3) zhP7n=mb^;EqXszUA?MpP!uW0t>o<(-ZeSZ}u3aO1ura(x31a@8TGpH|R^7jvD8v}M z!tfU^erdw&UoFG8IrUyM%3um^PoF(4bmZalChIR-_Vrijxk)6c-vj0w*`NmC zoTg81vSFIhjAM0SClSS3;TR&%(oJkhRk)WdQb|w>{z+IN#E=AEu!90A(0q3g)%{xt zzG;bCoK5Sz0@c#iAndrVMe3R`J+d z6In{dakpBEo#b9c#B9t&-a$F&JBPKb`9P-XwUqOF5)DaZvP2OkJ58=k+AWwiB~@u9 
zLWHdDwYRl1P>_?o&-lZ}#z2DyF*;=3RL31)ZZJ(}!V_wX{1M(B{{RbJ!&YzFv3}L# zcZ?O_9yV~@*QB=q@oe7?^QN;_Omkg4jJD^bB?lY1zGBWSq9)|Gd?T}}Q7G{OFtrMj- z=>Vxx0fp>G3HlK2&o8C7+PkY%KWwHX{bq|ja;bj1$`t@246M57APA6KUiO|sMvmg$ z7vf1GT?+i&3hxo3f|9DbNzARp9mysH`1liSU##m|=EZn?!r5lDJE*)xANhKBChz^UNr_!yoTWi z0}Fsl)`8(Al=Bh|*S&N{rB0A~N9liAHG2ltOh;X{k6PX*g7j;J;kuX6<_(R%dA{IS z;E1@4D-38{a9BB?{{W%08f!K*AE{*PzD(!ElKq=I!R zdI;I7MMi1@5t2!XAc%lOktFB}bi1h6O(0y3rH?%6i)~zHY-7N^xvHdUQ_UJERWCRC z-#z{ZB0Zz*&^o_G`u%Qyz2Bl!8~RqJozdIs(OQVfn73gwwQEWHmg6tu{3_6TYIUC7 zIey6ddFbl@07Lk{>C>@AZR^{lz{;ce5qS|2AY9}P^fH=u`rf9;(3^j!XS24`Z747C z4eFFFPnu6*KJ<|$@L)z_;z{SCi(^`q(ybgdbriQ1SPjykb0_q&W;sdi%u1-G%8Sy4 z32>;Vytg#0Lh~Sj%12-Z(J#`X$<@Rcnc9Olyb}UFuzScEXtvdH6pNZG6BJl-oU%6O z%40Hgy)?rdyKIdKjoQ_%)kQ;`k_G1d@&`Lqbjov)>b|k+A{ed}hH8IHI4&5=T39z=f~}TofIK+H z2e^E^vTEg{kTWDkSw^0P^b`Fcy(n};@^#m*la8dVs?0hx=^GGXM@d*)WmZcX$C_Z+ zZsibhvkscJ4m~p%X3x-HK*PZpeU7rNtt-1grs+XN6_jg+>Y}#kBz*6l5Fw`w4Y^u! zI%3R!6~x*;c;RH7P>J(&Zq(s-Njur$*sf6k0bvnqNX(rvYOa)XQBJD%H-5i04)-!L z<_yS>9SGY*EWJojEJB+K0IGld{lk#Vn~q0G`~w@r*u11+^ak~!tkXvIZR)06LxDbz zJTT!i+8pgVW{ptk_ZQV#pDlvoYc*K4Rkc(kvdINfxm}9o36`J$b21~9wAo!x`DS8R z6UI*zXwwsagta?uafH09g*sOxH|UNY8k_nQj_sqS$aHn`x7BQ2>UzJ8vB$M#!={hF zts2hT6_#q-#qk<@~ptI%p(ldjBgZGwywI>b+v;O z#-3uho;|DB@ly@nv0{c2@xzQYsl^}E)^3=w3{jOW)p@X*ILgAE+5L9<m^?kTL*R42TXTwiM= z1c;dB8Cz$Tl||pbcCwVWF|)d#)RC7s9r7dDB6MPk>7{DkS6HGIgGx$~Jcxn`21H2^ zCPm56v`rOyvV8AQ%zSpqJ`tu&)$SUnDbrlgEwMp@Rh;8{Xv-_i=Y2AU5*)Qoj9I-( zrskD=-tTf%NmKI@d2%3-X&~k%bcog+-VoikAl0(CTY-tk5MA#QDKstDC~znfekl59 z7bM;cX-#(VbX={8IY)Oxmrk6cLW-n`3XGeF5&(cC^BEHajWuxu#c(xaUBJHcJab=!1 z-+4jtNUDn$s5A6t-c-+wkpLX>A3j?7(LN!o+Rj)~j~LCc-Yr{my4Xl9xrra$a|R|) zI3Q`S8qbB%Tel$`DO78qs!>^0j^(Z;^DzPeci0aj+%%`K!kpuDVt5;@J*w?iMYbmO z((0p#Z)?ENaIguTLAjJh5vw~urb|-+U!Di@`HI0W3-^q@#M$Q?#!Nk4< z8-a*SQYove*EY_I^6&R$%kI!yv~#m+IqY^B@VMF^vM-PtHs3Gf{3+9={{Ta)NV96)>7ld4)d|+2-PV0=?pNk77XXU^8O5Iq!PHS^`|38) z-k_ew_jMPW_jlgix^CCWf1!?wv5joj#d>AIb;n!vQNKjJA~w}6UDXHJW)$hyNXlUg 
z{{Z3o_^*_4XASW@FAK*H{{WG*X)kyHNe7+<#=0+hmzNWx17U{wC1kKVs8KCs!x7@2 zEV?c6XBgVyxNcgyZxsj94Bkbw#n`4AizN$xB{Xe{QvU#V<+L3uiG0+g3xItbHR41E zktdc@pdquW+}BHjFuIYGT*{Umq>sL1Xpu8ITX2j$Rn=fcX;6zVHkBE70WxDa21G#z zIMZP8%vAD4LW z^;WHQg?D0#2PHv{SkAL6Lx@#o$?k;}rWQ#R?sk_!1&MRm2)yTzv@(uO z4z`xV)lk(lwnf6(To3>Rc4RqeAh!?1B2uS)P?K6a;kwG*lX7-%kOWAOLVmGtQQv{L zyD6xMm3;JHYB^lTDCRAtc9(Thh^ahBV!i1*Jg2r|CT*NgJ&Dn!@rMc2Dp7Azpq+Fl z9f#xY?9nLeE5zL*qIjaTERS1jdMA6H`#6Z_*=Uoci5`;NB97!ZYP}VW;jKi1o^v?| ziO-f%mO{oK9_bGdc*SNbsB29{RB%k6o=^?)`&{<90wh?#Go?gp2GLuKbFp1Ma24OHs>M4UZ#YCM zf5Jj-*=+Wp=LbU>)aP>l0BbI}m>`xQbEK_3#V^%2J5^mKO zEm$z8aD$9j6ix2nC;W(Yvg5$qtgZmwlC4ns9}1) zit0`l+Z4yJY(HIAV8~)FmTiDyzDa+KI77<*RK%PYtl2bo#|}Hm!w=vshy6bE8PGf3 zKv$#vU&A%4QMKP*e4CBpSmRwuR;*UNO{F#Ha&YHZr5K_}VvCGEwy2*bQj&#AD?0@; zl9l@pfNC>bM^+fS*IYr(C^t1?l{9jaO%}Hxo?NOT~>!zv6WV#(_d zPe#xBH`ncV)uox^%rT4`8pIUDnfd2Rr0FNF5?{&f)dz)jmO$))dr8wo@vK=Sl*G`j zWe~#a>L?OgO1^x>gpmf>i4i6vLA5_uHCG+hCu?iIx4G2}1f_jo3}+NoW~deT z1TdiCw-)OZAm;CaJfuX@6MDWNqVy|Y_PsuiXmE~2UfW(Wxq@a4NYcNoI3gnte_HFc zt(e2os)n>RRnjUIp%8yaHftc5>=qhfU0`UXDpk37)$?ZItCbc{ev+(JX~avCfB~pBesM7W8)Lclz=^1BYJA;)dEb>pU2tTeFnGrKHGibJ~?+aeP8&7 ziCJpwLSl0Q@CP$7&N7jap8iu!g)ZmUGd}+Sqx@doI;NfuuC*ImS@PNOBWh=AOyv`# z+9~aNsq9#LzB^)Xq_Z4767qxPO}YZR$`L2bg;gOTVJ2kUn|nr^Nzs=_sAKq+ABLpe zB_B|sHKeDS-hmh2^01gB5Jj(_(dWoVCHy{uR+ zu%%N)uvM`)hltl4G`UHgHI`37WH5XlaPldQJ~1`pQ>wwAVojRiYc)p*e*MiDdX`DD zs*;f0MS$6fH-jY3n3k}(!yYQ@$#hiVZ#1=vQkig!*a-u+#9rq;G6t(SV#Q=P%WO80 zxm(Ql@g8Al@bB=_-1lL^sVc&)^$nV&n$@VZB&467n6%m_IV2JdQ#Gr2JM3>A!*J$4 zlZj%kxI8@<4$^Fjt0_D_{dbNVHfxb5jLI{%mgfzz>uIm;C zgWbJpZ0XJv*{kue(fTbN2VPBcz!1LP~Io915X;WHcByKA=H-Kj3L9hZdwo|8R zzN97?(zsf>&hKJ3XCr+1_85%gF`_)a@~Wu%0)=<9b%&k5?)-P?D{}t;7Iw{&G&^~U zEa!5HDeiYl@k?${Zh}9gmw6N2^nQ+#k2PkdmA0)^(s;ARvWsTxERxNZNo~kM{`5Ef z6ap<^Ac93!uU7J(J5mUmET5V4-#b6;r`zA$Gz{W<3D-d&S6zu8teXp5^Ap*m#uUER zAwdcaiYC|2H(|XbYlZ=U z>sASdV%XIMZcJ(JD~Pp2`%@dy`ZTZDx|JAytUFo$6xlTq z-7#7J0F%q**$Ia(UGVJ3tTMbo({g@KojYQE730ecNY^~yNB)$3&+1MI zKZwB$_^>)7v=`Z 
z(SPZ2)Ez-?_YzlE8R6~$XzilqFV##U7YOkU;ATzkY%ECA;cw*;f#CRVw&F-OO&y00 zRZ#;F7D&m77Ogm)xF^MX>CEpJbH^GS!}b#LiAC+-pnB&0;sSu zXDb`nf(fr#u>^P_ia83r!T{N=KQxx;hyfP)0|IPMC=f=Kb+KftN{00;NhFGQK=Tn6 z=0TeiGkawdRxsg8tEQUv>M9MoIgtX>ZgXg+WacMHl-@b2n(Cz0MH{75<@2dXf!6Qm z_GxU@%wqvT$CLGis;XfNB&&6|O8M5Vl>>0~2rvs+nI=8LVv=x`E^#oedv&Mg!jd;C z*yKnMCTs{Cgn)FhqPup@QYp3HtROh0M3vm5H#>~3?%4~B!HpxURokkbE`>Zo$iG*7 zDHLE07Qi_sa8FHNgQMgrQkv}u~ zKwSd_HIpX54rT<0#jA&{Qq@qG7lrRiYai9j*n1?*h_qWZVmipEsMM6JB89STxI*`s~A75U(2x{)`Wvj7rqKqpOv!e8O< z8KX*Ky}Az=88k4cc!`&RdABZrULtf3ImnKS;Sg&m1l*xqN*_kXj>1f5DBS5D%6VG zrBd~f^V4z!L`OZMZKla;KC)sgQlb7DqD@;hD7;x3+J#nY=dvyy!EJ`XO@KNH)A!XQnafC!~|FSc|VGH0mywmD7@npEPM$8yi~EY)JwyY+=&2 zzG`<(8n1x#LDD|5;2Ni_slUc?hut#c`o!-UZNR$2Tcd^Q;A4i*2-{cqcbi3<&+p;{ z%y?g}ENe;Q#?5P%jMh~I*7Lw0CuyXzD+T=_1&5{FlOT&^Bn>_G`Aq6|ul$|c^eNKM$VvB0UnU(w`AYONj_T$m ztk~a5m=+C%>)t7YYO|HK_Z-5X^jV0%I7R@|5peaH)yIs(=ifeXFCwvr1ev0Zsci-Fk@1HAia z>$R#YY4m6vl)*C}e<+COVW7%&Y1ya4+kXE5VV=|5q-x1}98O-@CZjQzGGg7C=;HqX zL-@bx(2rKCDyOpl0B>|ejRfMk?0@n}pZR-!-5;TK_H97h-ruTsTd#NW&z_S`6kw;5 ze49hGT?U&Ntqy)^SNrE4ki?{aMd7U(&9bLYXI0%e>5EzM2X8p7 zde>+A)a7N6L5i+iVXZK{3sr+gZ5}fJ0MAK-huZ#sVk#p|bk$9J;a=3yJ|pq%@X^Fn zRlAavN^H^f=&*xv;Sx_Q!OpEMzEE$&Qnf1YmY~YDrxhfwRxm|@Cy^t(=vzn2Qk^@m zF}odY-_I&nn1C~%yu@V!HpSGfw#8NT%pSs0Jb)z3lQmoz0#0K=)x;G|&hBkQNFZl% z%mY1_$Y;+&m~WjX?PSVAhG_5H?-fNQH(Tj3z5Or5Q`vlmlTBY@&Rg)vqO$4FSya+h zP1W}(2NO4l0&*6}NqO}Su<8q%F+;wt#*LlGOLTnjaeJ92ax?ogqfHi0oz~T8*S9`o zk2}nv{QPt6mO~YPN@k?N;E;-ni9z$qNG;c}1J)oL5d`==#n$ZYmx{uWD6ZS0b6Bno z`yxlTmWC~gR<;yrFB>UaI87GKm9nW%pWJfGpEI>LgP{!zJUz>2_3(~sSfm4O?TWD* zh0M1jg)&IU$1-P1ySHNz71LE=MZRrHH=C4FNQwG4wd_s#O{O-tER$BWRB-LM!_!qV z9B;R6WC>jXG20^iiHk@t@ysU)S!yss2~T>R3#yVk)e^ZucOO4%#rCAwT1J$I7@cng zWG-vlg$?F;o7ywBbAN1Whi??9RlFjuR{c=^9?x%ew1`FPaA4M84psv`;mq^YKm*h54Q-})3&@sGkMQsnGvORmK^&Q!!dkY z5XMnioeuUWM7w03Dz5Pt3nJkLO7Q;x**sHz4eP)!HFgkTHG>dx?Hn-%Pv5z3hbrq0 z)!YjfC?e}}>UZR^Mb07Qzezy)7<J5C)PaHN10@8=KzkO?OFn`5U`Y>k#XRg+z(?78mQ%b1?s08-sv 
zbM}8bMvZ77UZ%&WHo5Ii$-gZ5Y|PGtERNJc6A^rf<_Nq*{(aHqVN%<0GBNw2-MOC_ zod~CDlr~OT{{YAN@{G*reQHuRD$c&Kj?2?Fx7**{q@t)(Q-+GES|@E(_cSV&^<{TL z51&7x5IZ1SzBIhyy2r!OLk9F6x>8ZT_6objir!&$x4?$haUG_|mGe#ytd+J?4R?uR zu5lDMxLbN2VeiHw8l?Sg^L88bVX$=Thso&x+{MIZEkJPRr(A0Rz;FORwKqLxiOX3Q;bfO3W70Q7?dV_s&xDs|`cfgZ5X!B>}J>p5&Oqs6z z*~sk@M04%5+Zfk()opeDw;{^+wWI+(rZkfngDPpGj#*V26W6Iz%0UDJm4)m%F zFpVkg)V}+3x-?b=wqiECc5H7v`RSH*e_!#9TEX>?0>W`#0S#_Rt87hxY|URSFvHgi zw`9BLjhwk^)vKRNXTis(&sqr5vi|@ac;)UK-Ti$+m#r{uOm|ox#;e?zH#{uxo8_z6 zCu<`l870?9z%obYB-;oApG{0dO)l@}P)-NCat7bpfw@lTm`tiBpRNU;S z(AFz(nyVzUs5>Zcg3dT&!nU}Z35e!)OIG?q!q;vp#1~H*qJ+1S;T4gEY_AM=JfioE zvWuL#s>2Ay8saV%pi-qcdWpBCMG^CEm0i7-ZnSY!@WVEOCeZ|Fr11B6lY;cM!H`)? z(MpJcy-bM>Q3CfrOK?19+IWtMp-V;5OKv~@NV=O5f*`_ z0LFS*#TLR_U@r!%x2FjG8kUVwMcyFkEZ5do-=%R~iE)Bs7+$gA zE)i>rA&Y9>4UcN$7f#=@!|}{j1A*ern}(b&CoGQ4>0xKzQfD392{NitP_mFCM1W(0n;9}cCb};&kAkaFjp6F zjTiGsttWzCbILzVBI4sYBH4qay(ox3WHFFSV9-oV0^i+=WBL4uFXW^cViXIPC)gQut1EQ&*{TxXjIox@C3;r5RVoegm znlN(NRH~)QM68&d^H)iH`Yv;xS=1y%7#4{^5w(0vV=0nkc*VO&1G;p4hhn-KT#;xQ z5i=Z&c$=6bN#^d8p}Sgel9m};RaISD*``aH)hS2|FCLT{poK(%^o>R$Mw1Oys(`Z8 zRvp#Cq!DJWuqD1#Adi!nF1bTyMfku8B=hB^fqGn(uwS-vH|#2BwMWEa^VnMaI)a!1Oj$;uQ3Zaake;5Wrir9VfMqothu?X=S|3u6aOY-)B7+@jEI!Eu z5wbfn?ILArc}b>?->+Ql{{V&Y*kDh#@2dT?%B0j&o^~L|c)7TV5&*VYFs>n0Nt!m* zwV_Xc$K#_FId{bEZT07`N6eZ30Dn2;C`}e&2}BBS^kJrrwpjSFPsfEt~CU&t^LX?`eWkc#HOw z@r2q9gxU>&Ajpf@*puBl0F%m{kOe?Tyj}^h&nrwB69W22D6$^0-PS(7e}$slz4|_i zn^jY_%WGnLK&$e~ZQGuJ(yEjVs$=JS4ZhzGZkbb~?u>QorX65!t6d-9n5MYnFAl?O z8wY5!>_O7RWW|d#p|3s+B>IhNF;h|f9{T? 
z$Z6wZCrTQQ-L@M-VLKjKNG6a)R=JZs;y@=x5q-O>)ZO#7Ug)u(^|9qR-D+K+o5LF+ zQ?*~tfI)ynK(PZc7CGo^SH*YzRRUr}Rwpcs6A?XzARgT|ORf*CZ)v3!5v=*Lj@@b3RQcY2H{t%eWdIbue~X2s~tz^%U1CXde)qG3UuMN2Y|5RwVB=) z!J7D%DuWgXlf@ReYj>{2a4we zkp|h4G`L}C3ennk3Zp9QR-}y3Fh_XLEXBKYq2tasj%!Yu{YoiMV=z5IKJVmtX+&_P zUaFoQ-;DjAlw-tGp)Ms8DmnCuXrKg>^Q3Z)%*304krr!2Rn^=283Fd61LN&F54fwT zR@XfCd^`Iyo%iZ6knsrmPv$wFH*Z8+@D|__gN@}4=rnnT3P^xdAhjwIVGIW&{)}qZ@l@(61 zq*GjiHg-;C;th^hi;;1qmk!03Z0@zUi(&y(l}*a2BPyztg`sO0=0UuGOy{M2!p55F zUbUB6y4BtalvjGPxmXfN9P9{?E^SQ4Zz`!xs(I0xW4@o~+nz!#Ce_tQCA_5iig-wf z_@8*bgf6PMlAFb6Zmwe9YQX2qF8P8v=-M=zYLihvu{+-!slShhhJ+}O4A^X$+aMlW zqa!b50t`jsP4w=k9*IReCdr>1t?j;zqVAKhUQ%o3FHH8%acLySaOP#l3p(|yi5_y~ z$0>-2IZn;HWa#gd6n9Mms`s=d>SYO|AE?R*j&MO8xeD3l$oidp&+yK2JZDELa;tYc z2B9W$IYrDx`DRG(xHHO}P}+Lpr79iJ0Up^WYuZn2=qfc*)x^QiD-5I(Cy|i?2Pl9* z2vJIQhFcuw65jp8!<=?#DvBDDosMv&eZR<0J71QKrl(hGs{Ki~x9$AKou^lvC%EnV z!S0@aj)>mZ^-sJ10Ir+o@(lT8Yq!WBr!89fN%V1t;rN$I8k&x=`4N^chc(79Y%x54 zR-7>7-mqy}sA2%MatkKyYE04Vim8EB7zyH zz(EHtW3Y=@Xfw=Bv$|dGmUx?b6?ChVcT$LGmk+w(*Cg>yofr=c!LR~vqVRPW)+aby zO0gEqyt*3TuAI0@9n%(t<79&HWy@BrIFerQV3C-BbPrna$}k*n9>FQZux(+&@SIWB z7@jeMxn2GPilmak68``Te+}F@YUKWiGBRVoaQ;PKE8Twi6>DEe-7s`+)j#}9)_#Wi zMCfBxF((WfV=C(6TKLSe#kI|k7~}gd6!IZ>%Lc|J_IQ?ZNz-&)F8N1lRIpiPRaIKu zIE~=mdFKMaPhiMc8)-7p=ul8tVVk@@i)?kf4|trxzIU9=6mcS}2(h9D-5kMJpCCDt zA^;%7XO6Ka6wy^`=U!Rod9{d)rqRkYuQ;SP9e`YNsMY@fbkckQd0)Tm zod&D?m&@JQXGWHB)vAtJ8p!$FtxTBh2f{(;p%jr+RabrE*&Wgc*{3P1XJGhy$oTf@ z$*XH>(ed5v&%Zq%QEHa!Rqe`u{{S0D$1Me_msI_Kn%;dL@3Q(6RW(pHYNKMmoIc;r z^2;p<3Zj@M^^$CICf6GvkpglP1GAA_dw{p1R;d*o-2@DhB$0fk^CIFR`#KJzy5G4k zUe-3C$xvo<$nxcxxf8TRXaY2Bx4%4r1Q29I*!BswMCHtlp;?eSzAf3IJ6lyvUf+)& zh|&5Kt9W;H48I@tJ^6p8h+3M%N@BPX8O#B5YnuvScEK|jDO#Q6Qe$f?ix4J3@RQ08 zN4S>!Gk&R{)L>*SnIw>4o<#LPwdXwtsZFK!^7DO}bNjyi7ZvR7NUZq9ykkdG`jiNu z0|1(M+OdRfKKf5>+r(8lQ zq@_+#-qE{LslDAos-`y=?d|queFafM-PILUF<1C;b1+Thf@#Z~)!UbRQ(MyZxhg>d zG9p2q$rchhTEj-ET=ouo7Cf`(KXyQ9x@tsIUh;)@%toc{(&zFaAjT)&sQ*psg*`V&@t1@kW`DQj|;BwK#R8?hmsp}9r^k0=< 
z#AMHW_ZyNpqNPz)bWufrPWSQaTKP@SJqc*F4(c?zD^_bVDBV~Kg~wzG5C=0kojW*y zLu}Ppq2BY+lTlQ@XzZ;ko?=1owZV%@`9(@hCtlTikeYd!NKdviH<;xBL1|1)bqsq%?hPPBKqNu2g#P`Xfo1}6U;2b_Us(5kiRt;@!nY`)Dke`Um6=QB1L z69y-+5jg>!x@oed70%aj6E=yP5u5|hkOxYGs#~d5b<=o0r!YAJ38uy-@Jx}U3xz2a zb!OhY{{V~j-}1hKRSt)!Z`?^0d%bJ(nH`|rE*s9>Do7xc-VM92NzMj#zL9^oPJe&n zKY!n7!ziWit&jp>#2K(Zy??97+H|c(&sVeUj(=`_{rYoes%Y9ghJPN;{(4ALS9kDx z;@<50ymXK1)$}_Qui0bI?C0#zg=*_jMLwrmeWsD0c<|_-=xBFS zvMQ1fVE}ga2qQS=JFTHRUYE3I=Q{j=zqUR*^bu7(ELBMTU*dlG`|YPxw=|Dht&8MF zej7h*_h?lLQ?zON6_3aHW_|qhXL;TB{{SsIs;YN%Q%xHc`O{r{+baUYwDb3SbZI}) zPwx-;j*d7@&kUZ-i^W`o9RC32l5HY8c0m-ZohYoGO;N73w_BCBt8ZHFzJG9nKl-oF zEe1(_n}TUHn6bD#{AoANCJnTAh!L{)G01kCK^?Im!8uIDwB-m28j6WBK8>Imrqkj@ z?_hJ8&_x_b+S?Lj&<(&7nIbP?0&UEWXsV{BI`x0WSNdq0Q5|}!+vI(b`@Omsu}vuN zsa-Ah_jkh&o!xG3zy8;jk=#zt>h}3({{Ys};rZzQ09SG8dq1)Dx#hFYW+MAKGE}Cj zecvB_ui|t`s8bpDMv^PnLxpImFF8dgtwMfgBn;$@tdIJ@&>5%qKBt=Q`#VfqLMo+B zyg#?e5y!-#1!!Ss;H=`al53ZbC)C z8XHk?>BEOb;)_<3WSKTE6KvgL1q>M#2XP&N;vU){W^cfTRlRGrLR@H`8jT zEXcPz@{I|Bvt3&~eDm|O1GM)Zf02X4oHyUs*XqZwf2-YnsXrFd71^q-sEX(5-5`F> zei}g4RaI3t8-4!n_|Gj7BA;8`arvD%I^M1G_&I(WH^cp1x;Du8%R680zK+!Hrk9du zvJYW5Bf{DCbI^wkTDPvW>fMUdAv*p(4|Vhtg=_Vy{{V~f&}S6WF@24U{{YN$@xO+T zwZpcIXH-mg_mV}2BQ)P3eH#h5kKCWB`5t+DG~snsMnU&gKKp1+AG5X-QoV{Ruh+OK zW!sl$ef^wKnWuSAUip4MM1DC&o$YmYW8!B{>Ds1EJx{QGzaM6f^V(w*%Q!aun2N2- z===0)nt%tkem?CQR$JAyK9*vOp08wV#%6pb1YSW8(Q2ly$~&j4U*bSN%5;xU^uI)) z57nOTwq3K!tJwoFp&eRBRCDY8eDdev?s*ld^{F9A2W~}X_2+wpE%Hm2@HPB-4Uo#)P!j}q7g-zGDR>) zC?MzFd5%J^V8)5B*Bf){{(YRhIs0@*{oeCG_`hfOv>%GA>iwhs-3hNT;rN|8r-z$v z`{yrj$K$7URW=!Y`Y}r9W2;5y>kqT58AsdQqqL{PZ|mV2q#P_3rLG{EQLS;7`4Jl? 
zlQjsA&_EDfTW5HsDaOaw@A%8U!%q2iW4p8L(FxJBU$57ntCYZxSa$yaEi;EknxWB_ zvTC-wV>;um@AXqz@O(3k)hso>79ZjI#OW%b!x!t+;RA*7WQJ^>N9o!2oHn|t(z4sM zyw3N`(4za@(pH`R;CfKJ{9Zg4DOXT)okOp^Q_Wkn^SP4*M%ADIOp;{oqthXr zfj70p(*FCQP~dvuf2nj@=UjCc|8?HX~#Rn&*?&^rDij?A;}_UO@H=+#Gv+VGMD zcysK&h+o#k_I|mE{r>=npKgWIv$L~Nf80!={|<$oo-uJ{{XA4@bcGEo-bC;sWEvkSpyYf5KQCUim5q?2fy;j=-%bk zEb=$5JHUvK2_3O`{hbj{3$?RO)JPouJ7Y(0P~y&BP*c1>!@rw{STpM$|3&GGnAb#K3 z{{VK5=+!k^>6;}{`jA_L?Vn$berU3Do1eVIiAj1G~26Hf2;L& z{f4Q}Kj)`>JvCF+tNt7NNvrYE`f4;)wf-J)@!vP)q^2i|QHeW7q|rqkgpy2l9n{T* zg^4qkS%M}bMpfCGXZE6U<(`Ip58?bY9aU-@J0Oo+Bm2GD9G=xxO{CkL0h|u|T%Tah zP#Q#mK(48j>M%R3XWnhw7coDMW0<}=^|m35zf%)@mKBBI)H+clQdKN32E!ZR_)`*D zw{3Q~Yi4Z)sf@vzPLJUj)ocxGJ7C)xLccT{y-Rvv5eCr)!z}p=&X;BNk5};oOC>c; zEE#TVHg>vP`?9$2(lKh1uF5S!KRdTx>w9oNcbAXyt2k3?qqSc1?9K`J z_G#Iwtc|=r580qDkg+4QnuPZo({B=F_W{d6=*al~14&)ibw2Oqp#K21R$j~f&@F$1 zr$4&?02lpQJ)$Y=Q`N8R_=xfF?&mr@58O0BON6`QB-`s4_Zx`jIRPScNp=mX+dm(? zJ^5%xoU3PO)7$a+`?Md56#Z}5$jpBiEdBaAi>@NB!Y#;)iHk>OT#tN>6}3*)XYcLg ze*XYy_UMAS;%-h^n!k!}eQsZD^3lE=4_9yZ%y!B8H~jQp6rRlW=5zcaXX0iwcWY|% zJhG3B^mlqY=;h1)$yU>Ycl6{{WvqvnzMrdwH&vHhpfd)ZK$`f4bTye%7y6 zsPUL1-gITgU&r`Wqq=gJ6vckNeSXgGrS9kK(IbDc^)l=AJVspq06!V!RXJ+g$HOW2 z8~*^8{!#1UwolG-?ClyG=yZ=>sy?CXcK-mcev8?oorj71#)+!B-PJoABu!KqpLn#+ z)W|16=`~YMM`PkS_*_MyDr)r{hFY|PFck}ocgBnk_PiC#9 z4&885%XRqok)MW&QEi%Qs*|>z9=B%qmE*Iwwsd`06ga7Bmc+W4a%!g){;gfi>d=cHnofUBRrtIYTey`A%xiK$7aXyiP7y}3q$@nh;A zc>J_1tx>fgA7T8*zbyo9)l&0ctK;;vk$SsTojF;HWm&UXRa9)vS6ou5CH?~I@a_1n eW4d$?YJF>uba(R6JXI6)=P9&*&GyIHr~ldit9}0f literal 0 HcmV?d00001 diff --git a/docs/io/fits/index.rst b/docs/io/fits/index.rst new file mode 100644 index 0000000..fb887b6 --- /dev/null +++ b/docs/io/fits/index.rst @@ -0,0 +1,717 @@ +.. doctest-skip-all + +.. currentmodule:: astropy.io.fits + +.. 
_astropy-io-fits: + +************************************** +FITS File handling (`astropy.io.fits`) +************************************** + +Introduction +============ + +The :mod:`astropy.io.fits` package provides access to FITS files. FITS +(Flexible Image Transport System) is a portable file standard widely used in +the astronomy community to store images and tables. + +.. _tutorial: + +Getting Started +=============== + +This section provides a quick introduction of using :mod:`astropy.io.fits`. The +goal is to demonstrate the package's basic features without getting into too +much detail. If you are a first time user or have never used Astropy or PyFITS, +this is where you should start. See also the :ref:`FAQ ` for +answers to common questions/issues. + +Reading and Updating Existing FITS Files +---------------------------------------- + +Opening a FITS file +^^^^^^^^^^^^^^^^^^^ + +Once the `astropy.io.fits` package is loaded using the standard convention\ +[#f1]_, we can open an existing FITS file:: + + >>> from astropy.io import fits + >>> hdulist = fits.open('input.fits') + +The :func:`open` function has several optional arguments which will be +discussed in a later chapter. The default mode, as in the above example, is +"readonly". The open function returns an object called an :class:`HDUList` +which is a `list`-like collection of HDU objects. An HDU (Header Data Unit) is +the highest level component of the FITS file structure, consisting of a header +and (typically) a data array or table. + +After the above open call, ``hdulist[0]`` is the primary HDU, ``hdulist[1]`` is +the first extension HDU, etc (if there are any extensions), and so on. It +should be noted that Astropy is using zero-based indexing when referring to +HDUs and header cards, though the FITS standard (which was designed with +FORTRAN in mind) uses one-based indexing. 
+ +The :class:`HDUList` has a useful method :meth:`HDUList.info`, which +summarizes the content of the opened FITS file: + + >>> hdulist.info() + Filename: test1.fits + No. Name Type Cards Dimensions Format + 0 PRIMARY PrimaryHDU 220 () int16 + 1 SCI ImageHDU 61 (800, 800) float32 + 2 SCI ImageHDU 61 (800, 800) float32 + 3 SCI ImageHDU 61 (800, 800) float32 + 4 SCI ImageHDU 61 (800, 800) float32 + +After you are done with the opened file, close it with the +:meth:`HDUList.close` method: + + >>> hdulist.close() + +The headers will still be accessible after the HDUList is closed. The data may +or may not be accessible depending on whether the data are touched and if they +are memory-mapped, see later chapters for detail. + +Working with large files +"""""""""""""""""""""""" + +The :func:`open` function supports a ``memmap=True`` argument that allows the +array data of each HDU to be accessed with mmap, rather than being read into +memory all at once. This is particularly useful for working with very large +arrays that cannot fit entirely into physical memory. + +This has minimal impact on smaller files as well, though some operations, such +as reading the array data sequentially, may incur some additional overhead. On +32-bit systems arrays larger than 2-3 GB cannot be mmap'd (which is fine, +because by that point you're likely to run out of physical memory anyways), but +64-bit systems are much less limited in this respect. + +.. warning:: + When opening a file with ``memmap=True``, because of how mmap works this means that + when the HDU data is accessed (i.e. ``hdul[0].data``) another handle to the FITS file + is opened by mmap. This means that even after calling ``hdul.close()`` the mmap still + holds an open handle to the data so that it can still be accessed by unwary programs + that were built with the assumption that the .data attribute has all the data in-memory. 
+ + In order to force the mmap to close either wait for the containing ``HDUList`` object to go + out of scope, or manually call del ``hdul[0].data`` (this works so long as there are no other + references held to the data array). + +Working with FITS Headers +^^^^^^^^^^^^^^^^^^^^^^^^^ + +As mentioned earlier, each element of an :class:`HDUList` is an HDU object with +``.header`` and ``.data`` attributes, which can be used to access the header +and data portions of the HDU. + +For those unfamiliar with FITS headers, they consist of a list of 80 byte +"cards", where a card contains a keyword, a value, and a comment. The keyword +and comment must both be strings, whereas the value can be a string or an +integer, floating point number, complex number, or `True`/`False`. Keywords +are usually unique within a header, except in a few special cases. + +The header attribute is a Header instance, another Astropy object. To get the +value associated with a header keyword, simply do (a la Python dicts):: + + >>> hdulist[0].header['targname'] + 'NGC121' + +to get the value of the keyword targname, which is a string 'NGC121'. + +Although keyword names are always in upper case inside the FITS file, +specifying a keyword name with Astropy is case-insensitive, for the user's +convenience. If the specified keyword name does not exist, it will raise a +`~.exceptions.KeyError` exception. + +We can also get the keyword value by indexing (a la Python lists):: + + >>> hdulist[0].header[27] + 96 + +This example returns the 28th (like Python lists, it is 0-indexed) keyword's +value--an integer--96. + +Similarly, it is easy to update a keyword's value in Astropy, either through +keyword name or index:: + + >>> prihdr = hdulist[0].header + >>> prihdr['targname'] = 'NGC121-a' + >>> prihdr[27] = 99 + +Please note however that almost all application code should update header +values via their keyword name and not via their positional index. 
This is +because most FITS keywords may appear at any position in the header. + +It is also possible to update both the value and comment associated with a +keyword by assigning them as a tuple:: + + >>> prihdr = hdulist[0].header + >>> prihdr['targname'] = ('NGC121-a', 'the observation target') + >>> prihdr['targname'] + 'NGC121-a' + >>> prihdr.comments['targname'] + 'the observation target' + +Like a dict, one may also use the above syntax to add a new keyword/value pair +(and optionally a comment as well). In this case the new card is appended to +the end of the header (unless it's a commentary keyword such as COMMENT or +HISTORY, in which case it is appended after the last card with that keyword). + +Another way to either update an existing card or append a new one is to use the +:meth:`Header.set` method:: + + >>> prihdr.set('observer', 'Edwin Hubble') + +Comment or history records are added like normal cards, though in their case a +new card is always created, rather than updating an existing HISTORY or COMMENT +card:: + + >>> prihdr['history'] = 'I updated this file 2/26/09' + >>> prihdr['comment'] = 'Edwin Hubble really knew his stuff' + >>> prihdr['comment'] = 'I like using HST observations' + >>> prihdr['history'] + I updated this file 2/26/09 + >>> prihdr['comment'] + Edwin Hubble really knew his stuff + I like using HST observations + +Note: Be careful not to confuse COMMENT cards with the comment value for normal +cards. 
+ +To update existing COMMENT or HISTORY cards, reference them by index:: + + >>> prihdr['history'][0] = 'I updated this file on 2/27/09' + >>> prihdr['history'] + I updated this file on 2/27/09 + >>> prihdr['comment'][1] = 'I like using JWST observations' + >>> prihdr['comment'] + Edwin Hubble really knew his stuff + I like using JWST observations + + +To see the entire header as it appears in the FITS file (with the END card and +padding stripped), simply enter the header object by itself, or ``print +repr(header)``:: + + >>> header + SIMPLE = T / file does conform to FITS standard + BITPIX = 16 / number of bits per data pixel + NAXIS = 0 / number of data axes + all cards are shown... + >>> print repr(header) + identical... + +Entering simply ``print header`` will also work, but may not be very legible on +most displays, as this displays the header as it is written in the FITS file +itself, which means there are no linebreaks between cards. This is a common +confusion in new users. + +It's also possible to view a slice of the header:: + + >>> header[:2] + SIMPLE = T / file does conform to FITS standard + BITPIX = 16 / number of bits per data pixel + +Only the first two cards are shown above. + +To get a list of all keywords, use the :meth:`Header.keys` method just as you +would with a dict:: + + >>> prihdr.keys() + ['SIMPLE', 'BITPIX', 'NAXIS', ...] + + +Working with Image Data +^^^^^^^^^^^^^^^^^^^^^^^ + +If an HDU's data is an image, the data attribute of the HDU object will return +a numpy `~numpy.ndarray` object. Refer to the numpy documentation for details +on manipulating these numerical arrays. + +:: + + >>> scidata = hdulist[1].data + +Here, scidata points to the data object in the second HDU (the first HDU, +``hdulist[0]``, being the primary HDU) which corresponds to the 'SCI' +extension. 
Alternatively, you can access the extension by its extension name +(specified in the EXTNAME keyword):: + + >>> scidata = hdulist['SCI'].data + +If there is more than one extension with the same EXTNAME, the EXTVER value +needs to be specified along with the EXTNAME as a tuple; e.g.:: + + >>> scidata = hdulist['sci',2].data + +Note that the EXTNAME is also case-insensitive. + +The returned numpy object has many attributes and methods for a user to get +information about the array, e.g. + +:: + + >>> scidata.shape + (800, 800) + >>> scidata.dtype.name + 'float32' + +Since image data is a numpy object, we can slice it, view it, and perform +mathematical operations on it. To see the pixel value at x=5, y=2:: + + >>> print scidata[1, 4] + +Note that, like C (and unlike FORTRAN), Python is 0-indexed and the indices +have the slowest axis first and fastest changing axis last; i.e. for a 2-D +image, the fast axis (X-axis) which corresponds to the FITS NAXIS1 keyword, is +the second index. Similarly, the 1-indexed sub-section of x=11 to 20 +(inclusive) and y=31 to 40 (inclusive) would be given in Python as:: + + >>> scidata[30:40, 10:20] + +To update the value of a pixel or a sub-section:: + + >>> scidata[30:40, 10:20] = scidata[1, 4] = 999 + +This example changes the values of both the pixel \[1, 4] and the sub-section +\[30:40, 10:20] to the new value of 999. See the `Numpy documentation`_ for +more details on Python-style array indexing and slicing. + +The next example of array manipulation is to convert the image data from counts +to flux:: + + >>> photflam = hdulist[1].header['photflam'] + >>> exptime = prihdr['exptime'] + >>> scidata *= photflam / exptime + +Note that performing an operation like this on an entire image requires holding +the entire image in memory. This example performs the multiplication in-place +so that no copies are made, but the original image must first be able to fit in +main memory. 
For most observations this should not be an issue on modern +personal computers. + +If at this point you want to preserve all the changes you made and write it to +a new file, you can use the :meth:`HDUList.writeto` method (see below). + +.. _Numpy documentation: http://docs.scipy.org/doc/numpy/reference/arrays.indexing.html + + +Working With Table Data +^^^^^^^^^^^^^^^^^^^^^^^ + +If you are familiar with numpy `~numpy.recarray` (record array) objects, you +will find the table data is basically a record array with some extra +properties. But familiarity with record arrays is not a prerequisite for this +guide. + +Like images, the data portion of a FITS table extension is in the ``.data`` +attribute:: + + >>> hdulist = fits.open('table.fits') + >>> tbdata = hdulist[1].data # assuming the first extension is a table + +To see the first row of the table:: + + >>> print tbdata[0] + (1, 'abc', 3.7000002861022949, 0) + +Each row in the table is a :class:`FITS_record` object which looks like a +(Python) tuple containing elements of heterogeneous data types. In this +example: an integer, a string, a floating point number, and a Boolean value. So +the table data are just an array of such records. More commonly, a user is +likely to access the data in a column-wise way. This is accomplished by using +the :meth:`~FITS_rec.field` method. To get the first column (or "field" in +Numpy parlance--it is used here interchangeably with "column") of the table, +use:: + + >>> tbdata.field(0) + array([1, 2]) + +A numpy object with the data type of the specified field is returned. + +Like header keywords, a column can be referred either by index, as above, or by +name:: + + >>> tbdata.field('id') + array([1, 2]) + +When accessing a column by name, dict-like access is also possible (and even +preferable):: + + >>> tbdata['id'] + array([1, 2]) + +In most cases it is preferable to access columns by their name, as the column +name is entirely independent of its physical order in the table. 
As with +header keywords, column names are case-insensitive. + +But how do we know what columns we have in a table? First, let's introduce +another attribute of the table HDU: the :attr:`~BinTableHDU.columns` +attribute:: + + >>> cols = hdulist[1].columns + +This attribute is a :class:`ColDefs` (column definitions) object. If we use the +:meth:`ColDefs.info` method:: + + >>> cols.info() + name: + ['c1', 'c2', 'c3', 'c4'] + format: + ['1J', '3A', '1E', '1L'] + unit: + ['', '', '', ''] + null: + [-2147483647, '', '', ''] + bscale: + ['', '', 3, ''] + bzero: + ['', '', 0.40000000000000002, ''] + disp: + ['I11', 'A3', 'G15.7', 'L6'] + start: + ['', '', '', ''] + dim: + ['', '', '', ''] + +it will show the attributes of all columns in the table, such as their names, +formats, bscales, bzeros, etc. We can also get these properties individually; +e.g. + +:: + + >>> cols.names + ['ID', 'name', 'mag', 'flag'] + +returns a (Python) list of field names. + +Since each field is a Numpy object, we'll have the entire arsenal of Numpy +tools to use. We can reassign (update) the values:: + + >>> tbdata['flag'][:] = 0 + +take the mean of a column:: + + >>> tbdata['mag'].mean() + >>> 84.4 + +and so on. + + +Save File Changes +^^^^^^^^^^^^^^^^^ + +As mentioned earlier, after a user opened a file, made a few changes to either +header or data, the user can use :meth:`HDUList.writeto` to save the changes. +This takes the version of headers and data in memory and writes them to a new +FITS file on disk. Subsequent operations can be performed to the data in memory +and written out to yet another different file, all without recopying the +original data to (more) memory. + +:: + + >>> hdulist.writeto('newimage.fits') + +will write the current content of ``hdulist`` to a new disk file newfile.fits. +If a file was opened with the update mode, the :meth:`HDUList.flush` method can +also be used to write all the changes made since :func:`open`, back to the +original file. 
The :meth:`~HDUList.close` method will do the same for a FITS +file opened with update mode:: + + >>> f = fits.open('original.fits', mode='update') + ... # making changes in data and/or header + >>> f.flush() # changes are written back to original.fits + >>> f.close() # closing the file will also flush any changes and prevent + ... # further writing + + +Creating a New FITS File +------------------------ + +Creating a New Image File +^^^^^^^^^^^^^^^^^^^^^^^^^ + +So far we have demonstrated how to read and update an existing FITS file. But +how about creating a new FITS file from scratch? Such tasks are very easy in +Astropy for an image HDU. We'll first demonstrate how to create a FITS file +consisting only the primary HDU with image data. + +First, we create a numpy object for the data part:: + + >>> import numpy as np + >>> n = np.arange(100.0) # a simple sequence of floats from 0.0 to 99.9 + +Next, we create a :class:`PrimaryHDU` object to encapsulate the data:: + + >>> hdu = fits.PrimaryHDU(n) + +We then create a HDUList to contain the newly created primary HDU, and write to +a new file:: + + >>> hdulist = fits.HDUList([hdu]) + >>> hdulist.writeto('new.fits') + +That's it! In fact, Astropy even provides a shortcut for the last two lines to +accomplish the same behavior:: + + >>> hdu.writeto('new.fits') + +This will write a single HDU to a FITS file without having to manually +encapsulate it in an :class:`HDUList` object first. + + +Creating a New Table File +^^^^^^^^^^^^^^^^^^^^^^^^^ + +To create a table HDU is a little more involved than image HDU, because a +table's structure needs more information. First of all, tables can only be an +extension HDU, not a primary. There are two kinds of FITS table extensions: +ASCII and binary. We'll use binary table examples here. + +To create a table from scratch, we need to define columns first, by +constructing the :class:`Column` objects and their data. 
Suppose we have two +columns, the first containing strings, and the second containing floating point +numbers:: + + >>> from astropy.io import fits + >>> import numpy as np + >>> a1 = np.array(['NGC1001', 'NGC1002', 'NGC1003']) + >>> a2 = np.array([11.1, 12.3, 15.2]) + >>> col1 = fits.Column(name='target', format='20A', array=a1) + >>> col2 = fits.Column(name='V_mag', format='E', array=a2) + +Next, create a :class:`ColDefs` (column-definitions) object for all columns:: + + >>> cols = fits.ColDefs([col1, col2]) + +Now, create a new binary table HDU object by using the +:func:`BinTableHDU.from_columns` function:: + + >>> tbhdu = fits.BinTableHDU.from_columns(cols) + +This function returns (in this case) a :class:`BinTableHDU`. + +Of course, you can do this more concisely without creating intermediate +variables for the individual columns and without manually creating a +:class:`ColDefs` object:: + + + >>> from astropy.io import fits + >>> tbhdu = fits.BinTableHDU.from_columns( + ... [fits.Column(name='target', format='20A', array=a1), + ... fits.Column(name='V_mag', format='E', array=a2)]) + +Now you may write this new table HDU directly to a FITS file like so:: + + >>> hdu = fits.PrimaryHDU(n) + +This shortcut will automatically create a minimal primary HDU with no data and +prepend it to the table HDU to create a valid FITS file. If you require +additional data or header keywords in the primary HDU you may still create a +:class:`PrimaryHDU` object and build up the FITS file manually using an +:class:`HDUList`. + +For example, first create a new :class:`Header` object to encapsulate any +keywords you want to include in the primary HDU, then as before create a +:class:`PrimaryHDU`:: + + >>> prihdr = fits.Header() + >>> prihdr['OBSERVER'] = 'Edwin Hubble' + >>> prihdr['COMMENT'] = "Here's some commentary about this FITS file." 
+ >>> prihdu = fits.PrimaryHDU(header=prihdr) + +When we create a new primary HDU with a custom header as in the above example, +this will automatically include any additional header keywords that are +*required* by the FITS format (keywords such as ``SIMPLE`` and ``NAXIS`` for +example). In general, users should not have to manually manage such keywords, +and should only create and modify observation-specific informational keywords. + +We then create a HDUList containing both the primary HDU and the newly created +table extension, and write to a new file:: + + >>> thdulist = fits.HDUList([prihdu, tbhdu]) + >>> thdulist.writeto('table.fits') + +Alternatively, we can append the table to the HDU list we already created in +the image file section:: + + >>> hdulist.append(tbhdu) + >>> hdulist.writeto('image_and_table.fits') + +The data structure used to represent FITS tables is called a :class:`FITS_rec` +and is derived from the :class:`numpy.recarray` interface. When creating +a new table HDU the individual column arrays will be assembled into a single +:class:`FITS_rec` array. + +So far, we have covered the most basic features of `astropy.io.fits`. In the +following chapters we'll show more advanced examples and explain options in +each class and method. + + +Convenience Functions +--------------------- + +`astropy.io.fits` also provides several high level ("convenience") functions. +Such a convenience function is a "canned" operation to achieve one simple task. +By using these "convenience" functions, a user does not have to worry about +opening or closing a file, all the housekeeping is done implicitly. + +.. warning:: + + These functions are useful for interactive Python sessions and simple + analysis scripts, but should not be used for application code, as they + are highly inefficient. For example, each call to :func:`getval` + requires re-parsing the entire FITS file. 
Code that makes repeated use + of these functions should instead open the file with :func:`open` + and access the data structures directly. + +The first of these functions is :func:`getheader`, to get the header of an HDU. +Here are several examples of getting the header. Only the file name is required +for this function. The rest of the arguments are optional and flexible to +specify which HDU the user wants to access:: + + >>> from astropy.io.fits import getheader + >>> getheader('in.fits') # get default HDU (=0), i.e. primary HDU's header + >>> getheader('in.fits', 0) # get primary HDU's header + >>> getheader('in.fits', 2) # the second extension + >>> getheader('in.fits', 'sci') # the first HDU with EXTNAME='SCI' + >>> getheader('in.fits', 'sci', 2) # HDU with EXTNAME='SCI' and EXTVER=2 + >>> getheader('in.fits', ('sci', 2)) # use a tuple to do the same + >>> getheader('in.fits', ext=2) # the second extension + >>> getheader('in.fits', extname='sci') # first HDU with EXTNAME='SCI' + >>> getheader('in.fits', extname='sci', extver=2) + +Ambiguous specifications will raise an exception:: + + >>> getheader('in.fits', ext=('sci', 1), extname='err', extver=2) + ... + TypeError: Redundant/conflicting extension arguments(s): {'ext': ('sci', + 1), 'args': (), 'extver': 2, 'extname': 'err'} + +After you get the header, you can access the information in it, such as getting +and modifying a keyword value:: + + >>> from astropy.io.fits import getheader + >>> hdr = getheader('in.fits', 1) # get first extension's header + >>> filter = hdr['filter'] # get the value of the keyword "filter' + >>> val = hdr[10] # get the 11th keyword's value + >>> hdr['filter'] = 'FW555' # change the keyword value + +For the header keywords, the header is like a dictionary, as well as a list. +The user can access the keywords either by name or by numeric index, as +explained earlier in this chapter. 
+ +If a user only needs to read one keyword, the :func:`getval` function can +further simplify to just one call, instead of two as shown in the above +examples:: + + >>> from astropy.io.fits import getval + >>> flt = getval('in.fits', 'filter', 1) # get 1st extension's keyword + # FILTER's value + >>> val = getval('in.fits', 10, 'sci', 2) # get the 2nd sci extension's + # 11th keyword's value + +The function :func:`getdata` gets the data of an HDU. Similar to +:func:`getheader`, it only requires the input FITS file name while the +extension is specified through the optional arguments. It does have one extra +optional argument header. If header is set to True, this function will return +both data and header, otherwise only data is returned:: + + >>> from astropy.io.fits import getdata + >>> dat = getdata('in.fits', 'sci', 3) # get 3rd sci extension's data + ... # get 1st extension's data and header + >>> data, hdr = getdata('in.fits', 1, header=True) + +The functions introduced above are for reading. The next few functions +demonstrate convenience functions for writing:: + + >>> fits.writeto('out.fits', data, header) + +The :func:`writeto` function uses the provided data and an optional header to +write to an output FITS file. + +:: + + >>> fits.append('out.fits', data, header) + +The :func:`append` function will use the provided data and the optional header +to append to an existing FITS file. If the specified output file does not +exist, it will create one. 
+ +:: + + >>> from astropy.io.fits import update + >>> update(file, dat, hdr, 'sci') # update the 'sci' extension + >>> update(file, dat, 3) # update the 3rd extension + >>> update(file, dat, hdr, 3) # update the 3rd extension + >>> update(file, dat, 'sci', 2) # update the 2nd SCI extension + >>> update(file, dat, 3, header=hdr) # update the 3rd extension + >>> update(file, dat, header=hdr, ext=5) # update the 5th extension + +The :func:`update` function will update the specified extension with the input +data/header. The 3rd argument can be the header associated with the data. If +the 3rd argument is not a header, it (and other positional arguments) are +assumed to be the extension specification(s). Header and extension specs can +also be keyword arguments. + +Finally, the :func:`info` function will print out information of the specified +FITS file:: + + >>> fits.info('test0.fits') + Filename: test0.fits + No. Name Type Cards Dimensions Format + 0 PRIMARY PrimaryHDU 138 () Int16 + 1 SCI ImageHDU 61 (400, 400) Int16 + 2 SCI ImageHDU 61 (400, 400) Int16 + 3 SCI ImageHDU 61 (400, 400) Int16 + 4 SCI ImageHDU 61 (400, 400) Int16 + +This is one of the most useful convenience functions for getting an overview of +what a given file contains without looking at any of the details. + + +Using `astropy.io.fits` +======================= +.. toctree:: + :maxdepth: 2 + + usage/headers + usage/image + usage/table + usage/verification + usage/unfamiliar + usage/scripts + usage/misc + usage/examples + +Other Information +================= + +.. toctree:: + :maxdepth: 1 + + appendix/faq + appendix/header_transition + appendix/history + +Reference/API +============= + +.. automodule:: astropy.io.fits + +.. toctree:: + :maxdepth: 3 + + api/files.rst + api/hdulists.rst + api/hdus.rst + api/headers.rst + api/cards.rst + api/tables.rst + api/images.rst + api/diff.rst + api/verification.rst + +.. rubric:: Footnotes + +.. 
[#f1] For legacy code only that already depends on PyFITS, it's acceptable to continue using "from astropy.io import fits as pyfits". diff --git a/docs/io/fits/usage/examples.rst b/docs/io/fits/usage/examples.rst new file mode 100644 index 0000000..8512521 --- /dev/null +++ b/docs/io/fits/usage/examples.rst @@ -0,0 +1,69 @@ +Examples +-------- + +Converting a 3-color image (JPG) to separate FITS images +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. figure:: ../images/Hs-2009-14-a-web.jpg + :scale: 100 % + :align: center + :alt: Starting image + +.. container:: figures + + .. figure:: ../images/Red.jpg + :scale: 50 + :alt: Red color information + + Red color information + + .. figure:: ../images/Green.jpg + :scale: 50 + :alt: Green color information + + Green color information + + .. figure:: ../images/Blue.jpg + :scale: 50 + :alt: Blue color information + + Blue color information + +:: + + #!/usr/bin/env python + import numpy + import Image + + from astropy.io import fits + + #get the image and color information + image = Image.open('hs-2009-14-a-web.jpg') + #image.show() + xsize, ysize = image.size + r, g, b = image.split() + rdata = r.getdata() # data is now an array of length ysize\*xsize + gdata = g.getdata() + bdata = b.getdata() + + # create numpy arrays + npr = numpy.reshape(rdata, (ysize, xsize)) + npg = numpy.reshape(gdata, (ysize, xsize)) + npb = numpy.reshape(bdata, (ysize, xsize)) + + # write out the fits images, the data numbers are still JUST the RGB + # scalings; don't use for science + red = fits.PrimaryHDU(data=npr) + red.header['LATOBS'] = "32:11:56" # add spurious header info + red.header['LONGOBS'] = "110:56" + red.writeto('red.fits') + + green = fits.PrimaryHDU(data=npg) + green.header['LATOBS'] = "32:11:56" + green.header['LONGOBS'] = "110:56" + green.writeto('green.fits') + + blue = fits.PrimaryHDU(data=npb) + blue.header['LATOBS'] = "32:11:56" + blue.header['LONGOBS'] = "110:56" + blue.writeto('blue.fits') diff --git 
a/docs/io/fits/usage/headers.rst b/docs/io/fits/usage/headers.rst new file mode 100644 index 0000000..2246f4f --- /dev/null +++ b/docs/io/fits/usage/headers.rst @@ -0,0 +1,380 @@ +.. doctest-skip-all + +.. currentmodule:: astropy.io.fits + +FITS Headers +------------ + +In the next three chapters, more detailed information as well as examples will +be explained for manipulating FITS headers, image/array data, and table data +respectively. + + +Header of an HDU +^^^^^^^^^^^^^^^^ + +Every HDU normally has two components: header and data. In Astropy these two +components are accessed through the two attributes of the HDU, +``hdu.header`` and ``hdu.data``. + +While an HDU may have empty data, i.e. the ``.data`` attribute is `None`, any +HDU will always have a header. When an HDU is created with a constructor, e.g. +``hdu = PrimaryHDU(data, header)``, the user may supply the header value from +an existing HDU's header and the data value from a numpy array. If the +defaults (None) are used, the new HDU will have the minimal required keywords +for an HDU of that type:: + + >>> hdu = fits.PrimaryHDU() + >>> hdu.header # show the all of the header cards + SIMPLE = T / conforms to FITS standard + BITPIX = 8 / array data type + NAXIS = 0 / number of array dimensions + EXTEND = T + +A user can use any header and any data to construct a new HDU. Astropy will +strip any keywords that describe the data structure leaving only your +informational keywords. Later it will add back in the required structural +keywords for compatibility with the new HDU and any data added to it. So, a +user can use a table HDU's header to construct an image HDU and vice versa. The +constructor will also ensure the data type and dimension information in the +header agree with the data. 
+ + +The Header Attribute +^^^^^^^^^^^^^^^^^^^^ + +Value Access, Updating, and Creating +"""""""""""""""""""""""""""""""""""" + +As shown in the :ref:`Getting Started ` tutorial, keyword values can +be accessed via keyword name or index of an HDU's header attribute. Here is a +quick summary:: + + >>> hdulist = fits.open('input.fits') # open a FITS file + >>> prihdr = hdulist[0].header # the primary HDU header + >>> print prihdr[3] # get the 4th keyword's value + 10 + >>> prihdr[3] = 20 # change its value + >>> prihdr['DARKCORR'] # get the value of the keyword 'darkcorr' + 'OMIT' + >>> prihdr['darkcorr'] = 'PERFORM' # change darkcorr's value + +Keyword names are case-insensitive except in a few special cases (see the +sections on HIERARCH card and record-valued cards). Thus, ``prihdr['abc']``, +``prihdr['ABC']``, or ``prihdr['aBc']`` are all equivalent. + +Like with Python's :class:`dict` type, new keywords can also be added to the +header using assignment syntax:: + + >>> 'DARKCORR' in header # Check for existence + False + >>> header['DARKCORR'] = 'OMIT' # Add a new DARKCORR keyword + +You can also add a new value *and* comment by assigning them as a tuple:: + + >>> header['DARKCORR'] = ('OMIT', 'Dark Image Subtraction') + +.. note:: + + An important point to note when adding new keywords to a header is that by + default they are not appended *immediately* to the end of the file. + Rather, they are appended to the last non-commentary keyword. This is in + order to support the common use case of always having all HISTORY keywords + grouped together at the end of a header. A new non-commentary keyword will + be added at the end of the existing keywords, but before any + HISTORY/COMMENT keywords at the end of the header. 
+ + There are a couple of ways to override this functionality: + + * Use the :meth:`Header.append` method with the ``end=True`` argument: + + >>> header.append(('DARKCORR', 'OMIT', 'Dark Image Subtraction'), + end=True) + + This forces the new keyword to be added at the actual end of the header. + + * The :meth:`Header.insert` method will always insert a new keyword exactly + where you ask for it: + + >>> header.insert(20, ('DARKCORR', 'OMIT', 'Dark Image Subtraction')) + + This inserts the DARKCORR keyword before the 20th keyword in the header + no matter what it is. + +A keyword (and its corresponding card) can be deleted using the same index/name +syntax:: + + >>> del prihdr[3] # delete the 2nd keyword + >>> del prihdr['abc'] # get the value of the keyword 'abc' + +Note that, like a regular Python list, the indexing updates after each delete, +so if ``del prihdr[3]`` is done two times in a row, the 4th and 5th keywords +are removed from the original header. Likewise, ``del prihdr[-1]`` will delete +the last card in the header. + +It is also possible to delete an entire range of cards using the slice syntax:: + + >>> del prihdr[3:5] + +The method :meth:`Header.set` is another way to update they value or comment +associated with an existing keyword, or to create a new keyword. Most of its +functionality can be duplicated with the dict-like syntax shown above. But in +some cases it might be more clear. It also has the advantage of allowing one +to either move cards within the header, or specify the location of a new card +relative to existing cards:: + + >>> prihdr.set('target', 'NGC1234', 'target name') + >>> # place the next new keyword before the 'TARGET' keyword + >>> prihdr.set('newkey', 666, before='TARGET') # comment is optional + >>> # place the next new keyword after the 21st keyword + >>> prihdr.set('newkey2', 42.0, 'another new key', after=20) + +In FITS headers, each keyword may also have a comment associated with it +explaining its purpose. 
The comments associated with each keyword are accessed +through the :attr:`~Header.comments` attribute:: + + >>> header['NAXIS'] + 2 + >>> header.comments['NAXIS'] + the number of image axes + >>> header.comments['NAXIS'] = 'The number of image axes' # Update + +Comments can be accessed in all the same ways that values are accessed, whether +by keyword name or card index. Slices are also possible. The only difference +is that you go through ``header.comments`` instead of just ``header`` by +itself. + + +COMMENT, HISTORY, and Blank Keywords +"""""""""""""""""""""""""""""""""""" + +Most keywords in a FITS header have unique names. If there are more than two +cards sharing the same name, it is the first one accessed when referred by +name. The duplicates can only be accessed by numeric indexing. + +There are three special keywords (their associated cards are sometimes referred +to as commentary cards), which commonly appear in FITS headers more than once. +They are (1) blank keyword, (2) HISTORY, and (3) COMMENT. Unlike other +keywords, when accessing these keywords they are returned as a list:: + + >>> prihdr['HISTORY'] + I updated this file on 02/03/2011 + I updated this file on 02/04/2011 + .... + +These lists can be sliced like any other list. For example, to display just the +last HISTORY entry, use ``prihdr['history'][-1]``. Existing commentary cards +can also be updated by using the appropriate index number for that card. + +New commentary cards can be added like any other card by using the dict-like +keyword assignment syntax, or by using the :meth:`Header.set` method. However, +unlike with other keywords, a new commentary card is always added and appended +to the last commentary card with the same keyword, rather than to the end of +the header. 
Here is an example:: + + >>> hdu.header['HISTORY'] = 'history 1' + >>> hdu.header[''] = 'blank 1' + >>> hdu.header['COMMENT'] = 'comment 1' + >>> hdu.header['HISTORY'] = 'history 2' + >>> hdu.header[''] = 'blank 2' + >>> hdu.header['COMMENT'] = 'comment 2' + +and the part in the modified header becomes: + +.. parsed-literal:: + + HISTORY history 1 + HISTORY history 2 + blank 1 + blank 2 + COMMENT comment 1 + COMMENT comment 2 + + +Users can also directly control exactly where in the header to add a new +commentary card by using the :meth:`Header.insert` method. + +.. note:: + + Ironically, there is no comment in a commentary card, only a string + value. + + +Card Images +^^^^^^^^^^^ + +A FITS header consists of card images. + +A card image in a FITS header consists of a keyword name, a value, and +optionally a comment. Physically, it takes 80 columns (bytes)--without carriage +return--in a FITS file's storage format. In Astropy, each card image is +manifested by a :class:`Card` object. There are also special kinds of cards: +commentary cards (see above) and card images taking more than one 80-column +card image. The latter will be discussed later. + +Most of the time the details of dealing with cards are handled by the +:class:`Header` object, and it is not necessary to directly manipulate cards. +In fact, most :class:`Header` methods that accept a ``(keyword, value)`` or +``(keyword, value, comment)`` tuple as an argument can also take a +:class:`Card` object as an argument. :class:`Card` objects are just wrappers +around such tuples that provide the logic for parsing and formatting individual +cards in a header. But there's usually nothing gained by manually using a +:class:`Card` object, except to examine how a card might appear in a header +before actually adding it to the header. + +A new Card object is created with the :class:`Card` constructor: +``Card(key, value, comment)``. 
For example:: + + >>> c1 = fits.Card('TEMP', 80.0, 'temperature, floating value') + >>> c2 = fits.Card('DETECTOR', 1) # comment is optional + >>> c3 = fits.Card('MIR_REVR', True, + ... 'mirror reversed? Boolean value') + >>> c4 = fits.Card('ABC', 2+3j, 'complex value') + >>> c5 = fits.Card('OBSERVER', 'Hubble', 'string value') + + >>> print c1; print c2; print c3; print c4; print c5 # show the cards + TEMP = 80.0 / temperature, floating value + DETECTOR= 1 / + MIR_REVR= T / mirror reversed? Boolean value + ABC = (2.0, 3.0) / complex value + OBSERVER= 'Hubble ' / string value + +Cards have the attributes ``.keyword``, ``.value``, and ``.comment``. Both +``.value`` and ``.comment`` can be changed but not the ``.keyword`` attribute. +In other words, once a card is created, it is created for a specific, immutable +keyword. + +The :meth:`Card` constructor will check if the arguments given are conforming +to the FITS standard and has a fixed card image format. If the user wants to +create a card with a customized format or even a card which is not conforming +to the FITS standard (e.g. for testing purposes), the :meth:`Card.fromstring` +class method can be used. + +Cards can be verified with :meth:`Card.verify`. The non-standard card ``c2`` in +the example below is flagged by such verification. More about verification in +Astropy will be discussed in a later chapter. + +:: + + >>> c1 = fits.Card.fromstring('ABC = 3.456D023') + >>> c2 = fits.Card.fromstring("P.I. ='Hubble'") + >>> print c1; print c2 + ABC = 3.456D023 + P.I. ='Hubble' + >>> c2.verify() + Output verification result: + Unfixable error: Illegal keyword name 'P.I.' + +A list of the :class:`Card` objects underlying a :class:`Header` object can be +accessed with the :attr:`Header.cards` attribute. This list is only meant for +observing, and should not be directly manipulated. In fact, it is only a +copy--modifications to it will not affect the header it came from. 
Use the +methods provided by the :class:`Header` class instead. + + +CONTINUE Cards +^^^^^^^^^^^^^^ + +The fact that the FITS standard only allows up to 8 characters for the keyword +name and 80 characters to contain the keyword, the value, and the comment is +restrictive for certain applications. To allow long string values for keywords, +a proposal was made in: + + http://legacy.gsfc.nasa.gov/docs/heasarc/ofwg/docs/ofwg_recomm/r13.html + +by using the CONTINUE keyword after the regular 80-column containing the +keyword. Astropy does support this convention, even though it is not a FITS +standard. The examples below show the use of CONTINUE is automatic for long +string values:: + + >>> header = fits.Header() + >>> header['abc'] = 'abcdefg' * 20 + >>> header + ABC = 'abcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcd&' + CONTINUE 'efgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefga&' + CONTINUE 'bcdefg&' + >>> header['abc'] + 'abcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefg' + >>> # both value and comments are long + >>> header['abc'] = ('abcdefg' * 10, 'abcdefg' * 10) + >>> header + ABC = 'abcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcd&' + CONTINUE 'efg&' + CONTINUE '&' / abcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefgabcdefga + CONTINUE '&' / bcdefg + +Note that when a CONTINUE card is used, at the end of each 80-characters card +image, an ampersand is present. The ampersand is not part of the string value. +Also, there is no "=" at the 9th column after CONTINUE. In the first example, +the entire 240 characters is treated by Astropy as a single card. So, if it is +the nth card in a header, the (n+1)th card refers to the next keyword, not the +next CONTINUE card. 
As such, CONTINUE cards are transparently handled by +Astropy as a single logical card, and it's generally not necessary to worry +about the details of the format. Keywords that resolve to a set of CONTINUE +cards can be accessed and updated just like regular keywords. + + +HIERARCH Cards +^^^^^^^^^^^^^^ + +For keywords longer than 8 characters, there is a convention originated at ESO +to facilitate such use. It uses a special keyword HIERARCH with the actual long +keyword following. Astropy supports this convention as well. + +If a keyword contains more than 8 characters Astropy will automatically use a +HIERARCH card, but will also issue a warning in case this is in error. +However, one may explicitly request a HIERARCH card by prepending the keyword +with 'HIERARCH ' (just as it would appear in the header). For example, +``header['HIERARCH abcdefghi']`` will create the keyword ``abcdefghi`` without +displaying a warning. Once created, HIERARCH keywords can be accessed like any +other: ``header['abcdefghi']``, without prepending 'HIERARCH' to the keyword. +HIEARARCH keywords also differ from normal FITS keywords in that they are +case-sensitive. + +Examples follow:: + + >>> c = fits.Card('abcdefghi', 10) + Keyword name 'abcdefghi' is greater than 8 characters; a HIERARCH card will + be created. + >>> print c + HIERARCH abcdefghi = 10 + >>> c = fits.Card('hierarch abcdefghi', 10) + >>> print c + HIERARCH abcdefghi = 10 + >>> h = fits.PrimaryHDU() + >>> h.header['hierarch abcdefghi'] = 99 + >>> h.header['abcdefghi'] + 99 + >>> h.header['abcdefghi'] = 10 + >>> h.header['abcdefghi'] + 10 + >>> h.header['ABCDEFGHI'] + Traceback (most recent call last): + File "", line 1, in + File "astropy/io/fits/header.py", line 121, in __getitem__ + return self._cards[self._cardindex(key)].value + File "astropy/io/fits/header.py", line 1106, in _cardindex + raise KeyError("Keyword %r not found." % keyword) + KeyError: "Keyword 'ABCDEFGI.' not found." 
+ >>> h.header + SIMPLE = T / conforms to FITS standard + BITPIX = 8 / array data type + NAXIS = 0 / number of array dimensions + EXTEND = T + HIERARCH abcdefghi = 1000 + +.. note:: + + A final point to keep in mind about the :class:`Header` class is that much + of its design is intended to abstract away quirks about the FITS format. + This is why, for example, it will automatically created CONTINUE and + HIERARCH cards. The Header is just a data structure, and as user you + shouldn't have to worry about how it ultimately gets serialized to a header + in a FITS file. + + Though there are some areas where it's almost impossible to hide away the + quirks of the FITS format, Astropy tries to make it so that you have to + think about it as little as possible. If there are any areas where you + have concern yourself unnecessarily about how the header is constructed, + then let help@stsci.edu know, as there are probably areas where this can be + improved on even more. diff --git a/docs/io/fits/usage/image.rst b/docs/io/fits/usage/image.rst new file mode 100644 index 0000000..2352106 --- /dev/null +++ b/docs/io/fits/usage/image.rst @@ -0,0 +1,209 @@ +.. doctest-skip-all + +.. currentmodule:: astropy.io.fits + +Image Data +---------- + +In this chapter, we'll discuss the data component in an image HDU. + + +Image Data as an Array +^^^^^^^^^^^^^^^^^^^^^^ + +A FITS primary HDU or an image extension HDU may contain image data. The +following discussions apply to both of these HDU classes. In Astropy, for most +cases, it is just a simple numpy array, having the shape specified by the NAXIS +keywords and the data type specified by the BITPIX keyword - unless the data is +scaled, see next section. Here is a quick cross reference between allowed +BITPIX values in FITS images and the numpy data types: + +.. 
parsed-literal:: + + **BITPIX** **Numpy Data Type** + 8 numpy.uint8 (note it is UNsigned integer) + 16 numpy.int16 + 32 numpy.int32 + -32 numpy.float32 + -64 numpy.float64 + +To recap the fact that in numpy the arrays are 0-indexed and the axes are +ordered from slow to fast. So, if a FITS image has NAXIS1=300 and NAXIS2=400, +the numpy array of its data will have the shape of (400, 300). + +Here is a summary of reading and updating image data values:: + + >>> f = fits.open('image.fits') # open a FITS file + >>> scidata = f[1].data # assume the first extension is an image + >>> print scidata[1,4] # get the pixel value at x=5, y=2 + >>> scidata[30:40, 10:20] # get values of the subsection + ... # from x=11 to 20, y=31 to 40 (inclusive) + >>> scidata[1,4] = 999 # update a pixel value + >>> scidata[30:40, 10:20] = 0 # update values of a subsection + >>> scidata[3] = scidata[2] # copy the 3rd row to the 4th row + +Here are some more complicated examples by using the concept of the "mask +array". The first example is to change all negative pixel values in scidata to +zero. The second one is to take logarithm of the pixel values which are +positive:: + + >>> scidata[scidata < 0] = 0 + >>> scidata[scidata > 0] = numpy.log(scidata[scidata > 0]) + +These examples show the concise nature of numpy array operations. + + +Scaled Data +^^^^^^^^^^^ + +Sometimes an image is scaled, i.e. the data stored in the file is not the +image's physical (true) values, but linearly transformed according to the +equation: + +.. parsed-literal:: + + physical value = BSCALE \* (storage value) + BZERO + +BSCALE and BZERO are stored as keywords of the same names in the header of the +same HDU. The most common use of scaled image is to store unsigned 16-bit +integer data because FITS standard does not allow it. In this case, the stored +data is signed 16-bit integer (BITPIX=16) with BZERO=32768 (:math:`2^{15}`), +BSCALE=1. 
+ + +Reading Scaled Image Data +""""""""""""""""""""""""" + +Images are scaled only when either of the BSCALE/BZERO keywords are present in +the header and either of their values is not the default value (BSCALE=1, +BZERO=0). + +For unscaled data, the data attribute of an HDU in Astropy is a numpy array of +the same data type specified by the BITPIX keyword. For scaled image, the +``.data`` attribute will be the physical data, i.e. already transformed from +the storage data and may not be the same data type as prescribed in BITPIX. +This means an extra step of copying is needed and thus the corresponding memory +requirement. This also means that the advantage of memory mapping is reduced +for scaled data. + +For floating point storage data, the scaled data will have the same data type. +For integer data type, the scaled data will always be single precision floating +point (``numpy.float32``). Here is an example of what happens to such a file, +before and after the data is touched:: + + >>> f = fits.open('scaled_uint16.fits') + >>> hdu = f[1] + >>> print hdu.header['bitpix'], hdu.header['bzero'] + 16 32768 + >>> print hdu.data # once data is touched, it is scaled + [ 11. 12. 13. 14. 15.] + >>> hdu.data.dtype.name + 'float32' + >>> print hdu.header['bitpix'] # BITPIX is also updated + -32 + >>> # BZERO and BSCALE are removed after the scaling + >>> print hdu.header['bzero'] + KeyError: "Keyword 'bzero' not found." + +.. warning:: + + An important caveat to be aware of when dealing with scaled data in PyFITS, + is that when accessing the data via the ``.data`` attribute, the data is + automatically scaled with the BZERO and BSCALE parameters. If the file was + opened in "update" mode, it will be saved with the rescaled data. This + surprising behavior is a compromise to err on the side of not losing data: + If some floating point calculations were made on the data, rescaling it + when saving could result in a loss of information. 
+ + To prevent this automatic scaling, open the file with the + ``do_not_scale_image_data=True`` argument to ``fits.open()``. This is + especially useful for updating some header values, while ensuring that the + data is not modified. + + One may also manually reapply scale parameters by using ``hdu.scale()`` + (see below). Alternately, one may open files with the ``scale_back=True`` + argument. This assures that the original scaling is preserved when saving + even when the physical values are updated. In other words, it reapplies + the scaling to the new physical values upon saving. + + +Writing Scaled Image Data +""""""""""""""""""""""""" + +With the extra processing and memory requirement, we discourage use of scaled +data as much as possible. However, Astropy does provide ways to write scaled +data with the `~ImageHDU.scale` method. Here are a few examples:: + + >>> # scale the data to Int16 with user specified bscale/bzero + >>> hdu.scale('int16', bzero=32768) + >>> # scale the data to Int32 with the min/max of the data range + >>> hdu.scale('int32', 'minmax') + >>> # scale the data, using the original BSCALE/BZERO + >>> hdu.scale('int32', 'old') + +The first example above shows how to store an unsigned short integer array. + +Great caution must be exercised when using the :meth:`~ImageHDU.scale` method. +The :attr:`~ImageHDU.data` attribute of an image HDU, after the +:meth:`~ImageHDU.scale` call, will become the storage values, not the physical +values. So, only call :meth:`~ImageHDU.scale` just before writing out to FITS +files, i.e. calls of :meth:`~HDUList.writeto`, :meth:`~HDUList.flush`, or +:meth:`~HDUList.close`. No further use of the data should be exercised. Here is +an example of what happens to the :attr:`~ImageHDU.data` attribute after the +:meth:`~ImageHDU.scale` call:: + + >>> hdu = fits.PrimaryHDU(numpy.array([0., 1, 2, 3])) + >>> print hdu.data + [ 0. 1. 2. 3.] 
+ >>> hdu.scale('int16', bzero=32768) + >>> print hdu.data # now the data has storage values + [-32768 -32767 -32766 -32765] + >>> hdu.writeto('new.fits') + + +.. _data-sections: + +Data Sections +^^^^^^^^^^^^^ + +When a FITS image HDU's :attr:`~ImageHDU.data` is accessed, either the whole +data is copied into memory (in cases of NOT using memory mapping or if the data +is scaled) or a virtual memory space equivalent to the data size is allocated +(in the case of memory mapping of non-scaled data). If there are several very +large image HDUs being accessed at the same time, the system may run out of +memory. + +If a user does not need the entire image(s) at the same time, e.g. processing +images(s) ten rows at a time, the :attr:`~ImageHDU.section` attribute of an +HDU can be used to alleviate such memory problems. + +With PyFITS' improved support for memory-mapping, the sections feature is not +as necessary as it used to be for handling very large images. However, if the +image's data is scaled with non-trivial BSCALE/BZERO values, accessing the data +in sections may still be necessary under the current implementation. Memmap is +also insufficient for loading images larger than 2 to 4 GB on a 32-bit +system--in such cases it may be necessary to use sections. + +Here is an example of getting the median image from 3 input images of the size +5000x5000:: + + >>> f1 = fits.open('file1.fits') + >>> f2 = fits.open('file2.fits') + >>> f3 = fits.open('file3.fits') + >>> output = numpy.zeros(5000 * 5000) + >>> for i in range(50): + ... j = i * 100 + ... k = j + 100 + ... x1 = f1[1].section[j:k,:] + ... x2 = f2[1].section[j:k,:] + ... x3 = f3[1].section[j:k,:] + ... # use scipy.stsci.image's median function + ... output[j:k] = image.median([x1, x2, x3]) + +Data in each :attr:`~ImageHDU.section` does not need to be contiguous for +memory savings to be possible. 
PyFITS will do its best to join together +discontiguous sections of the array while reading as little as possible into +main memory. + +Sections cannot currently be assigned to. Any modifications made to a data +section are not saved back to the original file. diff --git a/docs/io/fits/usage/misc.rst b/docs/io/fits/usage/misc.rst new file mode 100644 index 0000000..bf59831 --- /dev/null +++ b/docs/io/fits/usage/misc.rst @@ -0,0 +1,42 @@ +.. currentmodule:: astropy.io.fits + +Miscellaneous Features +---------------------- + +This section describes some of the miscellaneous features of :mod:`astropy.io.fits`. + +Differs +^^^^^^^ + +The :mod:`astropy.io.fits.diff` module contains several facilities for +generating and reporting the differences between two FITS files, or two +components of a FITS file. + +The :class:`FITSDiff` class can be used to generate and represent the +differences between either two FITS files on disk, or two existing +:class:`HDUList` objects (or some combination thereof). + +Likewise, the :class:`HeaderDiff` class can be used to find the differences +just between two :class:`Header` objects. Other available differs include +:class:`HDUDiff`, :class:`ImageDataDiff`, :class:`TableDataDiff`, and +:class:`RawDataDiff`. + +Each of these classes are instantiated with two instances of the objects that +they diff. The returned diff instance has a number of attributes starting with +``.diff_`` that describe differences between the two objects. 
+ +For example the :class:`HeaderDiff` class cam be used to find the differences +between two :class:`Header` objects like so:: + + >>> from astropy.io import fits + >>> header1 = fits.Header([('KEY_A', 1), ('KEY_B', 2)]) + >>> header2 = fits.Header([('KEY_A', 3), ('KEY_C', 4)]) + >>> diff = fits.diff.HeaderDiff(header1, header2) + >>> diff.identical + False + >>> diff.diff_keywords + (['KEY_B'], ['KEY_C']) + >>> diff.diff_keyword_values + defaultdict( at ...>, {'KEY_A': [(1, 3)]}) + +See the API documentation for details on the different differ classes. diff --git a/docs/io/fits/usage/scripts.rst b/docs/io/fits/usage/scripts.rst new file mode 100644 index 0000000..0c5bdec --- /dev/null +++ b/docs/io/fits/usage/scripts.rst @@ -0,0 +1,29 @@ +Executable Scripts +------------------ + +Astropy installs a couple of useful utility programs on your system that are +built with Astropy. + +fitsheader +^^^^^^^^^^ +.. automodule:: astropy.io.fits.scripts.fitsheader + +fitscheck +^^^^^^^^^ +.. automodule:: astropy.io.fits.scripts.fitscheck + +With Astropy installed, please run ``fitscheck --help`` to see the full program +usage documentation. + +fitsdiff +^^^^^^^^ + +.. currentmodule:: astropy.io.fits + +``fitsdiff`` provides a thin command-line wrapper around the :class:`FITSDiff` +interface--it outputs the report from a :class:`FITSDiff` of two FITS files, +and like common diff-like commands returns a 0 status code if no differences +were found, and 1 if differences were found: + +With Astropy installed, please run ``fitscheck --help`` to see the full program +usage documentation. diff --git a/docs/io/fits/usage/table.rst b/docs/io/fits/usage/table.rst new file mode 100644 index 0000000..e9c43b1 --- /dev/null +++ b/docs/io/fits/usage/table.rst @@ -0,0 +1,350 @@ +.. doctest-skip-all + +.. currentmodule:: astropy.io.fits + +Table Data +---------- + +In this chapter, we'll discuss the data component in a table HDU. 
A table will +always be in an extension HDU, never in a primary HDU. + +There are two kinds of table in the FITS standard: binary tables and ASCII +tables. Binary tables are more economical in storage and faster in data access +and manipulation. ASCII tables store the data in a "human readable" form and +therefore take up more storage space as well as more processing time since the +ASCII text needs to be parsed into numerical values. + + +Table Data as a Record Array +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + +What is a Record Array? +""""""""""""""""""""""" + +A record array is an array which contains records (i.e. rows) of heterogeneous +data types. Record arrays are available through the records module in the numpy +library. Here is a simple example of record array:: + + >>> from numpy import rec + >>> bright = rec.array([(1,'Sirius', -1.45, 'A1V'), + ... (2,'Canopus', -0.73, 'F0Ib'), + ... (3,'Rigil Kent', -0.1, 'G2V')], + ... formats='int16,a20,float32,a10', + ... names='order,name,mag,Sp') + +In this example, there are 3 records (rows) and 4 fields (columns). The first +field is a short integer, second a character string (of length 20), third a +floating point number, and fourth a character string (of length 10). Each +record has the same (heterogeneous) data structure. + +The underlying data structure used for FITS tables is a class called +:class:`FITS_rec` which is a specialized subclass of `numpy.recarray`. A +:class:`FITS_rec` can be instantiated directly using the same initialization +format presented for plain recarrays as in the example above. One may also +instantiate a new :class:`FITS_rec` from a list of PyFITS `Column` objects +using the :meth:`FITS_rec.from_columns` class method. This has the exact same +semantics as :meth:`BinTableHDU.from_columns` and +:meth:`TableHDU.from_columns`, except that it only returns an actual FITS_rec +array and not a whole HDU object. 
+ + +Metadata of a Table +""""""""""""""""""" + +The data in a FITS table HDU is basically a record array, with added +attributes. The metadata, i.e. information about the table data, are stored in +the header. For example, the keyword TFORM1 contains the format of the first +field, TTYPE2 the name of the second field, etc. NAXIS2 gives the number of +records(rows) and TFIELDS gives the number of fields (columns). For FITS +tables, the maximum number of fields is 999. The data type specified in TFORM +is represented by letter codes for binary tables and a FORTRAN-like format +string for ASCII tables. Note that this is different from the format +specifications when constructing a record array. + + +Reading a FITS Table +"""""""""""""""""""" + +Like images, the ``.data`` attribute of a table HDU contains the data of the +table. To recap, the simple example in the Quick Tutorial:: + + >>> f = fits.open('bright_stars.fits') # open a FITS file + >>> tbdata = f[1].data # assume the first extension is a table + >>> print tbdata[:2] # show the first two rows + [(1, 'Sirius', -1.4500000476837158, 'A1V'), + (2, 'Canopus', -0.73000001907348633, 'F0Ib')] + + >>> print tbdata['mag'] # show the values in field "mag" + [-1.45000005 -0.73000002 -0.1 ] + >>> print tbdata.field(1) # columns can be referenced by index too + ['Sirius' 'Canopus' 'Rigil Kent'] + +Note that in Astropy, when using the ``field()`` method, it is 0-indexed while +the suffixes in header keywords, such as TFORM is 1-indexed. So, +``tbdata.field(0)`` is the data in the column with the name specified in TTYPE1 +and format in TFORM1. + +.. warning:: + + The FITS format allows table columns with a zero-width data format, such as + ``'0D'``. This is probably intended as a space-saving measure on files in + which that column contains no data. In such files, the zero-width columns + are ommitted when accessing the table data, so the indexes of fields might + change when using the ``field()`` method. 
For this reason, if you expect + to encounter files containg zero-width columns it is recommended to access + fields by name rather than by index. + + +Table Operations +^^^^^^^^^^^^^^^^ + + +Selecting Records in a Table +"""""""""""""""""""""""""""" + +Like image data, we can use the same "mask array" idea to pick out desired +records from a table and make a new table out of it. + +In the next example, assuming the table's second field having the name +'magnitude', an output table containing all the records of magnitude > 5 from +the input table is generated:: + + >>> from astropy.io import fits + >>> t = fits.open('table.fits') + >>> tbdata = t[1].data + >>> mask = tbdata.['magnitude'] > 5 + >>> newtbdata = tbdata[mask] + >>> hdu = fits.BinTableHDU(data=newtbdata) + >>> hdu.writeto('newtable.fits') + + +Merging Tables +"""""""""""""" + +Merging different tables is straightforward in Astropy. Simply merge the column +definitions of the input tables:: + + >>> t1 = fits.open('table1.fits') + >>> t2 = fits.open('table2.fits') + >>> new_columns = t1[1].columns + t2[1].columns + >>> hdu = fits.BinTableHDU.from_columns(new_columns) + >>> hdu.writeto('newtable.fits') + +The number of fields in the output table will be the sum of numbers of fields +of the input tables. Users have to make sure the input tables don't share any +common field names. The number of records in the output table will be the +largest number of records of all input tables. The expanded slots for the +originally shorter table(s) will be zero (or blank) filled. + +A simpler version of this example can be used to append a new column to a +table. Updating an existing table with a new column is generally more +difficult than it's worth, but one can "append" a column to a table by creating +a new table with columns from the existing table plus the new column(s):: + + >>> orig_table = fits.open('table.fits')[1].data + >>> orig_cols = orig_table.columns + >>> new_cols = fits.ColDefs([ + ... 
fits.Column(name='NEWCOL1', format='D', + ... array=np.zeros(len(orig_table))), + ... fits.Column(name='NEWCOL2', format='D', + ... array=np.zeros(len(orig_table)))]) + >>> hdu = fits.BinTableHDU.from_columns(orig_cols + new_cols) + >>> hdu.writeto('newtable.fits') + +Now ``newtable.fits`` contains a new table with the original table, plus the +two new columns filled with zeros. + + +Appending Tables +"""""""""""""""" + +Appending one table after another is slightly trickier, since the two tables +may have different field attributes. Here are two examples. The first is to +append by field indices, the second one is to append by field names. In both +cases, the output table will inherit column attributes (name, format, etc.) of +the first table:: + + >>> t1 = fits.open('table1.fits') + >>> t2 = fits.open('table2.fits') + >>> nrows1 = t1[1].data.shape[0] + >>> nrows2 = t2[1].data.shape[0] + >>> nrows = nrows1 + nrows2 + >>> hdu = fits.BinTableHDU.from_columns(t1[1].columns, nrows=nrows) + >>> for colname in t1[1].columns.names: + ... hdu.data[colname][nrows1:] = t2[1].data[colname] + >>> hdu.writeto('newtable.fits') + + +Scaled Data in Tables +^^^^^^^^^^^^^^^^^^^^^ + +A table field's data, like an image, can also be scaled. Scaling in a table has +a more generalized meaning than in images. In images, the physical data is a +simple linear transformation from the storage data. The table fields do have +such a construct too, where BSCALE and BZERO are stored in the header as TSCALn +and TZEROn. In addition, boolean columns and ASCII tables' numeric fields are +also generalized "scaled" fields, but without TSCAL and TZERO. + +All scaled fields, like the image case, will take extra memory space as well as +processing. So, if high performance is desired, try to minimize the use of +scaled fields. + +All the scalings are done for the user, so the user only sees the physical +data. 
Thus, there is no need to worry
parsed-literal:: + + **Argument Corresponding Description** + **in Column() header keyword** + + name TTYPE column name + format TFORM column format + unit TUNIT unit + null TNULL null value (only for B, I, and J) + bscale TSCAL scaling factor for data + bzero TZERO zero point for data scaling + disp TDISP display format + dim TDIM multi-dimensional array spec + start TBCOL starting position for ASCII table + array the data of the column + + +Here are a few Columns using various combination of these arguments: + + >>> import numpy as np + >>> from fits import Column + >>> counts = np.array([312, 334, 308, 317]) + >>> names = np.array(['NGC1', 'NGC2', 'NGC3', 'NGC4']) + >>> c1 = Column(name='target', format='10A', array=names) + >>> c2 = Column(name='counts', format='J', unit='DN', array=counts) + >>> c3 = Column(name='notes', format='A10') + >>> c4 = Column(name='spectrum', format='1000E') + >>> c5 = Column(name='flag', format='L', array=[True, False, True, True]) + +In this example, formats are specified with the FITS letter codes. When there +is a number (>1) preceding a (numeric type) letter code, it means each cell in +that field is a one-dimensional array. In the case of column c4, each cell is +an array (a numpy array) of 1000 elements. + +For character string fields, the number be to the *left* of the letter 'A' when +creating binary tables, and should be to the *right* when creating ASCII +tables. However, as this is a common confusion both formats are understood +when creating binary tables (note, however, that upon writing to a file the +correct format will be written in the header). So, for columns c1 and c3, they +both have 10 characters in each of their cells. For numeric data type, the +dimension number must be before the letter code, not after. + +After the columns are constructed, the :meth:`BinTableHDU.from_columns` class +method can be used to construct a table HDU. 
We can either go through the +column definition object:: + + >>> coldefs = fits.ColDefs([c1, c2, c3, c4, c5]) + >>> tbhdu = fits.BinTableHDU.from_columns(coldefs) + +or directly use the :meth:`BinTableHDU.from_columns` method:: + + >>> tbhdu = fits.BinTableHDU.from_columns([c1, c2, c3, c4, c5]) + +.. note:: + + Users familiar with older versions of PyFITS or Astropy will wonder what + happened to :func:`~astropy.io.fits.new_table`. It is still there, but is + deprecated. :meth:`BinTableHDU.from_columns` and its companion for ASCII + tables :meth:`TableHDU.from_columns` are the same as + :func:`~astropy.io.fits.new_table` in the arguments they accept and their + behavior. They just make it more explicit what type of table HDU they + create. + +A look of the newly created HDU's header will show that relevant keywords are +properly populated:: + + >>> tbhdu.header + XTENSION = 'BINTABLE' / binary table extension + BITPIX = 8 / array data type + NAXIS = 2 / number of array dimensions + NAXIS1 = 4025 / length of dimension 1 + NAXIS2 = 4 / length of dimension 2 + PCOUNT = 0 / number of group parameters + GCOUNT = 1 / number of groups + TFIELDS = 5 / number of table fields + TTYPE1 = 'target ' + TFORM1 = '10A ' + TTYPE2 = 'counts ' + TFORM2 = 'J ' + TUNIT2 = 'DN ' + TTYPE3 = 'notes ' + TFORM3 = '10A ' + TTYPE4 = 'spectrum' + TFORM4 = '1000E ' + TTYPE5 = 'flag ' + TFORM5 = 'L ' + +.. warning:: + + It should be noted that when creating a new table with + :meth:`BinTableHDU.from_columns`, an in-memory copy of all of the input + column arrays is created. This is because it is not guaranteed that the + columns are arranged contiguously in memory in row-major order (in fact, + they are most likely not), so they have to be combined into a new array. + +However, if the array data *is* already contiguous in memory, such as in an +existing record array, a kludge can be used to create a new table HDU without +any copying. 
First, create the Columns as before, but without using the +``array=`` argument:: + + >>> c1 = Column(name='target', format='10A') + +Then call :meth:`BinTableHDU.from_columns`:: + + >>> tbhdu = fits.BinTableHDU.from_columns([c1, c2, c3, c4, c5]) + +This will create a new table HDU as before, with the correct column +definitions, but an empty data section. Now simply assign your array directly +to the HDU's data attribute:: + + >>> tbhdu.data = mydata + +In a future version of Astropy table creation will be simplified and this +process won't be necessary. diff --git a/docs/io/fits/usage/unfamiliar.rst b/docs/io/fits/usage/unfamiliar.rst new file mode 100644 index 0000000..3d058a0 --- /dev/null +++ b/docs/io/fits/usage/unfamiliar.rst @@ -0,0 +1,535 @@ +.. doctest-skip-all + +.. currentmodule:: astropy.io.fits + +Less Familiar Objects +--------------------- + +In this chapter, we'll discuss less frequently used FITS data structures. They +include ASCII tables, variable length tables, and random access group FITS +files. + + +ASCII Tables +^^^^^^^^^^^^ + +FITS standard supports both binary and ASCII tables. In ASCII tables, all the +data are stored in a human readable text form, so it takes up more space and +extra processing to parse the text for numeric data. Depending on how the +columns are formatted, floating point data may also lose precision. + +In Astropy, the interface for ASCII tables and binary tables is basically the +same, i.e. the data is in the ``.data`` attribute and the ``field()`` method +is used to refer to the columns and returns a numpy array. When reading the +table, Astropy will automatically detect what kind of table it is. + +:: + + >>> from astropy.io import fits + >>> hdus = fits.open('ascii_table.fits') + >>> hdus[1].data[:1] + FITS_rec( + ... [(10.123000144958496, 37)], + ... dtype=[('a', '>f4'),('b','>i4')]) + >>> hdus[1].data['a'] + array([ 10.12300014, 5.19999981, 15.60999966, 0. , + 345. 
], dtype=float32) + >>> hdus[1].data.formats + ['E10.4', 'I5'] + +Note that the formats in the record array refer to the raw data which are ASCII +strings (therefore 'a11' and 'a5'), but the ``.formats`` attribute of data +retains the original format specifications ('E10.4' and 'I5'). + + +Creating an ASCII Table +""""""""""""""""""""""" + +Creating an ASCII table from scratch is similar to creating a binary table. The +difference is in the Column definitions. The columns/fields in an ASCII table +are more limited than in a binary table. It does not allow more than one +numerical value in a cell. Also, it only supports a subset of what allowed in a +binary table, namely character strings, integer, and (single and double +precision) floating point numbers. Boolean and complex numbers are not allowed. + +The format syntax (the values of the TFORM keywords) is different from that of a +binary table, they are: + +.. parsed-literal:: + + Aw Character string + Iw (Decimal) Integer + Fw.d Single precision real + Ew.d Single precision real, in exponential notation + Dw.d Double precision real, in exponential notation + +where, w is the width, and d the number of digits after the decimal point. The +syntax difference between ASCII and binary tables can be confusing. For example, +a field of 3-character string is specified '3A' in a binary table and as 'A3' in +an ASCII table. + +The other difference is the need to specify the table type when using the +:meth:`TableHDU.from_columns` method, and that `Column` should be provided the +``ascii=True`` argument in order to be unambiguous. + +.. note:: + + Although binary tables are more common in most FITS files, earlier versions + of the FITS format only supported ASCII tables. That is why the class + :class:`TableHDU` is used for representing ASCII tables specifically, + whereas :class:`BinTableHDU` is more explicit that it represents a binary + table. 
These names come from the value ``XTENSION`` keyword in the tables' + headers, which is ``TABLE`` for ASCII tables and ``BINTABLE`` for binary + tables. + +:meth:`TableHDU.from_columns` can be used like so:: + + >>> import numpy as np + >>> from astropy.io import fits + >>> a1 = np.array(['abcd', 'def']) + >>> r1 = np.array([11., 12.]) + >>> c1 = fits.Column(name='abc', format='A3', array=a1, ascii=True) + >>> c2 = fits.Column(name='def', format='E', array=r1, bscale=2.3, + ... bzero=0.6, ascii=True) + >>> c3 = fits.Column(name='t1', format='I', array=[91, 92, 93], + ... ascii=True) + >>> hdu = fits.TableHDU.from_columns([c1, c2, c3]) + >>> hdu.writeto('ascii.fits') + >>> hdu.data + FITS_rec([('abcd', 11.0, 91), ('def', 12.0, 92), ('', 0.0, 93)], + dtype=[('abc', '|S3'), ('def', '|S14'), ('t1', '|S10')]) + +It should be noted that when the formats of the columns are unambiguously +specific to ASCII tables it is not necessary to specify ``ascii=True`` in +the :class:`ColDefs` constructor. In this case there *is* ambiguity because +the format code ``'I'`` represents a 16-bit integer in binary tables, while in +ASCII tables it is not technically a valid format. ASCII table format codes +technically require a character width for each column, such as ``'I10'`` to +create a column that can hold integers up to 10 characters wide. + +However, PyFITS allows the width specification to be ommitted in some cases. +When it is ommitted from ``'I'`` format columns the minimum width needed to +accurately represent all integers in the column is used. The only problem with +using this shortcut is its ambiguity with the binary table ``'I'`` format, so +specifying ``ascii=True`` is a good practice (though PyFITS will still figure +out what you meant in most cases). + + +Variable Length Array Tables +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The FITS standard also supports variable length array tables. 
The basic idea is +that sometimes it is desirable to have tables with cells in the same field +(column) that have the same data type but have different lengths/dimensions. +Compared with the standard table data structure, the variable length table can +save storage space if there is a large dynamic range of data lengths in +different cells. + +A variable length array table can have one or more fields (columns) which are +variable length. The rest of the fields (columns) in the same table can still +be regular, fixed-length ones. Astropy will automatically detect what kind of +field it is during reading; no special action is needed from the user. The data +type specification (i.e. the value of the TFORM keyword) uses an extra letter +'P' and the format is + +.. parsed-literal:: + + rPt(max) + +where r is 0, 1, or absent, t is one of the letter code for regular table data +type (L, B, X, I, J, etc. currently, the X format is not supported for variable +length array field in Astropy), and max is the maximum number of elements. So, +for a variable length field of int32, The corresponding format spec is, +e.g. 'PJ(100)':: + + >>> f = fits.open('variable_length_table.fits') + >>> print f[1].header['tform5'] + 1PI(20) + >>> print f[1].data.field(4)[:3] + [array([1], dtype=int16) array([88, 2], dtype=int16) + array([ 1, 88, 3], dtype=int16)] + +The above example shows a variable length array field of data type int16 and its +first row has one element, second row has 2 elements etc. Accessing variable +length fields is almost identical to regular fields, except that operations on +the whole filed are usually not possible. A user has to process the field row by +row. + + +Creating a Variable Length Array Table +"""""""""""""""""""""""""""""""""""""" + +Creating a variable length table is almost identical to creating a regular +table. The only difference is in the creation of field definitions which are +variable length arrays. 
First, the data type specification will need the 'P' +letter, and secondly, the field data must be an objects array (as included in +the numpy module). Here is an example of creating a table with two fields, one +is regular and the other variable length array:: + + >>> from astropy.io import fits + >>> import numpy as np + >>> c1 = fits.Column(name='var', format='PJ()', + ... array=np.array([[45., 56] + ... [11, 12, 13]], + ... dtype=np.object)) + >>> c2 = fits.Column(name='xyz', format='2I', array=[[11, 3], [12, 4]]) + >>> tbhdu = fits.BinTableHDU.from_columns([c1, c2]) + >>> print tbhdu.data + FITS_rec([(array([45, 56]), array([11, 3], dtype=int16)), + (array([11, 12, 13]), array([12, 4], dtype=int16))], + dtype=[('var', '>> tbhdu.writeto('var_table.fits') + >>> hdu = fits.open('var_table.fits') + >>> hdu[1].header + XTENSION= 'BINTABLE' / binary table extension + BITPIX = 8 / array data type + NAXIS = 2 / number of array dimensions + NAXIS1 = 12 / length of dimension 1 + NAXIS2 = 2 / length of dimension 2 + PCOUNT = 20 / number of group parameters + GCOUNT = 1 / number of groups + TFIELDS = 2 / number of table fields + TTYPE1 = 'var ' + TFORM1 = 'PJ(3) ' + TTYPE2 = 'xyz ' + TFORM2 = '2I ' + + +.. _random-groups: + +Random Access Groups +^^^^^^^^^^^^^^^^^^^^ + +Another less familiar data structure supported by the FITS standard is the +random access group. This convention was established before the binary table +extension was introduced. In most cases its use can now be superseded by the +binary table. It is mostly used in radio interferometry. + +Like Primary HDUs, a Random Access Group HDU is always the first HDU of a FITS +file. Its data has one or more groups. Each group may have any number +(including 0) of parameters, together with an image. The parameters and the +image have the same data type. + +All groups in the same HDU have the same data structure, i.e. 
same data type +(specified by the keyword BITPIX, as in image HDU), same number of parameters +(specified by PCOUNT), and the same size and shape (specified by NAXISn +keywords) of the image data. The number of groups is specified by GCOUNT and +the keyword NAXIS1 is always 0. Thus the total data size for a Random Access +Group HDU is + +.. parsed-literal:: + + \|BITPIX\| \* GCOUNT \* (PCOUNT + NAXIS2 \* NAXIS3 \* ... \* NAXISn) + + +Header and Summary +"""""""""""""""""" + +Accessing the header of a Random Access Group HDU is no different from any +other HDU. Just use the .header attribute. + +The content of the HDU can similarly be summarized by using the +:meth:`HDUList.info` method:: + + >>> f = fits.open('random_group.fits') + >>> print f[0].header['groups'] + True + >>> print f[0].header['gcount'] + 7956 + >>> print f[0].header['pcount'] + 6 + >>> f.info() + Filename: random_group.fits + No. Name Type Cards Dimensions Format + 0 AN GroupsHDU 158 (3, 4, 1, 1, 1) Float32 7956 Groups + 6 Parameters + + +Data: Group Parameters +"""""""""""""""""""""" + +The data part of a random access group HDU is, like other HDUs, in the +``.data`` attribute. It includes both parameter(s) and image array(s). + +Show the data in 100th group, including parameters and data:: + + >>> print f[0].data[99] + (-8.1987486677035799e-06, 1.2010923615889215e-05, + -1.011189139244005e-05, 258.0, 2445728., 0.10, array([[[[[ 12.4308672 , + 0.56860745, 3.99993873], + [ 12.74043655, 0.31398511, 3.99993873], + [ 0. , 0. , 3.99993873], + [ 0. , 0. , 3.99993873]]]]], dtype=float32)) + +The data first lists all the parameters, then the image array, for the +specified group(s). As a reminder, the image data in this file has the shape of +(1,1,1,4,3) in Python or C convention, or (3,4,1,1,1) in IRAF or FORTRAN +convention. 
+ +To access the parameters, first find out what the parameter names are, with the +.parnames attribute:: + + >>> f[0].data.parnames # get the parameter names + ['uu--', 'vv--', 'ww--', 'baseline', 'date', 'date'] + +The group parameter can be accessed by the :meth:`~GroupData.par` method. Like +the table :meth:`~FITS_rec.field` method, the argument can be either index or +name:: + + >>> print f[0].data.par(0)[99] # Access group parameter by name or by index + -8.1987486677035799e-06 + >>> print f[0].data.par('uu--')[99] + -8.1987486677035799e-06 + +Note that the parameter name 'date' appears twice. This is a feature in the +random access group, and it means to add the values together. Thus:: + + >>> f[0].data.parnames # get the parameter names + ['uu--', 'vv--', 'ww--', 'baseline', 'date', 'date'] + >>> print f[0].data.par(4)[99] # Duplicate parameter name 'date' + 2445728.0 + >>> print f[0].data.par(5)[99] + 0.10 + >>> # When accessed by name, it adds the values together if the name is + >>> # shared by more than one parameter + >>> print f[0].data.par('date')[99] + 2445728.10 + +The :meth:`~GroupData.par` is a method for either the entire data object or one +data item (a group). So there are two possible ways to get a group parameter +for a certain group, this is similar to the situation in table data (with its +:meth:`~FITS_rec.field` method):: + + >>> print f[0].data.par(0)[99] + -8.1987486677035799e-06 + >>> print f[0].data[99].par(0) + -8.1987486677035799e-06 + +On the other hand, to modify a group parameter, we can either assign the new +value directly (if accessing the row/group number last) or use the +:meth:`~Group.setpar` method (if accessing the row/group number first). The +method :meth:`~Group.setpar` is also needed for updating by name if the +parameter is shared by more than one parameters:: + + >>> # Update group parameter when selecting the row (group) number last + >>> f[0].data.par(0)[99] = 99. 
+ + >>> # Update group parameter when selecting the row (group) number first + >>> f[0].data[99].setpar(0, 99.) # or setpar('uu--', 99.) + >>> + >>> # Update group parameter by name when the name is shared by more than + >>> # one parameter; the new value must be a tuple of constants or + >>> # sequences + >>> f[0].data[99].setpar('date', (2445729., 0.3)) + >>> f[0].data[:3].setpar('date', (2445729., [0.11, 0.22, 0.33])) + >>> f[0].data[:3].par('date') + array([ 2445729.11 , 2445729.22 , 2445729.33000001]) + + +Data: Image Data +"""""""""""""""" + +The image array of the data portion is accessible by the +:attr:`~GroupData.data` attribute of the data object. A numpy array is +returned:: + + >>> print f[0].data.data[99] + array([[[[[ 12.4308672 , 0.56860745, 3.99993873], + [ 12.74043655, 0.31398511, 3.99993873], + [ 0. , 0. , 3.99993873], + [ 0. , 0. , 3.99993873]]]]], dtype=float32) + + +Creating a Random Access Group HDU +"""""""""""""""""""""""""""""""""" + +To create a random access group HDU from scratch, use :class:`GroupData` to +encapsulate the data into the group data structure, and use :class:`GroupsHDU` +to create the HDU itself:: + + >>> # Create the image arrays. The first dimension is the number of groups. + >>> imdata = numpy.arange(100.0).reshape(10, 1, 1, 2, 5) + >>> # Next, create the group parameter data, we'll have two parameters. + >>> # Note that the size of each parameter's data is also the number of + >>> # groups. + >>> # A parameter's data can also be a numeric constant. + >>> pdata1 = numpy.arange(10) + 0.1 + >>> pdata2 = 42 + >>> # Create the group data object, put parameter names and parameter data + >>> # in lists assigned to their corresponding arguments. + >>> # If the data type (bitpix) is not specified, the data type of the + >>> # image will be used. + >>> x = fits.GroupData(imdata, parnames=['abc', 'xyz'], + ... pardata=[pdata1, pdata2], bitpix=-32) + >>> # Now, create the GroupsHDU and write to a FITS file. 
+ >>> hdu = fits.GroupsHDU(x) + >>> hdu.writeto('test_group.fits') + >>> hdu.header + SIMPLE = T / conforms to FITS standard + BITPIX = -32 / array data type + NAXIS = 5 / number of array dimensions + NAXIS1 = 0 + NAXIS2 = 5 + NAXIS3 = 2 + NAXIS4 = 1 + NAXIS5 = 1 + EXTEND = T + GROUPS = T / has groups + PCOUNT = 2 / number of parameters + GCOUNT = 10 / number of groups + PTYPE1 = 'abc ' + PTYPE2 = 'xyz ' + >>> print hdu.data[:2] + FITS_rec[ + (0.10000000149011612, 42.0, array([[[[ 0., 1., 2., 3., 4.], + [ 5., 6., 7., 8., 9.]]]], dtype=float32)), + (1.1000000238418579, 42.0, array([[[[ 10., 11., 12., 13., 14.], + [ 15., 16., 17., 18., 19.]]]], dtype=float32)) + ] + + +Compressed Image Data +^^^^^^^^^^^^^^^^^^^^^ + +A general technique has been developed for storing compressed image data in +FITS binary tables. The principle used in this convention is to first divide +the n-dimensional image into a rectangular grid of sub images or 'tiles'. +Each tile is then compressed as a continuous block of data, and the resulting +compressed byte stream is stored in a row of a variable length column in a +FITS binary table. Several commonly used algorithms for compressing image +tiles are supported. These include, Gzip, Rice, IRAF Pixel List (PLIO), and +Hcompress. + +For more details, reference "A FITS Image Compression Proposal" from: + + http://www.adass.org/adass/proceedings/adass99/P2-42/ + +and "Registered FITS Convention, Tiled Image Compression Convention": + + http://fits.gsfc.nasa.gov/registry/tilecompression.html + +Compressed image data is accessed, in Astropy, using the optional +"astropy.io.fits.compression" module contained in a C shared library +(compression.so). If an attempt is made to access an HDU containing compressed +image data when the compression module is not available, the user is notified +of the problem and the HDU is treated like a standard binary table HDU. This +notification will only be made the first time compressed image data is +encountered. 
In this way, the compression module is not required in order for +Astropy to work. + + +Header and Summary +"""""""""""""""""" + +In Astropy, the header of a compressed image HDU appears to the user like any +image header. The actual header stored in the FITS file is that of a binary +table HDU with a set of special keywords, defined by the convention, to +describe the structure of the compressed image. The conversion between binary +table HDU header and image HDU header is all performed behind the scenes. +Since the HDU is actually a binary table, it may not appear as a primary HDU in +a FITS file. + +The content of the HDU header may be accessed using the ``.header`` attribute:: + + >>> f = fits.open('compressed_image.fits') + >>> print f[1].header + XTENSION= 'IMAGE ' / extension type + BITPIX = 16 / array data type + NAXIS = 2 / number of array dimensions + NAXIS1 = 512 / length of data axis + NAXIS2 = 512 / length of data axis + PCOUNT = 0 / number of parameters + GCOUNT = 1 / one data group (required keyword) + EXTNAME = 'COMPRESSED' / name of this binary table extension + +The contents of the corresponding binary table HDU may be accessed using the +hidden ``._header`` attribute. 
However, all user interface with the HDU header +should be accomplished through the image header (the ``.header`` attribute):: + + >>> f = fits.open('compressed_image.fits') + >>> print f[1]._header + XTENSION= 'BINTABLE' / binary table extension + BITPIX = 8 / 8-bit bytes + NAXIS = 2 / 2-dimensional binary table + NAXIS1 = 8 / width of table in bytes + NAXIS2 = 512 / number of rows in table + PCOUNT = 157260 / size of special data area + GCOUNT = 1 / one data group (required keyword) + TFIELDS = 1 / number of fields in each row + TTYPE1 = 'COMPRESSED_DATA' / label for field 1 + TFORM1 = '1PB(384)' / data format of field: variable length array + ZIMAGE = T / extension contains compressed image + ZBITPIX = 16 / data type of original image + ZNAXIS = 2 / dimension of original image + ZNAXIS1 = 512 / length of original image axis + ZNAXIS2 = 512 / length of original image axis + ZTILE1 = 512 / size of tiles to be compressed + ZTILE2 = 1 / size of tiles to be compressed + ZCMPTYPE= 'RICE_1 ' / compression algorithm + ZNAME1 = 'BLOCKSIZE' / compression block size + ZVAL1 = 32 / pixels per block + EXTNAME = 'COMPRESSED' / name of this binary table extension + +The contents of the HDU can be summarized by using either the :func:`info` +convenience function or method:: + + >>> fits.info('compressed_image.fits') + Filename: compressed_image.fits + No. Name Type Cards Dimensions Format + 0 PRIMARY PrimaryHDU 6 () int16 + 1 COMPRESSED CompImageHDU 52 (512, 512) int16 + >>> + >>> f = fits.open('compressed_image.fits') + >>> f.info() + Filename: compressed_image.fits + No. Name Type Cards Dimensions Format + 0 PRIMARY PrimaryHDU 6 () int16 + 1 COMPRESSED CompImageHDU 52 (512, 512) int16 + >>> + + +Data +"""" + +As with the header, the data of a compressed image HDU appears to the user as +standard uncompressed image data. The actual data is stored in the fits file +as Binary Table data containing at least one column (COMPRESSED_DATA). 
Each +row of this variable-length column contains the byte stream that was generated +as a result of compressing the corresponding image tile. Several optional +columns may also appear. These include, UNCOMPRESSED_DATA to hold the +uncompressed pixel values for tiles that cannot be compressed, ZSCALE and ZZERO +to hold the linear scale factor and zero point offset which may be needed to +transform the raw uncompressed values back to the original image pixel values, +and ZBLANK to hold the integer value used to represent undefined pixels (if +any) in the image. + +The contents of the uncompressed HDU data may be accessed using the ``.data`` +attribute:: + + >>> f = fits.open('compressed_image.fits') + >>> f[1].data + array([[38, 43, 35, ..., 45, 43, 41], + [36, 41, 37, ..., 42, 41, 39], + [38, 45, 37, ..., 42, 35, 43], + ..., + [49, 52, 49, ..., 41, 35, 39], + [57, 52, 49, ..., 40, 41, 43], + [53, 57, 57, ..., 39, 35, 45]], dtype=int16) + +The compressed data can be accessed via the ``.compressed_data`` attribute, but +this rarely need be accessed directly. It may be useful for performing direct +copies of the compressed data without needing to decompress it first. + + +Creating a Compressed Image HDU +""""""""""""""""""""""""""""""" + +To create a compressed image HDU from scratch, simply construct a +:class:`CompImageHDU` object from an uncompressed image data array and its +associated image header. From there, the HDU can be treated just like any +other image HDU:: + + >>> hdu = fits.CompImageHDU(imageData, imageHeader) + >>> hdu.writeto('compressed_image.fits') + +The API documentation for the :class:`CompImageHDU` initializer method +describes the possible options for constructing a :class:`CompImageHDU` object. diff --git a/docs/io/fits/usage/verification.rst b/docs/io/fits/usage/verification.rst new file mode 100644 index 0000000..cf6b610 --- /dev/null +++ b/docs/io/fits/usage/verification.rst @@ -0,0 +1,348 @@ +.. doctest-skip-all + +.. 
currentmodule:: astropy.io.fits + +Verification +------------ + +Astropy has built in a flexible scheme to verify that FITS data conforms to +the FITS standard. The basic verification philosophy in Astropy is to be +tolerant in input and strict in output. + +When Astropy reads a FITS file which is not conforming to FITS standard, it +will not raise an error and exit. It will try to make the best educated +interpretation and only give up when the offending data is accessed and no +unambiguous interpretation can be reached. + +On the other hand, when writing to an output FITS file, the content to be +written must be strictly compliant with the FITS standard by default. This +default behavior can be overridden by several other options, so the user will +not be held up because of a minor standard violation. + + +FITS Standard +^^^^^^^^^^^^^ + +Since FITS standard is a "loose" standard, there are many places the violation +can occur and to enforce them all would be almost impossible. It is not uncommon +for major observatories to generate data products which are not 100% FITS +compliant. Some observatories have also developed their own sub-standard +(dialect?) and some of these become so prevalent that they become de facto +standards. Examples include the long string value and the use of the CONTINUE +card. + +The violation of the standard can happen at different levels of the data +structure. Astropy's verification scheme is developed on these hierarchical +levels. Here are the 3 Astropy verification levels: + +1. The HDU List + +2. Each HDU + +3. Each Card in the HDU Header + +These three levels correspond to the three categories of objects: +:class:`HDUList`, any HDU (e.g. :class:`PrimaryHDU`, :class:`ImageHDU`, etc.), +and :class:`Card`. They are the only objects having the ``verify()`` method. +Most other classes in astropy.io.fits do not have a ``verify()`` method. 
+ +If ``verify()`` is called at the HDU List level, it verifies standard +compliance at all three levels, but a call of ``verify()`` at the Card level +will only check the compliance of that Card. Since Astropy is tolerant when +reading a FITS file, no ``verify()`` is called on input. On output, +``verify()`` is called with the most restrictive option as the default. + + +Verification Options +^^^^^^^^^^^^^^^^^^^^ + +There are several options accepted by all verify(option) calls in Astropy. In +addition, they are available for the ``output_verify`` argument of the following +methods: ``close()``, ``writeto()``, and ``flush()``. In these cases, they are +passed to a ``verify()`` call within these methods. The available options are: + +**exception** + +This option will raise an exception if any FITS standard is violated. This is +the default option for output (i.e. when ``writeto()``, ``close()``, or +``flush()`` is called). If a user wants to override this default on output, the +other options listed below can be used. + +**warn** + +This option is the same as the ignore option but will send warning messages. It +will not try to fix any FITS standard violations whether fixable or not. + +**ignore** + +This option will ignore any FITS standard violation. On output, it will write +the HDU List content to the output FITS file, whether or not it is conforming +to the FITS standard. + +The ignore option is useful in the following situations: + +1. An input FITS file with non-standard formatting is read and the user wants + to copy or write out to an output file. The non-standard formatting will be + preserved in the output file. + +2. A user wants to create a non-standard FITS file on purpose, possibly for + testing or consistency. + +No warning message will be printed out. This is like a silent warning option +(see below). + +**fix** + +This option will try to fix any FITS standard violations. It is not always +possible to fix such violations. 
In general, there are two kinds of FITS +standard violations: fixable and non-fixable. For example, if a keyword has a +floating number with an exponential notation in lower case 'e' (e.g. 1.23e11) +instead of the upper case 'E' as required by the FITS standard, it is a fixable +violation. On the other hand, a keyword name like 'P.I.' is not fixable, since +it will not know what to use to replace the disallowed periods. If a violation +is fixable, this option will print out a message noting it is fixed. If it is +not fixable, it will throw an exception. + +The principle behind fixing is to do no harm. For example, it is plausible to +'fix' a Card with a keyword name like 'P.I.' by deleting it, but Astropy will +not take such action to hurt the integrity of the data. + +Not all fixes may be the "correct" fix, but at least Astropy will try to make +the fix in such a way that it will not throw off other FITS readers. + +**silentfix** + +Same as fix, but will not print out informative messages. This may be useful in +a large script where the user does not want excessive harmless messages. If the +violation is not fixable, it will still throw an exception. + +In addition, as of Astropy version 0.4.0 the following 'combined' options are +available: + + * **fix+ignore** + * **fix+warn** + * **fix+exception** + * **silentfix+ignore** + * **silentfix+warn** + * **silentfix+exception** + +These options combine the semantics of the basic options. For example +``silentfix+exception`` is actually equivalent to just ``silentfix`` in that +fixable errors will be fixed silently, but any unfixable errors will raise an +exception. On the other hand ``silentfix+warn`` will issue warnings for +unfixable errors, but will stay silent about any fixed errors. 
+ + +Verifications at Different Data Object Levels +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +We'll examine what Astropy's verification does at the three different levels: + + +Verification at HDUList +""""""""""""""""""""""" + +At the HDU List level, the verification is only for two simple cases: + +1. Verify that the first HDU in the HDU list is a Primary HDU. This is a + fixable case. The fix is to insert a minimal Primary HDU into the HDU list. + +2. Verify second or later HDU in the HDU list is not a Primary HDU. Violation + will not be fixable. + + +Verification at Each HDU +"""""""""""""""""""""""" + +For each HDU, the mandatory keywords, their locations in the header, and their +values will be verified. Each FITS HDU has a fixed set of required keywords in +a fixed order. For example, the Primary HDU's header must at least have the +following keywords: + +.. parsed-literal:: + + SIMPLE = T / + BITPIX = 8 / + NAXIS = 0 + +If any of the mandatory keywords are missing or in the wrong order, the fix +option will fix them:: + + >>> hdu.header # has a 'bad' header + SIMPLE = T / + NAXIS = 0 + BITPIX = 8 / + >>> hdu.verify('fix') # fix it + Output verification result: + 'BITPIX' card at the wrong place (card 2). Fixed by moving it to the right + place (card 1). + >>> hdu.header # voila! + SIMPLE = T / conforms to FITS standard + BITPIX = 8 / array data type + NAXIS = 0 + + +Verification at Each Card +""""""""""""""""""""""""" + +The lowest level, the Card, also has the most complicated verification +possibilities. Here is a list of fixable and not fixable Cards: + +Fixable Cards: + +1. floating point numbers with lower case 'e' or 'd' + +2. the equal sign is before column 9 in the card image + +3. string value without enclosing quotes + +4. missing equal sign before column 9 in the card image + +5. space between numbers and E or D in floating point values + +6. 
unparsable values will be "fixed" as a string + +Here are some examples of fixable cards:: + + >>> hdu.header[4:] # has a bunch of fixable cards + FIX1 = 2.1e23 + FIX2= 2 + FIX3 = string value without quotes + FIX4 2 + FIX5 = 2.4 e 03 + FIX6 = '2 10 ' + >>> hdu.header[5] # can still access the values before the fix + 2 + >>> hdu.header['fix4'] + 2 + >>> hdu.header['fix5'] + 2400.0 + >>> hdu.verify('silentfix') + >>> hdu.header[4:] + FIX1 = 2.1E23 + FIX2 = 2 + FIX3 = 'string value without quotes' + FIX4 = 2 + FIX5 = 2.4E03 + FIX6 = '2 10 ' + +Unfixable Cards: + +1. illegal characters in keyword name + +We'll summarize the verification with a "life-cycle" example:: + + >>> h = fits.PrimaryHDU() # create a PrimaryHDU + >>> # Try to add a non-standard FITS keyword 'P.I.' (FITS does not allow + >>> # '.' in the keyword), if using the update() method - doesn't work! + >>> h['P.I.'] = 'Hubble' + ValueError: Illegal keyword name 'P.I.' + >>> # Have to do it the hard way (so a user will not do this by accident) + >>> # First, create a card image and give verbatim card content (including + >>> # the proper spacing, but no need to add the trailing blanks) + >>> c = fits.Card.fromstring("P.I. = 'Hubble'") + >>> h.header.append(c) # then append it to the header + >>> # Now if we try to write to a FITS file, the default output + >>> # verification will not take it. + >>> h.writeto('pi.fits') + Output verification result: + HDU 0: + Card 4: + Unfixable error: Illegal keyword name 'P.I.' + ...... 
+ + raise VerifyError + VerifyError + >>> # Must set the output_verify argument to 'ignore', to force writing a + >>> # non-standard FITS file + >>> h.writeto('pi.fits', output_verify='ignore') + >>> # Now reading a non-standard FITS file + >>> # astropy.io.fits is magnanimous in reading non-standard FITS files + >>> hdus = fits.open('pi.fits') + >>> hdus[0].header + SIMPLE = T / conforms to FITS standard + BITPIX = 8 / array data type + NAXIS = 0 / number of array dimensions + EXTEND = T + P.I. = 'Hubble' + >>> # even when you try to access the offending keyword, it does NOT + >>> # complain + >>> hdus[0].header['p.i.'] + 'Hubble' + >>> # But if you want to check whether there is anything wrong/non-standard, + >>> # use the verify() method + >>> hdus.verify() + Output verification result: + HDU 0: + Card 4: + Unfixable error: Illegal keyword name 'P.I.' + + +Verification using the FITS Checksum Keyword Convention +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The North American FITS committee has reviewed the FITS Checksum Keyword +Convention for possible adoption as a FITS Standard. This convention provides +an integrity check on information contained in FITS HDUs. The convention +consists of two header keyword cards: CHECKSUM and DATASUM. The CHECKSUM +keyword is defined as an ASCII character string whose value forces the 32-bit +1's complement checksum accumulated over all the 2880-byte FITS logical records +in the HDU to equal negative zero. The DATASUM keyword is defined as a +character string containing the unsigned integer value of the 32-bit 1's +complement checksum of the data records in the HDU. Verifying that the +accumulated checksum is still equal to negative zero provides a fairly reliable +way to determine that the HDU has not been modified by subsequent data +processing operations or corrupted while copying or storing the file on +physical media. 
+ +In order to avoid any impact on performance, by default Astropy will not verify +HDU checksums when a file is opened or generate checksum values when a file is +written. In fact, CHECKSUM and DATASUM cards are automatically removed from +HDU headers when a file is opened, and any CHECKSUM or DATASUM cards are +stripped from headers when a HDU is written to a file. In order to verify the +checksum values for HDUs when opening a file, the user must supply the checksum +keyword argument in the call to the open convenience function with a value of +True. When this is done, any checksum verification failure will cause a +warning to be issued (via the warnings module). If checksum verification is +requested in the open, and no CHECKSUM or DATASUM cards exist in the HDU +header, the file will open without comment. Similarly, in order to output the +CHECKSUM and DATASUM cards in an HDU header when writing to a file, the user +must supply the checksum keyword argument with a value of True in the call to +the writeto function. It is possible to write only the DATASUM card to the +header by supplying the checksum keyword argument with a value of 'datasum'. + +Here are some examples:: + + >>> # Open the file pix.fits verifying the checksum values for all HDUs + >>> hdul = fits.open('pix.fits', checksum=True) + +:: + + >>> # Open the file in.fits where checksum verification fails for the + >>> # primary HDU + >>> hdul = fits.open('in.fits', checksum=True) + Warning: Checksum verification failed for HDU #0. + +:: + + >>> # Create file out.fits containing an HDU constructed from data and + >>> # header containing both CHECKSUM and DATASUM cards. 
+ >>> fits.writeto('out.fits', data, header, checksum=True) + +:: + + >>> # Create file out.fits containing all the HDUs in the HDULIST + >>> # hdul with each HDU header containing only the DATASUM card + >>> hdul.writeto('out.fits', checksum='datasum') + +:: + + >>> # Create file out.fits containing the HDU hdu with both CHECKSUM + >>> # and DATASUM cards in the header + >>> hdu.writeto('out.fits', checksum=True) + +:: + + >>> # Append a new HDU constructed from array data to the end of + >>> # the file existingfile.fits with only the appended HDU + >>> # containing both CHECKSUM and DATASUM cards. + >>> fits.append('existingfile.fits', data, checksum=True) diff --git a/docs/io/misc.rst b/docs/io/misc.rst new file mode 100644 index 0000000..a03ed47 --- /dev/null +++ b/docs/io/misc.rst @@ -0,0 +1,16 @@ +********************************************** +Miscellaneous Input/Output (`astropy.io.misc`) +********************************************** + +The `astropy.io.misc` module contains miscellaneous input/output routines that +do not fit elsewhere, and are often used by other Astropy sub-packages. For +example, `astropy.io.misc.hdf5` contains functions to read/write +:class:`~astropy.table.Table` objects from/to HDF5 files, but these +should not be imported directly by users. Instead, users can access this +functionality via the :class:`~astropy.table.Table` class itself (see +:ref:`table_io`). Routines that are intended to be used directly by users are +listed in the `astropy.io.misc` section. + +.. automodapi:: astropy.io.misc + +.. automodapi:: astropy.io.misc.hdf5 diff --git a/docs/io/registry.rst b/docs/io/registry.rst new file mode 100644 index 0000000..90ccb3c --- /dev/null +++ b/docs/io/registry.rst @@ -0,0 +1,105 @@ +.. _io_registry: + +************************************ +I/O Registry (`astropy.io.registry`) +************************************ + +.. 
note:: + + The I/O registry is only meant to be used directly by users who want + to define their own custom readers/writers. Users who want to find + out more about what built-in formats are supported by + :class:`~astropy.table.Table` by default should see + :ref:`table_io`. No built-in formats are currently defined for + :class:`~astropy.nddata.NDData`, but this will be added in the + future. + +Introduction +============ + +The I/O registry is a sub-module used to define the readers/writers available +for the :class:`~astropy.table.Table` and +:class:`~astropy.nddata.NDData` classes. + +Using `astropy.io.registry` +=========================== + +The following example demonstrates how to create a reader for the +:class:`~astropy.table.Table` class. First, we can create a highly +simplistic FITS reader which just reads the data as a structured array:: + + from astropy.table import Table + + def fits_table_reader(filename, hdu=1): + from astropy.io import fits + data = fits.open(filename)[hdu].data + return Table(data) + +and then register it:: + + from astropy.io import registry + registry.register_reader('fits', Table, fits_table_reader) + +Reader functions can take any arguments except ``format`` (since this +is reserved for :func:`~astropy.io.registry.read`) and should return an instance of the class specified as the second argument of ``register_reader`` (:class:`~astropy.table.Table` in the above case). + +We can then read in a FITS table with:: + + t = Table.read('catalog.fits', format='fits') + +In practice, it would be nice to have the ``read`` method automatically +identify that this file was a FITS file, so we can construct a function that +can recognize FITS files, which we refer to here as an *identifier* function. 
+An identifier function should take a first argument that should be a string +which indicates whether the identifier is being called from ``read`` or +``write``, and should then accept arbitrary number of positional and keyword +arguments via ``*args`` and ``**kwargs``, which are the arguments passed to +``Table.read``. We can write a simplistic function that only looks at +filenames (but in practice, this function could even look at the first few +bytes of the file for example). The only requirement is that it return a +boolean indicating whether the input matches that expected for the format:: + + def fits_identify(origin, *args, **kwargs): + return isinstance(args[0], basestring) and \ + args[0].lower().split('.')[-1] in ['fits', 'fit'] + +.. note:: Identifier functions should be prepared for arbitrary input - in + particular, the first argument may not be a filename or file + object, so it should not assume that this is the case. + +We then register this identifier function:: + + registry.register_identifier('fits', Table, fits_identify) + +And we can then do:: + + t = Table.read('catalog.fits') + +If multiple formats match the current input, then an exception is +raised, and similarly if no format matches the current input. In that +case, the format should be explicitly given with the ``format=`` +keyword argument. + +Similarly, it is possible to create custom writers. To go with our simplistic FITS reader above, we can write a simplistic FITS writer:: + + def fits_table_writer(table, filename, clobber=False): + import numpy as np + from astropy.io import fits + fits.writeto(filename, np.array(table), clobber=clobber) + +We then register the writer:: + + io_registry.register_writer('fits', Table, fits_table_writer) + +And we can then write the file out to a FITS file:: + + t.write('catalog_new.fits', format='fits') + +If we have registered the identifier as above, we can simply do:: + + t.write('catalog_new.fits') + +Reference/API +============= + +.. 
automodapi:: astropy.io.registry diff --git a/docs/io/unified.rst b/docs/io/unified.rst new file mode 100644 index 0000000..d7d40b2 --- /dev/null +++ b/docs/io/unified.rst @@ -0,0 +1,288 @@ +.. doctest-skip-all + +.. _table_io: + +Unified file read/write interface +=================================== + +Astropy provides a unified interface for reading and writing data in different formats. +For many common cases this will simplify the process of file I/O and reduce the need to +master the separate details of all the I/O packages within Astropy. This functionality is +still in active development and the number of supported formats will be increasing. For +details on the implementation see :ref:`io_registry`. + +Getting started with Table I/O +------------------------------ + +The :class:`~astropy.table.Table` class includes two methods, +:meth:`~astropy.table.Table.read` and +:meth:`~astropy.table.Table.write`, that make it possible to read from +and write to files. A number of formats are automatically supported (see +`Built-in table readers/writers`_) and new file formats and extensions can be +registered with the :class:`~astropy.table.Table` class (see +:ref:`io_registry`). + +To use this interface, first import the :class:`~astropy.table.Table` class, then +simply call the :class:`~astropy.table.Table` +:meth:`~astropy.table.Table.read` method with the name of the file and +the file format, for instance ``'ascii.daophot'``:: + + >>> from astropy.table import Table + >>> t = Table.read('photometry.dat', format='ascii.daophot') + +It is possible to load tables directly from the Internet using URLs. For example, +download tables from Vizier catalogues in CDS format (``'ascii.cds'``):: + + >>> t = Table.read("ftp://cdsarc.u-strasbg.fr/pub/cats/VII/253/snrs.dat", + ... readme="ftp://cdsarc.u-strasbg.fr/pub/cats/VII/253/ReadMe", + ... 
format="ascii.cds") + +For certain file formats, the format can be automatically detected, for +example from the filename extension:: + + >>> t = Table.read('table.tex') + +Similarly, for writing, the format can be explicitly specified:: + + >>> t.write(filename, format='latex') + +As for the :meth:`~astropy.table.Table.read` method, the format may +be automatically identified in some cases. + +Any additional arguments specified will depend on the format. For examples of this see the +section `Built-in table readers/writers`_. This section also provides the full list of +choices for the ``format`` argument. + +.. _built_in_readers_writers: + +Built-in table readers/writers +------------------------------ + +The full list of built-in readers and writers is shown in the table below: + +=========================== ==== ===== ============= ========== + Format Read Write Auto-identify Deprecated +=========================== ==== ===== ============= ========== + aastex Yes Yes No Yes + ascii Yes Yes No + ascii.aastex Yes Yes No + ascii.basic Yes Yes No + ascii.cds Yes No No + ascii.commented_header Yes Yes No + ascii.daophot Yes No No + ascii.fixed_width Yes Yes No +ascii.fixed_width_no_header Yes Yes No + ascii.fixed_width_two_line Yes Yes No + ascii.html Yes Yes Yes + ascii.ipac Yes Yes No + ascii.latex Yes Yes Yes + ascii.no_header Yes Yes No + ascii.rdb Yes Yes Yes + ascii.sextractor Yes No No + ascii.tab Yes Yes No + ascii.csv Yes Yes Yes + cds Yes No No Yes + daophot Yes No No Yes + fits Yes Yes Yes + hdf5 Yes Yes Yes + html Yes Yes No Yes + ipac Yes Yes No Yes + latex Yes Yes No Yes + rdb Yes Yes No Yes + votable Yes Yes Yes +=========================== ==== ===== ============= ========== + +Deprecated format names like ``aastex`` will be removed in a future version. +Use the full name (e.g. ``ascii.aastex``) instead. + +.. 
_table_io_ascii: + +ASCII formats +^^^^^^^^^^^^^^ + +The :meth:`~astropy.table.Table.read` and +:meth:`~astropy.table.Table.write` methods can be used to read and write formats +supported by `astropy.io.ascii`. + +Use ``format='ascii'`` in order to interface to the generic +:func:`~astropy.io.ascii.read` and :func:`~astropy.io.ascii.write` +functions from `astropy.io.ascii`. When reading a table this means +that all supported ASCII table formats will be tried in order to successfully +parse the input. For example:: + + >>> t = Table.read('astropy/io/ascii/tests/t/latex1.tex', format='ascii') + >>> print t + cola colb colc + ---- ---- ---- + a 1 2 + b 3 4 + +When writing a table with ``format='ascii'`` the output is a basic +character-delimited file with a single header line containing the +column names. + +All additional arguments are passed to the `astropy.io.ascii` +:func:`~astropy.io.ascii.read` and :func:`~astropy.io.ascii.write` +functions. Further details are available in the sections on +:ref:`io_ascii_read_parameters` and :ref:`io_ascii_write_parameters`. For example, to change +column delimiter and the output format for the ``colc`` column use:: + + >>> t.write(sys.stdout, format='ascii', delimiter='|', formats={'colc': '%0.2f'}) + cola|colb|colc + a|1|2.00 + b|3|4.00 + +A full list of the supported ``format`` values and corresponding format types +for ASCII tables is given below. The ``Suffix`` column indicates the filename +suffix where the format will be auto-detected, while the ``Write`` column +indicates which support write functionality. 
+ +=============================== ====== ===== ============================================================================================ + Format Suffix Write Description +=============================== ====== ===== ============================================================================================ +``ascii`` Yes ASCII table in any supported format (uses guessing) +``ascii.aastex`` Yes :class:`~astropy.io.ascii.AASTex`: AASTeX deluxetable used for AAS journals +``ascii.basic`` Yes :class:`~astropy.io.ascii.Basic`: Basic table with custom delimiters +``ascii.cds`` :class:`~astropy.io.ascii.Cds`: CDS format table +``ascii.commented_header`` Yes :class:`~astropy.io.ascii.CommentedHeader`: Column names in a commented line +``ascii.daophot`` :class:`~astropy.io.ascii.Daophot`: IRAF DAOphot format table +``ascii.fixed_width`` Yes :class:`~astropy.io.ascii.FixedWidth`: Fixed width +``ascii.fixed_width_no_header`` Yes :class:`~astropy.io.ascii.FixedWidthNoHeader`: Fixed width with no header +``ascii.fixed_width_two_line`` Yes :class:`~astropy.io.ascii.FixedWidthTwoLine`: Fixed width with second header line +``ascii.ipac`` Yes :class:`~astropy.io.ascii.Ipac`: IPAC format table +``ascii.html`` .html Yes :class:`~astropy.io.ascii.HTML`: HTML table +``ascii.latex`` .tex Yes :class:`~astropy.io.ascii.Latex`: LaTeX table +``ascii.no_header`` Yes :class:`~astropy.io.ascii.NoHeader`: Basic table with no headers +``ascii.rdb`` .rdb Yes :class:`~astropy.io.ascii.Rdb`: Tab-separated with a type definition header line +``ascii.sextractor`` :class:`~astropy.io.ascii.SExtractor`: SExtractor format table +``ascii.tab`` Yes :class:`~astropy.io.ascii.Tab`: Basic table with tab-separated values +``ascii.csv`` .csv Yes :class:`~astropy.io.ascii.Csv`: Basic table with comma-separated values +=============================== ====== ===== ============================================================================================ + +.. 
note:: + + When specifying a specific ASCII table format using the unified interface, the format name is + prefixed with ``ascii.`` in order to identify the format as ASCII-based. Compare the + table above to the `astropy.io.ascii` list of :ref:`supported_formats`. Therefore the following + are equivalent:: + + >>> dat = ascii.read('file.dat', format='daophot') + >>> dat = Table.read('file.dat', format='ascii.daophot') + + For compatibility with astropy version 0.2 and earlier, the following format + values are also allowed in ``Table.read()``: ``daophot``, ``ipac``, ``html``, ``latex``, and ``rdb``. + +.. _table_io_fits: + +FITS +^^^^ + +Reading/writing from/to `FITS `_ +files is supported with ``format='fits'``. In most cases, existing FITS +files should be automatically identified as such based on the header of the +file, but if not, or if writing to disk, then the format should be explicitly +specified. + +If a FITS table file contains only a single table, then it can be read in +with:: + + >>> t = Table.read('data.fits') + +If more than one table is present in the file, the first table found will be +read in and a warning will be emitted:: + + >>> t = Table.read('data.fits') + WARNING: hdu= was not specified but multiple tables are present, reading in first available table (hdu=1) [astropy.io.fits.connect] + +To write to a new file:: + + >>> t.write('new_table.fits') + +At this time, the ``meta`` attribute of the +:class:`~astropy.table.Table` class is simply an ordered +dictionary and does not fully represent the structure of a FITS +header (for example, keyword comments are dropped). This is likely +to change in a future release. + +.. _table_io_hdf5: + +HDF5 +^^^^^^^^ + +Reading/writing from/to `HDF5 `_ files is +supported with ``format='hdf5'`` (this requires `h5py +`_ to be installed). 
However, the ``.hdf5`` +file extension is automatically recognized when writing files, and HDF5 files +are automatically identified (even with a different extension) when reading +in (using the first few bytes of the file to identify the format), so in most +cases you will not need to explicitly specify ``format='hdf5'``. + +Since HDF5 files can contain multiple tables, the full path to the table +should be specified via the ``path=`` argument when reading and writing. +For example, to read a table called ``data`` from an HDF5 file named +``observations.hdf5``, you can do:: + + >>> t = Table.read('observations.hdf5', path='data') + +To read a table nested in a group in the HDF5 file, you can do:: + + >>> t = Table.read('observations.hdf5', path='group/data') + +To write a table to a new file, the path should also be specified:: + + >>> t.write('new_file.hdf5', path='updated_data') + +It is also possible to write a table to an existing file using ``append=True``:: + + >>> t.write('observations.hdf5', path='updated_data', append=True) + +As with other formats, the ``overwrite=True`` argument is supported for +overwriting existing files. To overwrite only a single table within an HDF5 +file that has multiple datasets, use *both* the ``overwrite=True`` and +``append=True`` arguments. + +Finally, when writing to HDF5 files, the ``compression=`` argument can be +used to ensure that the data is compressed on disk:: + + >>> t.write('new_file.hdf5', path='updated_data', compression=True) + + + + +.. _table_io_votable: + +VO Tables +^^^^^^^^^^^ + +Reading/writing from/to `VO table `_ +files is supported with ``format='votable'``. In most cases, existing VO +tables should be automatically identified as such based on the header of the +file, but if not, or if writing to disk, then the format should be explicitly +specified. 
+ +If a VO table file contains only a single table, then it can be read in with:: + + >>> t = Table.read('aj285677t3_votable.xml') + +If more than one table is present in the file, an error will be raised, +unless the table ID is specified via the ``table_id=`` argument:: + + >>> t = Table.read('catalog.xml') + Traceback (most recent call last): + File "", line 1, in + File "/Volumes/Raptor/Library/Python/2.7/lib/python/site-packages/astropy/table/table.py", line 1559, in read + table = reader(*args, **kwargs) + File "/Volumes/Raptor/Library/Python/2.7/lib/python/site-packages/astropy/io/votable/connect.py", line 44, in read_table_votable + raise ValueError("Multiple tables found: table id should be set via the id= argument. The available tables are " + ', '.join(tables.keys())) + ValueError: Multiple tables found: table id should be set via the table_id= argument. The available tables are twomass, spitzer + + >>> t = Table.read('catalog.xml', table_id='twomass') + +To write to a new file, the ID of the table should also be specified (unless +``t.meta['ID']`` is defined):: + + >>> t.write('new_catalog.xml', table_id='updated_table', format='votable') + +When writing, the ``compression=True`` argument can be used to force +compression of the data on disk, and the ``overwrite=True`` argument can be +used to overwrite an existing file. diff --git a/docs/io/votable/.gitignore b/docs/io/votable/.gitignore new file mode 100644 index 0000000..30c35a7 --- /dev/null +++ b/docs/io/votable/.gitignore @@ -0,0 +1,2 @@ +warnings.rst +exceptions.rst diff --git a/docs/io/votable/api_exceptions.rst b/docs/io/votable/api_exceptions.rst new file mode 100644 index 0000000..669e837 --- /dev/null +++ b/docs/io/votable/api_exceptions.rst @@ -0,0 +1,41 @@ +.. include:: references.txt + +`astropy.io.votable.exceptions` +=============================== + +.. contents:: + +.. automodule:: astropy.io.votable.exceptions + +Exception utilities +------------------- + +.. 
currentmodule:: astropy.io.votable.exceptions + +.. autofunction:: warn_or_raise + +.. autofunction:: vo_raise + +.. autofunction:: vo_reraise + +.. autofunction:: vo_warn + +.. autofunction:: parse_vowarning + +.. autoclass:: VOWarning + :show-inheritance: + +.. autoclass:: VOTableChangeWarning + :show-inheritance: + +.. autoclass:: VOTableSpecWarning + :show-inheritance: + +.. autoclass:: UnimplementedWarning + :show-inheritance: + +.. autoclass:: IOWarning + :show-inheritance: + +.. autoclass:: VOTableSpecError + :show-inheritance: diff --git a/docs/io/votable/index.rst b/docs/io/votable/index.rst new file mode 100644 index 0000000..4fc800c --- /dev/null +++ b/docs/io/votable/index.rst @@ -0,0 +1,444 @@ +.. doctest-skip-all + +.. include:: references.txt + +.. _astropy-io-votable: + +******************************************* +VOTable XML handling (`astropy.io.votable`) +******************************************* + +Introduction +============ + +The `astropy.io.votable` subpackage converts VOTable XML files to and +from Numpy record arrays. + +Getting Started +=============== + +Reading a VOTable file +---------------------- + +To read in a VOTable file, pass a file path to +`~astropy.io.votable.parse`:: + + from astropy.io.votable import parse + votable = parse("votable.xml") + +``votable`` is a `~astropy.io.votable.tree.VOTableFile` object, which +can be used to retrieve and manipulate the data and save it back out +to disk. + +VOTable files are made up of nested ``RESOURCE`` elements, each of +which may contain one or more ``TABLE`` elements. The ``TABLE`` +elements contain the arrays of data. + +To get at the ``TABLE`` elements, one can write a loop over the +resources in the ``VOTABLE`` file:: + + for resource in votable.resources: + for table in resource.tables: + # ... do something with the table ... 
+ pass + +However, if the nested structure of the resources is not important, +one can use `~astropy.io.votable.tree.VOTableFile.iter_tables` to +return a flat list of all tables:: + + for table in votable.iter_tables(): + # ... do something with the table ... + pass + +Finally, if there is expected to be only one table in the file, it +might be simplest to just use +`~astropy.io.votable.tree.VOTableFile.get_first_table`:: + + table = votable.get_first_table() + +Even easier, there is a convenience method to parse a VOTable file and +return the first table all in one step:: + + from astropy.io.votable import parse_single_table + table = parse_single_table("votable.xml") + +From a `~astropy.io.votable.tree.Table` object, one can get the data itself +in the ``array`` member variable:: + + data = table.array + +This data is a Numpy record array. + +The columns get their names from both the ``ID`` and ``name`` +attributes of the ``FIELD`` elements in the ``VOTABLE`` file. For +example, suppose we had a ``FIELD`` specified as follows: + +.. code-block:: xml + + + + representing the ICRS declination of the center of the image. + + + +.. note:: + + The mapping from VOTable ``name`` and ``ID`` attributes to Numpy + dtype ``names`` and ``titles`` is highly confusing. + + In VOTable, ``ID`` is guaranteed to be unique, but is not + required. ``name`` is not guaranteed to be unique, but is + required. + + In Numpy record dtypes, ``names`` are required to be unique and + are required. ``titles`` are not required, and are not required + to be unique. + + Therefore, VOTable's ``ID`` most closely maps to Numpy's + ``names``, and VOTable's ``name`` most closely maps to Numpy's + ``titles``. However, in some cases where a VOTable ``ID`` is not + provided, a Numpy ``name`` will be generated based on the VOTable + ``name``. 
Unfortunately, VOTable fields do not have an attribute + that is both unique and required, which would be the most + convenient mechanism to uniquely identify a column. + +This column of data can be extracted from the record array using:: + + >>> table.array['dec_targ'] + array([17.15153360566, 17.15153360566, 17.15153360566, 17.1516686826, + 17.1516686826, 17.1516686826, 17.1536197136, 17.1536197136, + 17.1536197136, 17.15375479055, 17.15375479055, 17.15375479055, + 17.1553884541, 17.15539736932, 17.15539752176, + 17.25736014763, + # ... + 17.2765703], dtype=object) + +or equivalently:: + + >>> table.array['Dec'] + array([17.15153360566, 17.15153360566, 17.15153360566, 17.1516686826, + 17.1516686826, 17.1516686826, 17.1536197136, 17.1536197136, + 17.1536197136, 17.15375479055, 17.15375479055, 17.15375479055, + 17.1553884541, 17.15539736932, 17.15539752176, + 17.25736014763, + # ... + 17.2765703], dtype=object) + +Building a new table from scratch +--------------------------------- + +It is also possible to build a new table, define some field datatypes +and populate it with data:: + + from astropy.io.votable.tree import VOTableFile, Resource, Table, Field + + # Create a new VOTable file... + votable = VOTableFile() + + # ...with one resource... + resource = Resource() + votable.resources.append(resource) + + # ... with one table + table = Table(votable) + resource.tables.append(table) + + # Define some fields + table.fields.extend([ + Field(votable, name="filename", datatype="char", arraysize="*"), + Field(votable, name="matrix", datatype="double", arraysize="2x2")]) + + # Now, use those field definitions to create the numpy record arrays, with + # the given number of rows + table.create_arrays(2) + + # Now table.array can be filled with data + table.array[0] = ('test1.xml', [[1, 0], [0, 1]]) + table.array[1] = ('test2.xml', [[0.5, 0.3], [0.2, 0.1]]) + + # Now write the whole thing to a file. 
+ # Note, we have to use the top-level votable file object + votable.to_xml("new_votable.xml") + +Outputting a VOTable file +------------------------- + +To save a VOTable file, simply call the +`~astropy.io.votable.tree.VOTableFile.to_xml` method. It accepts +either a string or Unicode path, or a Python file-like object:: + + votable.to_xml('output.xml') + +There are a number of data storage formats supported by +`astropy.io.votable`. The ``TABLEDATA`` format is XML-based and +stores values as strings representing numbers. The ``BINARY`` format +is more compact, and stores numbers in base64-encoded binary. VOTable +version 1.3 adds the ``BINARY2`` format, which allows for masking of +any data type, including integers and bit fields which can not be +masked in the older ``BINARY`` format. The storage format can be set +on a per-table basis using the `~astropy.io.votable.tree.Table.format` +attribute, or globally using the +`~astropy.io.votable.tree.VOTableFile.set_all_tables_format` method:: + + votable.get_first_table().format = 'binary' + votable.set_all_tables_format('binary') + votable.to_xml('binary.xml') + +Using `astropy.io.votable` +========================== + +Standard compliance +------------------- + +`astropy.io.votable.tree.Table` supports the `VOTable Format Definition +Version 1.1 +`_, +`Version 1.2 +`_, +and the `Version 1.3 proposed recommendation +`_. +Some flexibility is provided to support the 1.0 draft version and +other non-standard usage in the wild. To support these cases, set the +keyword argument ``pedantic`` to ``False`` when parsing. + +.. note:: + + Each warning and VOTABLE-specific exception emitted has a number and + is documented in more detail in :ref:`warnings` and + :ref:`exceptions`. + +Output always conforms to the 1.1, 1.2 or 1.3 spec, depending on the +input. + +.. _pedantic-mode: + +Pedantic mode +^^^^^^^^^^^^^ + +Many VOTABLE files in the wild do not conform to the VOTABLE +specification. 
If reading one of these files causes exceptions, you +may turn off pedantic mode in `astropy.io.votable` by passing +``pedantic=False`` to the `~astropy.io.votable.parse` or +`~astropy.io.votable.parse_single_table` functions:: + + from astropy.io.votable import parse + votable = parse("votable.xml", pedantic=False) + +Note, however, that it is good practice to report these errors to the +author of the application that generated the VOTABLE file to bring the +file into compliance with the specification. + +Even with ``pedantic`` turned off, many warnings may still be omitted. +These warnings are all of the type +`~astropy.io.votable.exceptions.VOTableSpecWarning` and can be turned +off using the standard Python `warnings` module. + +Missing values +-------------- + +Any value in the table may be "missing". `astropy.io.votable` stores +a Numpy masked array in each `~astropy.io.votable.tree.Table` +instance. This behaves like an ordinary Numpy masked array, except +for variable-length fields. For those fields, the datatype of the +column is "object" and another Numpy masked array is stored there. +Therefore, operations on variable length columns will not work -- this +is simply because variable length columns are not directly supported +by Numpy masked arrays. 
+ +Datatype mappings +----------------- + +The datatype specified by a ``FIELD`` element is mapped to a Numpy +type according to the following table: + + ================================ ======================================================================== + VOTABLE type Numpy type + ================================ ======================================================================== + boolean b1 + -------------------------------- ------------------------------------------------------------------------ + bit b1 + -------------------------------- ------------------------------------------------------------------------ + unsignedByte u1 + -------------------------------- ------------------------------------------------------------------------ + char (*variable length*) O - In Python 2.x, a `str` object; in 3.x, a ``bytes()`` object. + -------------------------------- ------------------------------------------------------------------------ + char (*fixed length*) S + -------------------------------- ------------------------------------------------------------------------ + unicodeChar (*variable length*) O - In Python 2.x, a `unicode` object, in utf-16; in 3.x a `str` object + -------------------------------- ------------------------------------------------------------------------ + unicodeChar (*fixed length*) U + -------------------------------- ------------------------------------------------------------------------ + short i2 + -------------------------------- ------------------------------------------------------------------------ + int i4 + -------------------------------- ------------------------------------------------------------------------ + long i8 + -------------------------------- ------------------------------------------------------------------------ + float f4 + -------------------------------- ------------------------------------------------------------------------ + double f8 + -------------------------------- 
------------------------------------------------------------------------ + floatComplex c8 + -------------------------------- ------------------------------------------------------------------------ + doubleComplex c16 + ================================ ======================================================================== + +If the field is a fixed size array, the data is stored as a Numpy +fixed-size array. + +If the field is a variable size array (that is ``arraysize`` contains +a '*'), the cell will contain a Python list of Numpy values. Each +value may be either an array or scalar depending on the ``arraysize`` +specifier. + +Examining field types +--------------------- + +To look up more information about a field in a table, one can use the +`~astropy.io.votable.tree.Table.get_field_by_id` method, which returns +the `~astropy.io.votable.tree.Field` object with the given ID. For +example:: + + >>> field = table.get_field_by_id('Dec') + >>> field.datatype + 'char' + >>> field.unit + 'deg' + +.. note:: + Field descriptors should not be mutated. To change the set of + columns, convert the Table to an `astropy.table.Table`, make the + changes, and then convert it back. + +.. _votable-serialization: + +Data serialization formats +-------------------------- + +VOTable supports a number of different serialization formats. + +- `TABLEDATA + `__ + stores the data in pure XML, where the numerical values are written + as human-readable strings. + +- `BINARY + `__ + is a binary representation of the data, stored in the XML as an + opaque ``base64``-encoded blob. + +- `BINARY2 + `__ + was added in VOTable 1.3, and is identical to "BINARY", except that + it explicitly records the position of missing values rather than + identifying them by a special value. + +- `FITS + `__ + stores the data in an external FITS file. This serialization is not + supported by the `astropy.io.votable` writer, since it requires + writing multiple files. 
+ +The serialization format can be selected in two ways: + + 1) By setting the ``format`` attribute of a + `astropy.io.votable.tree.Table` object:: + + votable.get_first_table().format = "binary" + votable.to_xml("new_votable.xml") + + 2) By overriding the format of all tables using the + ``tabledata_format`` keyword argument when writing out a VOTable + file:: + + votable.to_xml("new_votable.xml", tabledata_format="binary") + +Converting to/from an `astropy.table.Table` +------------------------------------------- + +The VOTable standard does not map conceptually to an +`astropy.table.Table`. However, a single table within the ``VOTable`` +file may be converted to and from an `astropy.table.Table`:: + + from astropy.io.votable import parse_single_table + table = parse_single_table("votable.xml").to_table() + +As a convenience, there is also a function to create an entire VOTable +file with just a single table:: + + from astropy.io.votable import from_table, writeto + votable = from_table(table) + writeto(votable, "output.xml") + +.. note:: + + By default, ``to_table`` will use the ``ID`` attribute from the files to + create the column names for the `~astropy.table.Table` object. However, + it may be that you want to use the ``name`` attributes instead. For this, + set the ``use_names_over_ids`` keyword to `True`. Note that since field + ``names`` are not guaranteed to be unique in the VOTable specification, + but column names are required to be unique in Numpy structured arrays (and + thus `astropy.table.Table` objects), the names may be renamed by appending + numbers to the end in some cases. + +Performance considerations +-------------------------- + +File reads will be moderately faster if the ``TABLE`` element includes +an nrows_ attribute. If the number of rows is not specified, the +record array must be resized repeatedly during load. + +.. 
_nrows: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC10 + +See Also +======== + +- `VOTable Format Definition Version 1.1 + `_ + +- `VOTable Format Definition Version 1.2 + `_ + +- `VOTable Format Definition Version 1.3, Proposed Recommendatation + `_ + + +Reference/API +============= + +.. automodapi:: astropy.io.votable + :no-inheritance-diagram: + :skip: VOWarning + :skip: VOTableChangeWarning + :skip: VOTableSpecWarning + :skip: UnimplementedWarning + :skip: IOWarning + :skip: VOTableSpecError + +.. automodapi:: astropy.io.votable.tree + :no-inheritance-diagram: + +.. automodapi:: astropy.io.votable.converters + :no-inheritance-diagram: + +.. automodapi:: astropy.io.votable.ucd + :no-inheritance-diagram: + +.. automodapi:: astropy.io.votable.util + :no-inheritance-diagram: + +.. automodapi:: astropy.io.votable.validator + :no-inheritance-diagram: + +.. automodapi:: astropy.io.votable.xmlutil + :no-inheritance-diagram: + + +astropy.io.votable.exceptions Module +------------------------------------ + +.. toctree:: + :maxdepth: 1 + + api_exceptions.rst diff --git a/docs/io/votable/references.txt b/docs/io/votable/references.txt new file mode 100644 index 0000000..f3ff4c8 --- /dev/null +++ b/docs/io/votable/references.txt @@ -0,0 +1,23 @@ +.. _BINARY: http://www.ivoa.net/Documents/PR/VOTable/VOTable-20040322.html#ToC27 +.. _BINARY2: http://www.ivoa.net/documents/VOTable/20130315/PR-VOTable-1.3-20130315.html#sec:BIN2 +.. _COOSYS: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC19 +.. _DESCRIPTION: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC19 +.. _FIELD: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC24 +.. _FIELDref: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC31 +.. _FITS: http://fits.gsfc.nasa.gov/fits_documentation.html +.. _GROUP: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC31 +.. _ID: http://www.w3.org/TR/REC-xml/#id +.. 
_INFO: http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#ToC19 +.. _LINK: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC22 +.. _multidimensional arrays: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC12 +.. _numerical accuracy: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC26 +.. _PARAM: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC24 +.. _PARAMref: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC31 +.. _RESOURCE: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC21 +.. _TABLE: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC23 +.. _TABLEDATA: http://www.ivoa.net/Documents/PR/VOTable/VOTable-20040322.html#ToC25 +.. _unified content descriptor: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC28 +.. _unique type: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC29 +.. _units: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC27 +.. _VALUES: http://www.ivoa.net/Documents/REC/VOTable/VOTable-20040811.html#ToC30 +.. _VOTABLE: http://www.ivoa.net/Documents/PR/VOTable/VOTable-20040322.html#ToC9 diff --git a/docs/known_issues.rst b/docs/known_issues.rst new file mode 100644 index 0000000..125054b --- /dev/null +++ b/docs/known_issues.rst @@ -0,0 +1,240 @@ +============ +Known Issues +============ + +While most bugs and issues are managed using the `astropy issue +tracker `_, this document +lists issues that are too difficult to fix, may require some +intervention from the user to workaround, or are due to bugs in other +projects or packages. + +.. 
_quantity_issues: + +Quantities lose their units with some operations +------------------------------------------------ + +Quantities are subclassed from numpy's `~numpy.ndarray` and in some numpy operations +(and in scipy operations using numpy internally) the subclass is ignored, which +means that either a plain array is returned, or a `~astropy.units.quantity.Quantity` without units. +E.g.:: + + In [1]: import astropy.units as u + + In [2]: import numpy as np + + In [3]: q = u.Quantity(np.arange(10.), u.m) + + In [4]: np.dot(q,q) + Out[4]: 285.0 + + In [5]: np.hstack((q,q)) + Out[5]: + + +Work-arounds are available for some cases. For the above:: + + In [6]: q.dot(q) + Out[6]: + + In [7]: u.Quantity([q, q]).flatten() + Out[7]: + + +See: https://github.com/astropy/astropy/issues/1274 + +Some docstrings can not be displayed in IPython < 0.13.2 +-------------------------------------------------------- + +Displaying long docstrings that contain Unicode characters may fail on +some platforms in the IPython console (prior to IPython version +0.13.2):: + + In [1]: import astropy.units as u + + In [2]: u.Angstrom? + ERROR: UnicodeEncodeError: 'ascii' codec can't encode character u'\xe5' in + position 184: ordinal not in range(128) [IPython.core.page] + +This can be worked around by changing the default encoding to ``utf-8`` +by adding the following to your ``sitecustomize.py`` file:: + + import sys + sys.setdefaultencoding('utf-8') + +Note that in general, `this is not recommended +`_, +because it can hide other Unicode encoding bugs in your application. +However, in general if your application does not deal with text +processing and you just want docstrings to work, this may be +acceptable. + +The IPython issue: https://github.com/ipython/ipython/pull/2738 + +Locale errors +------------- + +On MacOS X, you may see the following error when running ``setup.py``:: + + ... 
+ ValueError: unknown locale: UTF-8 + +You may also (on MacOS X or other platforms) see errors such as:: + + ... + stderr = stderr.decode(stdio_encoding) + TypeError: decode() argument 1 must be str, not None + +This is due to the ``LC_CTYPE`` environment variable being incorrectly set to +``UTF-8`` by default, which is not a valid locale setting. To fix this, set +this environment variable, as well as the ``LANG`` and ``LC_ALL`` environment +variables to e.g. ``en_US.UTF-8`` using, in the case of ``bash``:: + + export LANG="en_US.UTF-8" + export LC_ALL="en_US.UTF-8" + export LC_CTYPE="en_US.UTF-8" + +To avoid any issues in future, you should add this line to your e.g. +``~/.bash_profile`` or ``.bashrc`` file. + +To test these changes, open a new terminal and type ``locale``, and you should +see something like:: + + $ locale + LANG="en_US.UTF-8" + LC_COLLATE="en_US.UTF-8" + LC_CTYPE="en_US.UTF-8" + LC_MESSAGES="en_US.UTF-8" + LC_MONETARY="en_US.UTF-8" + LC_NUMERIC="en_US.UTF-8" + LC_TIME="en_US.UTF-8" + LC_ALL="en_US.UTF-8" + +If so, you can go ahead and try running ``setup.py`` again (in the new +terminal). + +Floating point precision issues on Python 2.6 on Microsoft Windows +------------------------------------------------------------------ + +When converting floating point numbers to strings on Python 2.6 on a +Microsoft Windows platform, some of the requested precision may be +lost. + +The easiest workaround is to install Python 2.7. + +The Python issue: http://bugs.python.org/issue7117 + +Failing logging tests when running the tests in IPython +------------------------------------------------------- + +When running the Astropy tests using ``astropy.test()`` in an IPython +interpreter some of the tests in the ``astropy/tests/test_logger.py`` fail. +This is due to mutually incompatible behaviors in IPython and py.test, and is +not due to a problem with the test itself or the feature being tested. 
+ +See: https://github.com/astropy/astropy/issues/717 + +mmap support for ``astropy.io.fits`` on GNU Hurd +------------------------------------------------ + +On Hurd and possibly other platforms ``flush()`` on memory-mapped files is not +implemented, so writing changes to a mmap'd FITS file may not be reliable and is +thus disabled. Attempting to open a FITS file in writeable mode with mmap will +result in a warning (and mmap will be disabled on the file automatically). + +See: https://github.com/astropy/astropy/issues/968 + +Crash on upgrading from Astropy 0.2 to a newer version +------------------------------------------------------ + +It is possible for installation of a new version of Astropy, or upgrading of an +existing installation to crash due to not having permissions on the +``~/.astropy/`` directory (in your home directory) or some file or subdirectory +in that directory. In particular this can occur if you installed Astropy as +the root user (such as with ``sudo``) at any point. This can manifest in +several ways, but the most common is a traceback ending with ``ImportError: +cannot import name config``. To resolve this issue either run ``sudo chown -R + ~/.astropy`` or, if you don't need anything in it you can blow +it away with ``sudo rm -rf ~/.astropy``. + +See for example: https://github.com/astropy/astropy/issues/987 + +Color printing on Windows +------------------------- + +Colored printing of log messages and other colored text does work in Windows +but only when running in the IPython console. Colors are not currently +supported in the basic Python command-line interpreter on Windows. 
+ +Table sorting can silently fail on MacOS X or Windows with Python 3 and Numpy < 1.6.2 +------------------------------------------------------------------------------------- + +In Python 3, prior to Numpy 1.6.2, there was a bug (in Numpy) that caused +sorting of structured arrays to silently fail under certain circumstances (for +example if the Table contains string columns) on MacOS X, Windows, and possibly +other platforms other than Linux. Since ``Table.sort`` relies on Numpy to +internally sort the data, it is also affected by this bug. If you are using +Python 3, and need the sorting functionality for tables, we recommend updating +to a more recent version of Numpy. + +Anaconda users should upgrade with ``conda``, not ``pip`` +--------------------------------------------------------- + +Upgrading Astropy in the anaconda python distribution using ``pip`` can result +in a corrupted install with a mix of files from the old version and the new +version. Anaconda users should update with ``conda update astropy``. There +may be a brief delay between the release of Astropy on PyPI and its release +via the ``conda`` package manager; users can check the availability of new +versions with ``conda search astropy``. + +Installation fails on Mageia-2 or Mageia-3 distributions +-------------------------------------------------------- + +Building may fail with warning messages such as:: + + unable to find 'pow' or 'sincos' + +at the linking phase. Upgrading the OS packages for Python should +fix the issue, though an immediate workaround is to edit the file:: + + /usr/lib/python2.7/config/Makefile + +and search for the line that adds the option ``-Wl,--no-undefined`` to the +``LDFLAGS`` variable and remove that option. 
+ + +Remote data utilities in `astropy.utils.data` fail on some Python distributions +------------------------------------------------------------------------------- + +The remote data utilities in `astropy.utils.data` depend on the Python +standard library `shelve` module, which in some cases depends on the +standard library `bsddb` module. Some Python distributions, including but +not limited to + +* OS X, Python 2.7.5 via homebrew +* Linux, Python 2.7.6 via conda [#]_ +* Linux, Python 2.6.9 via conda + +are built without support for the ``bsddb`` module, resulting in an error +such as:: + + ImportError: No module named _bsddb + +One workaround is to install the ``bsddb3`` module. + +.. [#] Continuum `says + `_ + this will be fixed in their next Python build. + + +Very long integers in ASCII tables silently converted to float for Numpy 1.5 +---------------------------------------------------------------------------- + +For Numpy 1.5, when reading an ASCII table that has integers which are too +large to fit into the native C long int type for the machine, then the +values get converted to float type with no warning. This is due to the +behavior of `numpy.array` and cannot easily be worked around. We recommend +that users upgrade to a newer version of Numpy. For Numpy >= 1.6 a warning +is printed and the values are treated as strings to preserve all information. + diff --git a/docs/license.rst b/docs/license.rst new file mode 100644 index 0000000..a798268 --- /dev/null +++ b/docs/license.rst @@ -0,0 +1,17 @@ +******** +Licenses +******** + +Astropy License +=============== + +Astropy is licensed under a 3-clause BSD style license: + +.. include:: ../licenses/LICENSE.rst + +Other Licenses +============== + +Full licenses for third-party software astropy is derived from or included +with Astropy can be found in the ``'licenses/'`` directory of the source +code distribution. 
diff --git a/docs/logging.rst b/docs/logging.rst
new file mode 100644
index 0000000..a0a4a64
--- /dev/null
+++ b/docs/logging.rst
@@ -0,0 +1,151 @@
+**************
+Logging system
+**************
+
+Overview
+========
+
+The Astropy logging system is designed to give users flexibility in deciding
+which log messages to show, to capture them, and to send them to a file.
+
+All messages printed by Astropy routines should use the built-in logging
+facility (normal ``print()`` calls should only be done by routines that are
+explicitly requested to print output). Messages can have one of several
+levels:
+
+* DEBUG: Detailed information, typically of interest only when diagnosing
+  problems.
+
+* INFO: A message conveying information about the current task, and
+  confirming that things are working as expected.
+
+* WARNING: An indication that something unexpected happened, and that user
+  action may be required.
+
+* ERROR: An indication of a more serious issue, including exceptions.
+
+By default, only WARNING and ERROR messages are displayed, and are sent to a
+log file located at ``~/.astropy/astropy.log`` (if the file is writeable).
+
+Configuring the logging system
+==============================
+
+First, import the logger::
+
+    from astropy import log
+
+The threshold level (defined above) for messages can be set with e.g.::
+
+    log.setLevel('INFO')
+
+Color (enabled by default) can be disabled with::
+
+    log.setColor(False)
+
+Warnings from ``warnings.warn`` can be logged with::
+
+    log.enable_warnings_logging()
+
+which can be disabled with::
+
+    log.disable_warnings_logging()
+
+and exceptions can be included in the log with::
+
+    log.enable_exception_logging()
+
+which can be disabled with::
+
+    log.disable_exception_logging()
+
+It is also possible to set these settings from the Astropy configuration file,
+which also allows an overall log file to be specified. See
+`Using the configuration file`_ for more information.
+ +Context managers +================ + +In some cases, you may want to capture the log messages, for example to check +whether a specific message was output, or to log the messages from a specific +section of code to a file. Both of these are possible using context managers. + +To add the log messages to a list, first import the logger if you have not +already done so:: + + from astropy import log + +then enclose the code in which you want to log the messages to a list in a +``with`` statement:: + + with log.log_to_list() as log_list: + # your code here + +In the above example, once the block of code has executed, ``log_list`` will +be a Python list containing all the Astropy logging messages that were raised. +Note that messages continue to be output as normal. + +Similarly, you can output the log messages of a specific section of code to a +file using:: + + with log.log_to_file('myfile.log'): + # your code here + +which will add all the messages to ``myfile.log`` (this is in addition to the +overall log file mentioned in `Using the configuration file`_). + +While these context managers will include all the messages emitted by the +logger (using the global level set by ``log.setLevel``), it is possible to +filter a subset of these using ``filter_level=``, and specifying one of +``'DEBUG'``, ``'INFO'``, ``'WARN'``, ``'ERROR'``. Note that if +``filter_level`` is a lower level than that set via ``setLevel``, only +messages with the level set by ``setLevel`` or higher will be included (i.e. +``filter_level`` is only filtering a subset of the messages normally emitted +by the logger). + +Similarly, it is possible to filter a subset of the messages by origin by +specifying ``filter_origin=`` followed by a string. If the origin of a message +starts with that string, the message will be included in the context manager. +For example, ``filter_origin='astropy.wcs'`` will include only messages +emitted in the ``astropy.wcs`` sub-package. 
+ +Using the configuration file +============================ + +Options for the logger can be set in the ``[config.logging_helper]`` section +of the Astropy configuration file:: + + [config.logging_helper] + + # Threshold for the logging messages. Logging messages that are less severe + # than this level will be ignored. The levels are 'DEBUG', 'INFO', 'WARNING', + # 'ERROR' + log_level = 'INFO' + + # Whether to use color for the level names + use_color = True + + # Whether to log warnings.warn calls + log_warnings = False + + # Whether to log exceptions before raising them + log_exceptions = False + + # Whether to always log messages to a log file + log_to_file = True + + # The file to log messages to + log_file_path = '~/.astropy/astropy.log' + + # Threshold for logging messages to log_file_path + log_file_level = 'INFO' + + # Format for log file entries + log_file_format = '%(asctime)s, %(origin)s, %(levelname)s, %(message)s' + + +Reference/API +============= + +.. automodapi:: astropy.logger + :no-inheritance-diagram: + diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..93dfe92 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,170 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. 
epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. changes to make an overview over all changed/added/deprecated items + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. 
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Astropy.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Astropy.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. 
+ goto end
+)
+
+:end
diff --git a/docs/modeling/algorithms.rst b/docs/modeling/algorithms.rst
new file mode 100644
index 0000000..0183cd0
--- /dev/null
+++ b/docs/modeling/algorithms.rst
@@ -0,0 +1,61 @@
+**********
+Algorithms
+**********
+
+Univariate polynomial evaluation
+================================
+
+* The evaluation of 1-D polynomials uses Horner's algorithm.
+
+* The evaluation of 1-D Chebyshev and Legendre polynomials uses Clenshaw's
+  algorithm.
+
+
+Multivariate polynomial evaluation
+==================================
+
+* Multivariate Polynomials are evaluated following the algorithm in [1]_ . The
+  algorithm uses the following notation:
+
+  - **multiindex** is a tuple of non-negative integers for which the length is
+    defined in the following way:
+
+    .. math:: \alpha = (\alpha_1, \alpha_2, \alpha_3), |\alpha| = \alpha_1+\alpha_2+\alpha_3
+
+
+  - **inverse lexical order** is the ordering of monomials in such a way that
+    :math:`{x^a < x^b}` if and only if there exists :math:`{1 \le i \le n}`
+    such that :math:`{a_n = b_n, \dots, a_{i+1} = b_{i+1}, a_i < b_i}`.
+
+    In this ordering :math:`y^2 > x^2*y` and :math:`x*y > y`
+
+  - **Multivariate Horner scheme** uses d+1 variables :math:`r_0, ...,r_d` to
+    store intermediate results, where *d* denotes the number of variables.
+
+    Algorithm:
+
+    1. Set *di* to the max number of variables (2 for a 2-D polynomial).
+
+    2. Set :math:`r_0` to :math:`c_{\alpha(0)}`, where c is a list of
+       coefficients for each multiindex in inverse lexical order.
+
+    3. For each monomial, n, in the polynomial:
+
+       - determine :math:`k = max \{1 \leq j \leq di: \alpha(n)_j \neq \alpha(n-1)_j\}`
+
+       - Set :math:`r_k := l_k(x)* (r_0 + r_1 + \dots + r_k)`
+
+       - Set :math:`r_0 = c_{\alpha(n)}, r_1 = \dots r_{k-1} = 0.`
+
+    4. 
return :math:`r_0 + \dots + r_{di}` + +* The evaluation of multivariate Chebyshev and Legendre polynomials uses a + variation of the above Horner's scheme, in which every Legendre or Chebyshev + function is considered a separate variable. In this case the length of the + :math:`\alpha` indices tuple is equal to the number of functions in x plus + the number of functions in y. In addition the Chebyshev and Legendre + functions are cached for efficiency. + + + +.. [1] J. M. Pena, Thomas Sauer, "On the Multivariate Horner Scheme", SIAM Journal on Numerical Analysis, Vol 37, No. 4 diff --git a/docs/modeling/design.rst b/docs/modeling/design.rst new file mode 100644 index 0000000..fd5ef09 --- /dev/null +++ b/docs/modeling/design.rst @@ -0,0 +1,28 @@ +.. _modeling-design: + +******************* +Models Design Goals +******************* + +The `astropy.modeling` and `astropy.modeling.fitting` modules described here +are designed to work as peers. The goal is to be able to add models without +explicit reference to fitting algorithms and likewise, add different fitting +algorithms without changing the existing models. + +Furthermore, the models are designed to be combined in many ways. It is +possible, for example, to combine models `serially +` so that the output values of one +model are used as input values to another. It is also possible to form a new +model by combining models in `parallel +` (each model is evaluated +separately with the original input and the deltas are summed). Since models +may have multiple input values, machinery is provided that allows assigning +outputs from one model into the appropriate input of another in a flexible way, +`~astropy.modeling.LabeledInput`. Finally, it is permitted to combine any +number of models using all of these mechanisms simultaneously. A composite +model can be used to make further composite models. + +In the future this will support a model language which will allow using models +in algebraic operations like + +.. 
math:: model = (model_1 + model_2) * model_3 diff --git a/docs/modeling/fitting.rst b/docs/modeling/fitting.rst new file mode 100644 index 0000000..691c0fe --- /dev/null +++ b/docs/modeling/fitting.rst @@ -0,0 +1,125 @@ +********************** +Fitting Models to Data +********************** + +This module provides wrappers, called Fitters, around some Numpy and Scipy +fitting functions. All Fitters can be called as functions. They take an +instance of `~astropy.modeling.FittableModel` as input and modify its +``parameters`` attribute. The idea is to make this extensible and allow +users to easily add other fitters. + +Linear fitting is done using Numpy's `numpy.linalg.lstsq` function. There are +currently two non-linear fitters which use `scipy.optimize.leastsq` and +`scipy.optimize.fmin_slsqp`. + +The rules for passing input to fitters are: + +* Non-linear fitters currently work only with single models (not model sets). + +* The linear fitter can fit a single input to multiple model sets creating + multiple fitted models. This may require specifying the ``model_set_axis`` + argument just as used when evaluating models; this may be required for the + fitter to know how to broadcast the input data. 
+ + +Fitting examples +---------------- + +- Fitting a polynomial model to multiple data sets simultaneously:: + + >>> from astropy.modeling import models, fitting + >>> import numpy as np + >>> p1 = models.Polynomial1D(3) + >>> p1.c0 = 1 + >>> p1.c1 = 2 + >>> print(p1) + Model: Polynomial1D + Inputs: 1 + Outputs: 1 + Model set size: 1 + Degree: 3 + Parameters: + c0 c1 c2 c3 + --- --- --- --- + 1.0 2.0 0.0 0.0 + >>> x = np.arange(10) + >>> y = p1(x) + >>> yy = np.array([y, y]) + >>> p2 = models.Polynomial1D(3, n_models=2) + >>> pfit = fitting.LinearLSQFitter() + >>> new_model = pfit(p2, x, yy) + >>> print(new_model) # doctest: +SKIP + Model: Polynomial1D + Inputs: 1 + Outputs: 1 + Model set size: 2 + Degree: 3 + Parameters: + c0 c1 c2 c3 + --- --- ------------------ ----------------- + 1.0 2.0 -5.86673908219e-16 3.61636197841e-17 + 1.0 2.0 -5.86673908219e-16 3.61636197841e-17 + +Fitters support constrained fitting. + +- All fitters support fixed (frozen) parameters through the ``fixed`` argument + to models or setting the `~astropy.modeling.Parameter.fixed` + attribute directly on a parameter. + + For linear fitters, freezing a polynomial coefficient means that a polynomial + without that term will be fitted to the data. For example, fixing ``c0`` in a + polynomial model will fit a polynomial with the zero-th order term missing. + However, the fixed value of the coefficient is used when evaluating the + model:: + + >>> x = np.arange(1, 10, .1) + >>> p1 = models.Polynomial1D(2, c0=[1, 1], c1=[2, 2], c2=[3, 3], + ... 
n_models=2) + >>> p1 + + >>> y = p1(x, model_set_axis=False) + >>> p1.c0.fixed = True + >>> pfit = fitting.LinearLSQFitter() + >>> new_model = pfit(p1, x, y) + >>> print(new_model) # doctest: +SKIP + Model: Polynomial1D + Inputs: 1 + Outputs: 1 + Model set size: 2 + Degree: 2 + Parameters: + c0 c1 c2 + --- ------------- ------------- + 1.0 2.38641216243 2.96827885742 + 1.0 2.38641216243 2.96827885742 + +- A parameter can be `~astropy.modeling.Parameter.tied` (linked to + another parameter). This can be done in two ways:: + + >>> def tiedfunc(g1): + ... mean = 3 * g1.stddev + ... return mean + >>> g1 = models.Gaussian1D(amplitude=10., mean=3, stddev=.5, + ... tied={'mean': tiedfunc}) + + or:: + + >>> g1 = models.Gaussian1D(amplitude=10., mean=3, stddev=.5) + >>> g1.mean.tied = tiedfunc + +Bounded fitting is supported through the ``bounds`` arguments to models or by +setting `~astropy.modeling.Parameter.min` and `~astropy.modeling.Parameter.max` +attributes on a parameter. Bounds for the +`~astropy.modeling.fitting.LevMarLSQFitter` are always exactly satisfied--if +the value of the parameter is outside the fitting interval, it will be reset to +the value at the bounds. The `~astropy.modeling.fitting.SLSQPLSQFitter` handles +bounds internally. + +- Different fitters support different types of constraints:: + + >>> fitting.LinearLSQFitter.supported_constraints + ['fixed'] + >>> fitting.LevMarLSQFitter.supported_constraints + ['fixed', 'tied', 'bounds'] + >>> fitting.SLSQPLSQFitter.supported_constraints + ['bounds', 'eqcons', 'ineqcons', 'fixed', 'tied'] diff --git a/docs/modeling/index.rst b/docs/modeling/index.rst new file mode 100644 index 0000000..8ace246 --- /dev/null +++ b/docs/modeling/index.rst @@ -0,0 +1,266 @@ +.. include:: links.inc + +.. 
_astropy-modeling: + +*************************************** +Models and Fitting (`astropy.modeling`) +*************************************** + +Introduction +============ + +`astropy.modeling` provides a framework for representing models and performing +model evaluation and fitting. It currently supports 1-D and 2-D models and +fitting with parameter constraints. + +It is :ref:`designed ` to be easily extensible and flexible. +Models do not reference fitting algorithms explicitly and new fitting +algorithms may be added without changing the existing models (though not all +models can be used with all fitting algorithms due to constraints such as model +linearity). + +The goal is to eventually provide a rich toolset of models and fitters such +that most users will not need to define new model classes, nor special purpose +fitting routines (while making it reasonably easy to do when necessary). + +.. warning:: + + `astropy.modeling` is currently a work-in-progress, and thus it is likely + there will be significant API changes in later versions of Astropy. If you + have specific ideas for how it might be improved, feel free to let us know + on the `astropy-dev mailing list`_ or at http://feedback.astropy.org + + +Getting started +=============== + +The examples here use the predefined models and assume the following modules +have been imported:: + + >>> import numpy as np + >>> from astropy.modeling import models, fitting + + +Using Models +------------ + +The `astropy.modeling` package defines a number of models that are collected +under a single namespace as ``astropy.modeling.models``. 
Models behave like +parametrized functions:: + + >>> from astropy.modeling import models + >>> g = models.Gaussian1D(amplitude=1.2, mean=0.9, stddev=0.5) + >>> print(g) + Model: Gaussian1D + Inputs: 1 + Outputs: 1 + Model set size: 1 + Parameters: + amplitude mean stddev + --------- ---- ------ + 1.2 0.9 0.5 + +Model parameters can be accessed as attributes:: + + >>> g.amplitude + Parameter('amplitude', value=1.2) + >>> g.mean + Parameter('mean', value=0.9) + >>> g.stddev + Parameter('stddev', value=0.5) + +and can also be updated via those attributes:: + + >>> g.amplitude = 0.8 + >>> g.amplitude + Parameter('amplitude', value=0.8) + +Models can be evaluated by calling them as functions:: + + >>> g(0.1) + 0.22242984036255528 + >>> g(np.linspace(0.5, 1.5, 7)) + array([ 0.58091923, 0.71746405, 0.7929204 , 0.78415894, 0.69394278, + 0.54952605, 0.3894018 ]) + +As the above example demonstrates, in general most models evaluate array-like +inputs according to the standard `Numpy broadcasting rules`_ for arrays. + +Models can therefore already be useful to evaluate common functions, +independently of the fitting features of the package. + + +Simple 1-D model fitting +------------------------ + +In this section, we look at a simple example of fitting a Gaussian to a +simulated dataset. We use the `~astropy.modeling.functional_models.Gaussian1D` +and `~astropy.modeling.functional_models.Trapezoid1D` models and the +`~astropy.modeling.fitting.LevMarLSQFitter` fitter to fit the data: + +.. 
plot:: + :include-source: + + import numpy as np + from astropy.modeling import models, fitting + + # Generate fake data + np.random.seed(0) + x = np.linspace(-5., 5., 200) + y = 3 * np.exp(-0.5 * (x - 1.3)**2 / 0.8**2) + y += np.random.normal(0., 0.2, x.shape) + + # Fit the data using a box model + t_init = models.Trapezoid1D(amplitude=1., x_0=0., width=1., slope=0.5) + fit_t = fitting.LevMarLSQFitter() + t = fit_t(t_init, x, y) + + # Fit the data using a Gaussian + g_init = models.Gaussian1D(amplitude=1., mean=0, stddev=1.) + fit_g = fitting.LevMarLSQFitter() + g = fit_g(g_init, x, y) + + # Plot the data with the best-fit model + plt.figure(figsize=(8,5)) + plt.plot(x, y, 'ko') + plt.plot(x, t(x), 'b-', lw=2, label='Trapezoid') + plt.plot(x, g(x), 'r-', lw=2, label='Gaussian') + plt.xlabel('Position') + plt.ylabel('Flux') + plt.legend(loc=2) + +As shown above, once instantiated, the fitter class can be used as a function +that takes the initial model (``t_init`` or ``g_init``) and the data values +(``x`` and ``y``), and returns a fitted model (``t`` or ``g``). + + +Simple 2-D model fitting +------------------------ + +Similarly to the 1-D example, we can create a simulated 2-D data dataset, and +fit a polynomial model to it. This could be used for example to fit the +background in an image. + +.. plot:: + :include-source: + + import numpy as np + from astropy.modeling import models, fitting + + # Generate fake data + np.random.seed(0) + y, x = np.mgrid[:128, :128] + z = 2. * x ** 2 - 0.5 * x ** 2 + 1.5 * x * y - 1. + z += np.random.normal(0., 0.1, z.shape) * 50000. 
+ + # Fit the data using astropy.modeling + p_init = models.Polynomial2D(degree=2) + fit_p = fitting.LevMarLSQFitter() + p = fit_p(p_init, x, y, z) + + # Plot the data with the best-fit model + plt.figure(figsize=(8,2.5)) + plt.subplot(1,3,1) + plt.imshow(z, interpolation='nearest', vmin=-1e4, vmax=5e4) + plt.title("Data") + plt.subplot(1,3,2) + plt.imshow(p(x, y), interpolation='nearest', vmin=-1e4, vmax=5e4) + plt.title("Model") + plt.subplot(1,3,3) + plt.imshow(z - p(x, y), interpolation='nearest', vmin=-1e4, vmax=5e4) + plt.title("Residual") + +A list of models is provided in the `Reference/API`_ section. The fitting +framework includes many useful features that are not demonstrated here, such as +weighting of datapoints, fixing or linking parameters, and placing lower or +upper limits on parameters. For more information on these, take a look at the +:doc:`fitting` documentation. + + +Model sets +---------- + +In some cases it is necessary to describe many models of the same type but with +different parameter values. This could be done simply by instantiating as many +instances of a `~astropy.modeling.Model` as are needed. But that can be +inefficient for a large number of models. To that end, all model classes in +`astropy.modeling` can also be used to represent a model *set* which is a +collection of models of the same type, but with different values for their +parameters. + +To instantiate a model set, use argument ``n_models=N`` where ``N`` is the +number of models in the set when constructing the model. The value of each +parameter must be a list or array of length ``N``, such that each item in +the array corresponds to one model in the set:: + + >>> g = models.Gaussian1D(amplitude=[1, 2], mean=[0, 0], + ... 
stddev=[0.1, 0.2], n_models=2) + >>> print(g) + Model: Gaussian1D + Inputs: 1 + Outputs: 1 + Model set size: 2 + Parameters: + amplitude mean stddev + --------- ---- ------ + 1.0 0.0 0.1 + 2.0 0.0 0.2 + +This is equivalent to two Gaussians with the parameters ``amplitude=1, mean=0, +stddev=0.1`` and ``amplitude=2, mean=0, stddev=0.2`` respectively. When +printing the model the parameter values are displayed as a table, with each row +corresponding to a single model in the set. + +The number of models in a model set can be determined using the `len` builtin:: + + >>> len(g) + 2 + +Single models have a length of 1, and are not considered a model set as such. + +When evaluating a model set, by default the input must be the same length as +the number of models, with one input per model:: + + >>> g([0, 0.1]) + array([ 1. , 1.76499381]) + +The result is an array with one result per model in the set. It is also +possible to broadcast a single value to all models in the set:: + + >>> g(0) + array([ 1., 2.]) + +Model sets are used primarily for fitting, allowing a large number of models of +the same type to be fitted simultaneously (and independently from each other) +to some large set of inputs. For example, fitting a polynomial to the time +response of each pixel in a data cube. This can greatly speed up the fitting +process, especially for linear models. + + +Using `astropy.modeling` +======================== + +.. toctree:: + :maxdepth: 1 + + parameters + models + fitting + new + algorithms + design + + +Reference/API +============= + +.. automodapi:: astropy.modeling +.. automodapi:: astropy.modeling.fitting +.. automodapi:: astropy.modeling.functional_models +.. automodapi:: astropy.modeling.optimizers +.. automodapi:: astropy.modeling.powerlaws +.. automodapi:: astropy.modeling.polynomial +.. automodapi:: astropy.modeling.projections +.. automodapi:: astropy.modeling.statistic +.. 
automodapi:: astropy.modeling.rotations + diff --git a/docs/modeling/links.inc b/docs/modeling/links.inc new file mode 100644 index 0000000..a4ce3d4 --- /dev/null +++ b/docs/modeling/links.inc @@ -0,0 +1 @@ +.. _Numpy broadcasting rules: http://docs.scipy.org/doc/numpy/user/basics.broadcasting.html diff --git a/docs/modeling/models.rst b/docs/modeling/models.rst new file mode 100644 index 0000000..085c64a --- /dev/null +++ b/docs/modeling/models.rst @@ -0,0 +1,374 @@ +.. include:: links.inc + +*********************************** +Instantiating and Evaluating Models +*********************************** + +The base class of all models is `~astropy.modeling.Model`, however +fittable models should subclass `~astropy.modeling.FittableModel`. +Fittable models can be linear or nonlinear in a regression analysis sense. + +In general models are instantiated by providing the parameter values that +define that instance of the model to the constructor, as demonstrated in +the section on :ref:`modeling-parameters`. + +Additionally, a `~astropy.modeling.Model` instance may represent a single model +with one set of parameters, or a model *set* consisting of a set of parameters +each representing a different parameterization of the same parametric model. +For example one may instantiate a single Gaussian model with one mean, standard +deviation, and amplitude. Or one may create a set of N Gaussians, each one of +which would be fitted to, for example, a different plane in an image cube. + +Regardless of whether using a single model, or a model set, parameter values +may be scalar values, or arrays of any size and shape, so long as they are +compatible according to the standard `Numpy broadcasting rules`_. For example, +a model may be instantiated with all scalar parameters:: + + >>> from astropy.modeling.models import Gaussian1D + >>> g = Gaussian1D(amplitude=1, mean=0, stddev=1) + >>> g + + +Or it may use all array parameters. 
For example if all parameters are 2x2 +arrays the model is computed element-wise using all elements in the arrays:: + + >>> g = Gaussian1D(amplitude=[[1, 2], [3, 4]], mean=[[0, 1], [1, 0]], + ... stddev=[[0.1, 0.2], [0.3, 0.4]]) + >>> g + + >>> g(0) + array([[ 1.00000000e+00, 7.45330634e-06], + [ 1.15977604e-02, 4.00000000e+00]]) + +Or it may even use a mix of scalar values and arrays of different sizes and +dimensions so long as they are compatible:: + + >>> g = Gaussian1D(amplitude=[[1, 2], [3, 4]], mean=0.1, stddev=[0.1, 0.2]) + >>> g(0) + array([[ 0.60653066, 1.76499381], + [ 1.81959198, 3.52998761]]) + +In this case, four values are computed--one using each element of the amplitude +array. Each model uses a mean of 0.1, and a standard deviation of 0.1 is +used with the amplitudes of 1 and 3, and 0.2 is used with amplitudes 2 and 4. + +If any of the parameters have incompatible values this will result in an +error:: + + >>> g = Gaussian1D(amplitude=1, mean=[1, 2], stddev=[1, 2, 3]) + Traceback (most recent call last): + ... + InputParameterError: Parameter 'mean' of shape (2,) cannot be broadcast + with parameter 'stddev' of shape (3,). All parameter arrays must have + shapes that are mutually compatible according to the broadcasting rules. + + +Model Sets +========== + +By default, `~astropy.modeling.Model` instances represent a single model. +There are two ways, when instantiating a `~astropy.modeling.Model` instance, to +create a model set instead. The first is to specify the ``n_models`` argument +when instantiating the model:: + + >>> g = Gaussian1D(amplitude=[1, 2], mean=[0, 0], stddev=[0.1, 0.2], + ... n_models=2) + >>> g + + +When specifying some ``n_models=N`` this requires that the parameter values be +arrays of some kind, the first *axis* of which has a length of ``N``. This +axis is referred to as the ``model_set_axis``, and by default is the ``0th`` +axis of parameter arrays. In this case the parameters were given as 1-D arrays +of length 2.
The values ``amplitude=1, mean=0, stddev=0.1`` are the parameters +for the first model in the set. The values ``amplitude=2, mean=0, stddev=0.2`` +are the parameters defining the second model in the set. + +This has different semantics from simply using array values for the parameters, +in that it ensures that parameter values and input values are matched up according +to the model_set_axis before any other array broadcasting rules are applied. + +For example, in the previous section we created a model with array values +like:: + + >>> g = Gaussian1D(amplitude=[[1, 2], [3, 4]], mean=0.1, stddev=[0.1, 0.2]) + +If instead we treat the rows as values for two different model sets, this +particular instantiation will fail, since only one value is given for mean:: + + >>> g = Gaussian1D(amplitude=[[1, 2], [3, 4]], mean=0.1, stddev=[0.1, 0.2], + ... n_models=2) + Traceback (most recent call last): + ... + InputParameterError: All parameter values must be arrays of dimension at + least 1 for model_set_axis=0 (the value given for 'mean' is only + 0-dimensional) + +To get around this for now, provide two values for mean:: + + >>> g = Gaussian1D(amplitude=[[1, 2], [3, 4]], mean=[0.1, 0.1], + ... stddev=[0.1, 0.2], n_models=2) + +This is different from the case without ``n_models=2``. It does not mean that +the value of amplitude is a 2x2 array. Rather, it means there are *two* values +for amplitude (one for each model in the set), each of which is a 1-D array of +length 2. The value for the first model is ``[1, 2]``, and the value for the +second model is ``[3, 4]``. Likewise, scalar values are given for the mean and +standard deviation of each model in the set. + +When evaluating this model on a single input we get a different result from the +single-model case:: + + >>> g(0) + array([[ 0.60653066, 1.21306132], + [ 2.64749071, 3.52998761]]) + +Each row in this output is the output for each model in the set.
The first is +the value of the Gaussian with ``amplitude=[1, 2], mean=0.1, stddev=0.1``, and +the second is the value of the Gaussian with ``amplitude=[3, 4], mean=0.1, +stddev=0.2``. + +We can also pass a different input to each model in a model set by passing in +an array input:: + + >>> g([0, 1]) + array([[ 6.06530660e-01, 1.21306132e+00], + [ 1.20195892e-04, 1.60261190e-04]]) + +By default this uses the same concept of a ``model_set_axis``. The first +dimension of the input array is used to map inputs to corresponding models in +the model set. We can use this, for example, to evaluate the model on 1-D +array inputs with a different input to each model set:: + + >>> g([[0, 1], [2, 3]]) + array([[ 6.06530660e-01, 5.15351422e-18], + [ 7.57849134e-20, 8.84815213e-46]]) + +In this case the first model is evaluated on ``[0, 1]``, and the second model +is evaluated on ``[2, 3]``. If the input has length greater than the number of +models in the set then this is in error:: + + >>> g([0, 1, 2]) + Traceback (most recent call last): + ... + ValueError: Input argument 'x' does not have the correct dimensions in + model_set_axis=0 for a model set with n_models=2. + +And input like ``[0, 1, 2]`` wouldn't work anyways because it is not compatible +with the array dimensions of the parameter values. However, what if we wanted +to evaluate all models in the set on the input ``[0, 1]``? We could do this +by simply repeating:: + + >>> g([[0, 1], [0, 1]]) + array([[ 6.06530660e-01, 5.15351422e-18], + [ 2.64749071e+00, 1.60261190e-04]]) + +But there is a workaround for this use case that does not necessitate +duplication. This is to include the argument ``model_set_axis=False``:: + + >>> g([0, 1], model_set_axis=False) + array([[ 6.06530660e-01, 5.15351422e-18], + [ 2.64749071e+00, 1.60261190e-04]]) + +What ``model_set_axis=False`` implies is that an array-like input should not be +treated as though any of its dimensions map to models in a model set. 
And +rather, the given input should be used to evaluate all the models in the model +set. For scalar inputs like ``g(0)``, ``model_set_axis=False`` is implied +automatically. But for array inputs it is necessary to avoid ambiguity. + + +Inputs and Outputs +================== + +Models have an `~astropy.modeling.Model.n_inputs` attribute, which shows how +many coordinates the model expects as an input. All models expect coordinates +as separate arguments. For example a 2-D model expects x and y coordinate +values to be passed separately, i.e. as two scalars or array-like values. + +Models also have an attribute `~astropy.modeling.Model.n_outputs`, which shows +the number of output coordinates. The `~astropy.modeling.Model.n_inputs` and +`~astropy.modeling.Model.n_outputs` attributes can be used when chaining +transforms by adding models in :class:`series +` or in :class:`parallel +`. Because composite models can be +nested within other composite models, creating theoretically infinitely complex +models, a mechanism to map input data to models is needed. In this case the +input may be wrapped in a `~astropy.modeling.LabeledInput` object-- a dict-like +object whose items are ``{label: data}`` pairs. + + +Further examples +================ + +The examples here assume this import statement was executed:: + + >>> from astropy.modeling.models import Gaussian1D, Polynomial1D + >>> import numpy as np + +- Create a model set of two 1-D Gaussians:: + + >>> x = np.arange(1, 10, .1) + >>> g1 = Gaussian1D(amplitude=[10, 9], mean=[2, 3], + ... 
stddev=[0.15, .1], n_models=2) + >>> print(g1) + Model: Gaussian1D + Inputs: 1 + Outputs: 1 + Model set size: 2 + Parameters: + amplitude mean stddev + --------- ---- ------ + 10.0 2.0 0.15 + 9.0 3.0 0.1 + + Evaluate all models in the set on one set of input coordinates:: + + >>> y = g1(x, model_set_axis=False) # broadcast the array to all models + >>> print(y.shape) + (2, 90) + + or different inputs for each model in the set:: + + >>> y = g1([x, x + 3]) + >>> print(y.shape) + (2, 90) + +.. plot:: + + import matplotlib.pyplot as plt + import numpy as np + from astropy.modeling import models, fitting + x = np.arange(1, 10, .1) + g1 = models.Gaussian1D(amplitude=[10, 9], mean=[2,3], stddev=[.15,.1], + n_models=2) + y = g1(x, model_set_axis=False) + plt.plot(x, y.T) + plt.title('Evaluate two Gaussian1D models on 1 set of input data') + plt.show() + +.. plot:: + + import matplotlib.pyplot as plt + import numpy as np + from astropy.modeling import models, fitting + x = np.arange(1, 10, .1) + g1 = models.Gaussian1D(amplitude=[10, 9], mean=[2,3], stddev=[.15,.1], + n_models=2) + y = g1([x, x - 3]) + plt.plot(x, y[0]) + plt.plot(x - 3, y[1]) + plt.title('Evaluate two Gaussian1D models with 2 sets of input data') + plt.show() + + +- Evaluating a set of multiple polynomial models with one input data set + creates multiple output data sets:: + + >>> p1 = Polynomial1D(degree=1, n_models=5) + >>> p1.c1 = [0, 1, 2, 3, 4] + >>> print(p1) + Model: Polynomial1D + Inputs: 1 + Outputs: 1 + Model set size: 5 + Degree: 1 + Parameters: + c0 c1 + --- --- + 0.0 0.0 + 0.0 1.0 + 0.0 2.0 + 0.0 3.0 + 0.0 4.0 + >>> y = p1(x, model_set_axis=False) + + +..
plot:: + + import matplotlib.pyplot as plt + import numpy as np + from astropy.modeling import models, fitting + x = np.arange(1, 10, .1) + p1 = models.Polynomial1D(1, n_models=5) + p1.c1 = [0, 1, 2, 3, 4] + y = p1(x, model_set_axis=False) + plt.plot(x, y.T) + plt.title("Polynomial1D with a 5 model set on the same input") + plt.show() + +- When passed a 2-D array, the same polynomial will map each row of the array + to one model in the set, one for one:: + + >>> x = np.arange(30).reshape(5, 6) + >>> y = p1(x) + >>> y + array([[ 0., 0., 0., 0., 0., 0.], + [ 6., 7., 8., 9., 10., 11.], + [ 24., 26., 28., 30., 32., 34.], + [ 54., 57., 60., 63., 66., 69.], + [ 96., 100., 104., 108., 112., 116.]]) + >>> print(y.shape) + (5, 6) + + +Composite model examples +------------------------ + +.. note:: + + Composite models in Astropy are currently in the process of being reworked, + but in the meantime the existing implementation is still useful. + +Create and evaluate a parallel composite model:: + + >>> from astropy.modeling import SummedCompositeModel + >>> from astropy.modeling.models import Polynomial1D, Gaussian1D + >>> x = np.arange(1,10,.1) + >>> p1 = Polynomial1D(1) + >>> g1 = Gaussian1D(amplitude=10., stddev=2.1, mean=4.2) + >>> sum_of_models = SummedCompositeModel([g1, p1]) + >>> y = sum_of_models(x) + +This is equivalent to applying the two models in parallel:: + + >>> y = x + g1(x) + p1(x) + +In more complex cases the input and output may be mapped to transformations:: + + >>> from astropy.modeling import SerialCompositeModel + >>> from astropy.modeling.models import Polynomial2D, Shift + >>> y, x = np.mgrid[:5, :5] + >>> off = Shift(-3.2) + >>> poly2 = Polynomial2D(2) + >>> serial_composite_model = SerialCompositeModel( + ... [off, poly2], inmap=[['x'], ['x', 'y']], outmap=[['x'], ['z']]) + +The above composite transform will apply an inplace shift to x, followed by a +2-D polynomial and will save the result in an array, labeled 'z'. 
To evaluate +this model use a `~astropy.modeling.LabeledInput` object:: + + >>> from astropy.modeling import LabeledInput + >>> labeled_data = LabeledInput([x, y], ['x', 'y']) + >>> result = serial_composite_model(labeled_data) + +The output is also a `~astropy.modeling.LabeledInput` object and the +result is stored in label 'z':: + + >>> print(result) # doctest: +SKIP + {'x': array([[-3.2, -2.2, -1.2, -0.2, 0.8], + [-3.2, -2.2, -1.2, -0.2, 0.8], + [-3.2, -2.2, -1.2, -0.2, 0.8], + [-3.2, -2.2, -1.2, -0.2, 0.8], + [-3.2, -2.2, -1.2, -0.2, 0.8]]), + 'y': array([[0, 0, 0, 0, 0], + [1, 1, 1, 1, 1], + [2, 2, 2, 2, 2], + [3, 3, 3, 3, 3], + [4, 4, 4, 4, 4]]), + 'z': array([[ 0., 0., 0., 0., 0.], + [ 0., 0., 0., 0., 0.], + [ 0., 0., 0., 0., 0.], + [ 0., 0., 0., 0., 0.], + [ 0., 0., 0., 0., 0.]])} diff --git a/docs/modeling/new.rst b/docs/modeling/new.rst new file mode 100644 index 0000000..9ce916c --- /dev/null +++ b/docs/modeling/new.rst @@ -0,0 +1,391 @@ +Defining New Model Classes +========================== + +This document describes how to add a model to the package or to create a +user-defined model. In short, one needs to define all model parameters and +write an eval function which evaluates the model. If the model is fittable, a +function to compute the derivatives with respect to parameters is required +if a linear fitting algorithm is to be used and optional if a non-linear fitter is to be used. + + +Custom 1-D models +----------------- + +For 1-D models, the `~astropy.modeling.functional_models.custom_model_1d` +decorator is provided to make it very easy to define new models. The following +example demonstrates how to set up a model consisting of two Gaussians: + +.. 
plot:: + :include-source: + + import numpy as np + from astropy.modeling.models import custom_model_1d + from astropy.modeling.fitting import LevMarLSQFitter + + # Define model + @custom_model_1d + def sum_of_gaussians(x, amplitude1=1., mean1=-1., sigma1=1., + amplitude2=1., mean2=1., sigma2=1.): + return (amplitude1 * np.exp(-0.5 * ((x - mean1) / sigma1)**2) + + amplitude2 * np.exp(-0.5 * ((x - mean2) / sigma2)**2)) + + # Generate fake data + np.random.seed(0) + x = np.linspace(-5., 5., 200) + m_ref = sum_of_gaussians(amplitude1=2., mean1=-0.5, sigma1=0.4, + amplitude2=0.5, mean2=2., sigma2=1.0) + y = m_ref(x) + np.random.normal(0., 0.1, x.shape) + + # Fit model to data + m_init = sum_of_gaussians() + fit = LevMarLSQFitter() + m = fit(m_init, x, y) + + # Plot the data and the best fit + plt.plot(x, y, 'o', color='k') + plt.plot(x, m(x), color='r', lw=2) + +.. note:: + + Currently this shortcut for model definition only works for 1-D models, but + it is being expanded to support 2 or greater dimension models. + + +A step by step definition of a 1-D Gaussian model +------------------------------------------------- + +The example described in `Custom 1-D models`_ can be used for most 1-D cases, +but the following section describes how to construct model classes in general. +The details are explained below with a 1-D Gaussian model as an example. There +are two base classes for models. If the model is fittable, it should inherit +from `~astropy.modeling.FittableModel`; if not it should subclass +`~astropy.modeling.Model`. + +If the model takes parameters they should be specified as class attributes in +the model's class definition using the `~astropy.modeling.Parameter` +descriptor. All arguments to the Parameter constructor are optional, and may +include a default value for that parameter, a text description of the parameter +(useful for `help` and documentation generation), as well as default constraints +and custom getters/setters for the parameter value.
+ +If the first argument ``name`` is specified it must be identical to the class +attribute being assigned that Parameter. As such, Parameters take their name +from this attribute by default. In other words, ``amplitude = +Parameter('amplitude')`` is equivalent to ``amplitude = Parameter()``. This +differs from Astropy v0.3.x, where it was necessary to provide the name twice. + +:: + + from astropy.modeling import FittableModel, Parameter, format_input + + class Gaussian1DModel(FittableModel): + amplitude = Parameter() + mean = Parameter() + stddev = Parameter() + +At a minimum, the ``__init__`` method takes all parameters and a few keyword +arguments such as values for constraints:: + + def __init__(self, amplitude, mean, stddev, **kwargs): + # Note that this __init__ does nothing different from the base class's + # __init__. The main point of defining it is so that the function + # signature is more informative. + super(Gaussian1DModel, self).__init__( + amplitude=amplitude, mean=mean, stddev=stddev, **kwargs) + +.. note:: + + If a parameter is defined with a default value you may make the argument + for that parameter in the ``__init__`` optional. Otherwise it is + recommended to make it a required argument. In the above example none of + the parameters have default values. + +Fittable models can be linear or nonlinear in a regression sense. The default +value of the `~astropy.modeling.Model.linear` attribute is ``False``. Linear +models should define the ``linear`` class attribute as ``True``. +The `~astropy.modeling.Model.n_inputs` attribute stores the number of +input variables the model expects. The +`~astropy.modeling.Model.n_outputs` attribute stores the number of output +variables returned after evaluating the model. These two attributes are used +with composite models. + +Next, provide methods called ``eval`` to evaluate the model and ``fit_deriv``, +to compute its derivatives with respect to parameters.
These may be normal +methods, `classmethod`, or `staticmethod`, though the convention is to use +`staticmethod` when the function does not depend on any of the object's other +attributes (i.e., it does not reference ``self``). The evaluation method takes +all input coordinates as separate arguments and all of the model's parameters +in the same order they would be listed by +`~astropy.modeling.Model.param_names`. + +For this example:: + + @staticmethod + def eval(x, amplitude, mean, stddev): + return amplitude * np.exp((-(1 / (2. * stddev**2)) * (x - mean)**2)) + +The ``fit_deriv`` method takes as input all coordinates as separate arguments. +There is an option to compute numerical derivatives for nonlinear models in +which case the ``fit_deriv`` method should be ``None``:: + + @staticmethod + def fit_deriv(x, amplitude, mean, stddev): + d_amplitude = np.exp((-(1 / (stddev**2)) * (x - mean)**2)) + d_mean = (2 * amplitude * + np.exp((-(1 / (stddev**2)) * (x - mean)**2)) * + (x - mean) / (stddev**2)) + d_stddev = (2 * amplitude * + np.exp((-(1 / (stddev**2)) * (x - mean)**2)) * + ((x - mean)**2) / (stddev**3)) + return [d_amplitude, d_mean, d_stddev] + + +Finally, the ``__call__`` method takes input coordinates as separate arguments. +It reformats them (if necessary) using the `~astropy.modeling.format_input` +wrapper/decorator and calls the eval method to perform the model evaluation +using the input variables and a special property called +`~astropy.modeling.Model.param_sets` which returns a list of all the parameter +values over all models in the set. 
+ +The reason there is a separate eval method is to allow fitters to call the eval +method with different parameters which is necessary for updating the +approximation while fitting, and for fitting with constraints.:: + + @format_input + def __call__(self, x): + return self.eval(x, *self.param_sets) + + +Full example +^^^^^^^^^^^^ + +:: + + from astropy.modeling import FittableModel, Parameter, format_input + + class Gaussian1DModel(FittableModel): + amplitude = Parameter() + mean = Parameter() + stddev = Parameter() + + def __init__(self, amplitude, mean, stddev, **kwargs): + # Note that this __init__ does nothing different from the base class's + # __init__. The main point of defining it is so that the function + # signature is more informative. + super(Gaussian1DModel, self).__init__( + amplitude=amplitude, mean=mean, stddev=stddev, **kwargs) + + @staticmethod + def eval(x, amplitude, mean, stddev): + return amplitude * np.exp((-(1 / (2. * stddev**2)) * (x - mean)**2)) + + @staticmethod + def fit_deriv(x, amplitude, mean, stddev): + d_amplitude = np.exp((-(1 / (stddev**2)) * (x - mean)**2)) + d_mean = (2 * amplitude * + np.exp((-(1 / (stddev**2)) * (x - mean)**2)) * + (x - mean) / (stddev**2)) + d_stddev = (2 * amplitude * + np.exp((-(1 / (stddev**2)) * (x - mean)**2)) * + ((x - mean)**2) / (stddev**3)) + return [d_amplitude, d_mean, d_stddev] + + @format_input + def __call__(self, x): + return self.eval(x, *self.param_sets) + + +A full example of a LineModel +----------------------------- + +:: + + from astropy.modeling import models, Parameter, format_input + import numpy as np + + class LineModel(models.PolynomialModel): + slope = Parameter() + intercept = Parameter() + linear = True + + def __init__(self, slope, intercept, **kwargs): + super(LineModel, self).__init__(slope=slope, intercept=intercept, + **kwargs) + + @staticmethod + def eval(x, slope, intercept): + return slope * x + intercept + + @staticmethod + def fit_deriv(x, slope, intercept): + d_slope 
= x + d_intercept = np.ones_like(x) + return [d_slope, d_intercept] + + @format_input + def __call__(self, x): + return self.eval(x, *self.param_sets) + + +Defining New Fitter Classes +=========================== + +This section describes how to add a new nonlinear fitting algorithm to this +package or write a user-defined fitter. In short, one needs to define an error +function and a ``__call__`` method and define the types of constraints which +work with this fitter (if any). + +The details are described below using scipy's SLSQP algorithm as an example. +The base class for all fitters is `~astropy.modeling.fitting.Fitter`:: + + class SLSQPFitter(Fitter): + supported_constraints = ['bounds', 'eqcons', 'ineqcons', 'fixed', + 'tied'] + + def __init__(self): + # Most currently defined fitters take no arguments in their + # __init__, but the option certainly exists for custom fitters + super(SLSQPFitter, self).__init__() + +All fitters take a model (their ``__call__`` method modifies the model's +parameters) as their first argument. + +Next, the error function takes a list of parameters returned by an iteration of +the fitting algorithm and input coordinates, evaluates the model with them and +returns some type of a measure for the fit. In the example the sum of the +squared residuals is used as a measure of fitting.:: + + def objective_function(self, fps, *args): + model = args[0] + meas = args[-1] + model.fitparams(fps) + res = self.model(*args[1:-1]) - meas + return np.sum(res**2) + +The ``__call__`` method performs the fitting. As a minimum it takes all +coordinates as separate arguments. 
Additional arguments are passed as +necessary.:: + + def __call__(self, model, x, y , maxiter=MAXITER, epsilon=EPS): + if model.linear: + raise ModelLinearityException( + 'Model is linear in parameters; ' + 'non-linear fitting methods should not be used.') + model_copy = model.copy() + init_values, _ = _model_to_fit_params(model_copy) + self.fitparams = optimize.fmin_slsqp(self.errorfunc, p0=init_values, + args=(y, x), + bounds=self.bounds, + eqcons=self.eqcons, + ineqcons=self.ineqcons) + return model_copy + + +Using a Custom Statistic Function +================================= + +This section describes how to write a new fitter with a user-defined statistic +function. The example below shows a specialized class which fits a straight +line with uncertainties in both variables. + +The following import statements are needed.:: + + import numpy as np + from astropy.modeling.fitting import (_validate_model, + _fitter_to_model_params, + _model_to_fit_params, Fitter, + _convert_input) + from astropy.modeling.optimizers import Simplex + +First one needs to define a statistic. This can be a function or a callable +class.:: + + def chi_line(measured_vals, updated_model, x_sigma, y_sigma, x): + """ + Chi^2 statistic for fitting a straight line with uncertainties in x and + y. 
+ + Parameters + ---------- + measured_vals : array + updated_model : `~astropy.modeling.ParametricModel` + model with parameters set by the current iteration of the optimizer + x_sigma : array + uncertainties in x + y_sigma : array + uncertainties in y + + """ + model_vals = updated_model(x) + if x_sigma is None and y_sigma is None: + return np.sum((model_vals - measured_vals) ** 2) + elif x_sigma is not None and y_sigma is not None: + weights = 1 / (y_sigma ** 2 + updated_model.parameters[1] ** 2 * + x_sigma ** 2) + return np.sum((weights * (model_vals - measured_vals)) ** 2) + else: + if x_sigma is not None: + weights = 1 / x_sigma ** 2 + else: + weights = 1 / y_sigma ** 2 + return np.sum((weights * (model_vals - measured_vals)) ** 2) + +In general, to define a new fitter, all one needs to do is provide a statistic +function and an optimizer. In this example we will let the optimizer be an +optional argument to the fitter and will set the statistic to ``chi_line`` +above.:: + + class LineFitter(Fitter): + """ + Fit a straight line with uncertainties in both variables + + Parameters + ---------- + optimizer : class or callable + one of the classes in optimizers.py (default: Simplex) + """ + + def __init__(self, optimizer=Simplex): + self.statistic = chi_line + super(LineFitter, self).__init__(optimizer, + statistic=self.statistic) + +The last thing to define is the ``__call__`` method.:: + + def __call__(self, model, x, y, x_sigma=None, y_sigma=None, **kwargs): + """ + Fit data to this model. 
+ + Parameters + ---------- + model : `~astropy.modeling.core.ParametricModel` + model to fit to x, y + x : array + input coordinates + y : array + input coordinates + x_sigma : array + uncertainties in x + y_sigma : array + uncertainties in y + kwargs : dict + optional keyword arguments to be passed to the optimizer + + Returns + ------- + model_copy : `~astropy.modeling.core.ParametricModel` + a copy of the input model with parameters set by the fitter + + """ + model_copy = _validate_model(model, + self._opt_method.supported_constraints) + + farg = _convert_input(x, y) + farg = (model_copy, x_sigma, y_sigma) + farg + p0, _ = _model_to_fit_params(model_copy) + + fitparams, self.fit_info = self._opt_method( + self.objective_function, p0, farg, **kwargs) + _fitter_to_model_params(model_copy, fitparams) + + return model_copy diff --git a/docs/modeling/parameters.rst b/docs/modeling/parameters.rst new file mode 100644 index 0000000..1287756 --- /dev/null +++ b/docs/modeling/parameters.rst @@ -0,0 +1,170 @@ +.. include:: links.inc + +.. _modeling-parameters: + +********** +Parameters +********** + +Most models in this package are "parametric" in the sense that each subclass +of `~astropy.modeling.Model` represents an entire family of models, each +member of which is distinguished by a fixed set of parameters that fit that +model to some dependent and independent variable(s) (also referred to +throughout the package as the outputs and inputs of the model). + +Parameters are used in three different contexts within this package: Basic +evaluation of models, fitting models to data, and providing information about +individual models to users (including documentation). + +Most subclasses of `~astropy.modeling.Model`--specifically those implementing a +specific physical or statistical model, have a fixed set of parameters that can +be specified for instances of that model.
There are a few classes of models +(in particular polynomials) in which the number of parameters depends on some +other property of the model (the degree in the case of polynomials). + +Models maintain a list of parameter names, +`~astropy.modeling.Model.param_names`. Single parameters are instances of +`~astropy.modeling.Parameter` which provide a proxy for the actual parameter +values. Simple mathematical operations can be performed with them, but they +also contain additional attributes specific to model parameters, such as any +constraints on their values and documentation. + +Parameter values may be scalars *or* array values. Some parameters are +required by their very nature to be arrays (such as the transformation matrix +for an `~astropy.modeling.projections.AffineTransformation2D`). In most other +cases, however, array-valued parameters have no meaning specific to the model, +and are simply combined with input arrays during model evaluation according to +the standard `Numpy broadcasting rules`_. + + +Parameter examples +------------------ + +- Model classes can be introspected directly to find out what parameters they + accept:: + + >>> from astropy.modeling import models + >>> models.Gaussian1D.param_names + ['amplitude', 'mean', 'stddev'] + + The order of the items in the ``param_names`` list is relevant--this + is the same order in which values for those parameters should be passed in + when constructing an instance of that model:: + + >>> g = models.Gaussian1D(1.0, 0.0, 0.1) + >>> g + + + However, parameters may also be given as keyword arguments (in any order):: + + >>> g = models.Gaussian1D(mean=0.0, amplitude=2.0, stddev=0.2) + >>> g + + + So all that really matters is knowing the names (and meanings) of the + parameters that each model accepts. 
More information about an individual + model can also be obtained using the `help` built-in:: + + >>> help(models.Gaussian1D) # doctest: +SKIP + +- Some types of models can have different numbers of parameters depending + on other properties of the model. In particular, the parameters of + polynomial models are their coefficients, the number of which depends on the + polynomial's degree:: + + >>> p1 = models.Polynomial1D(degree=3, c0=1.0, c1=0.0, c2=2.0, c3=3.0) + >>> p1.param_names + ['c0', 'c1', 'c2', 'c3'] + >>> p1 + + + For the basic `~astropy.modeling.polynomial.Polynomial1D` class the + parameters are named ``c0`` through ``cN`` where ``N`` is the degree of the + polynomial. The above example represents the polynomial :math:`3x^3 + 2x^2 + + 1`. + +- Some models also have default values for one or more of their parameters. + For polynomial models, for example, the default value of all coefficients is + zero--this allows a polynomial instance to be created without specifying any + of the coefficients initially:: + + >>> p2 = models.Polynomial1D(degree=4) + >>> p2 + + +- Parameters can then be set/updated by accessing attributes on the model of + the same names as the parameters:: + + >>> p2.c4 = 1 + >>> p2.c2 = 3.5 + >>> p2.c0 = 2.0 + >>> p2 + + + This example now represents the polynomial :math:`x^4 + 3.5x^2 + 2`. + +- It is possible to set the coefficients of a polynomial by passing the + parameters in a dictionary, since all parameters can be provided as keyword + arguments:: + + >>> ch2 = models.Chebyshev2D(x_degree=2, y_degree=3) + >>> coeffs = dict((name, [idx, idx + 10]) + ... for idx, name in enumerate(ch2.param_names)) + >>> ch2 = models.Chebyshev2D(x_degree=2, y_degree=3, n_models=2, + ...
**coeffs) + >>> ch2.param_sets + array([[ 0., 10.], + [ 1., 11.], + [ 2., 12.], + [ 3., 13.], + [ 4., 14.], + [ 5., 15.], + [ 6., 16.], + [ 7., 17.], + [ 8., 18.], + [ 9., 19.], + [ 10., 20.], + [ 11., 21.]]) + +- Or directly, using keyword arguments:: + + >>> ch2 = models.Chebyshev2D(x_degree=2, y_degree=3, + ... c0_0=[0, 10], c0_1=[3, 13], + ... c0_2=[6, 16], c0_3=[9, 19], + ... c1_0=[1, 11], c1_1=[4, 14], + ... c1_2=[7, 17], c1_3=[10, 20,], + ... c2_0=[2, 12], c2_1=[5, 15], + ... c2_2=[8, 18], c2_3=[11, 21]) + +- Individual parameters values may be arrays of different sizes and shapes:: + + >>> p3 = models.Polynomial1D(degree=2, c0=1.0, c1=[2.0, 3.0], + ... c2=[[4.0, 5.0], [6.0, 7.0], [8.0, 9.0]]) + >>> p3(2.0) + array([[ 21., 27.], + [ 29., 35.], + [ 37., 43.]]) + + This is equivalent to evaluating the Numpy expression:: + + >>> import numpy as np + >>> c2 = np.array([[4.0, 5.0], + ... [6.0, 7.0], + ... [8.0, 9.0]]) + >>> c1 = np.array([2.0, 3.0]) + >>> c2 * 2.0**2 + c1 * 2.0 + 1.0 + array([[ 21., 27.], + [ 29., 35.], + [ 37., 43.]]) + + Note that in most cases, when using array-valued parameters, the parameters + must obey the standard broadcasting rules for Numpy arrays with respect to + each other:: + + >>> models.Polynomial1D(degree=2, c0=1.0, c1=[2.0, 3.0], + ... c2=[4.0, 5.0, 6.0]) + Traceback (most recent call last): + ... + InputParameterError: Parameter u'c1' of shape (2,) cannot be broadcast + with parameter u'c2' of shape (3,). All parameter arrays must have + shapes that are mutually compatible according to the broadcasting rules. diff --git a/docs/nddata/index.rst b/docs/nddata/index.rst new file mode 100644 index 0000000..c771144 --- /dev/null +++ b/docs/nddata/index.rst @@ -0,0 +1,71 @@ +.. 
_astropy_nddata: + +***************************************** +N-dimensional datasets (`astropy.nddata`) +***************************************** + +Introduction +============ + +`astropy.nddata` provides the `~astropy.nddata.NDData` +class and related tools to manage n-dimensional array-based data (e.g. +CCD images, IFU data, grid-based simulation data, ...). This is more than +just `numpy.ndarray` objects, because it provides metadata that cannot +be easily provided by a single array. + +.. note:: The `~astropy.nddata.NDData` class is still under + development, and support for WCS and units is not yet implemented. + +Getting started +=============== + +An `~astropy.nddata.NDData` object can be instantiated by passing it an +n-dimensional Numpy array:: + + >>> import numpy as np + >>> from astropy.nddata import NDData + >>> array = np.zeros((12, 12, 12)) # a 3-dimensional array with all zeros + >>> ndd = NDData(array) + +This object has a few attributes in common with Numpy: + + >>> ndd.ndim + 3 + >>> ndd.shape + (12, 12, 12) + >>> ndd.dtype + dtype('float64') + +The underlying Numpy array can be accessed via the ``data`` attribute:: + + >>> ndd.data + array([[[ 0., 0., 0., ... + ... + +Values can be masked using the ``mask`` attribute, which should be a boolean +Numpy array with the same dimensions as the data, e.g.:: + + >>> ndd.mask = ndd.data > 0.9 + +A mask value of `True` indicates a value that should be ignored, while a mask +value of `False` indicates a valid value. + +Similarly, attributes are available to store generic meta-data, flags, and +uncertainties, and the `~astropy.nddata.NDData` class includes methods to +combine datasets with arithmetic operations (which include uncertainties propagation). +These are described in :doc:`nddata`. + +Using ``nddata`` +================ + +.. toctree:: + :maxdepth: 2 + + nddata.rst + subclassing.rst + +Reference/API +============= + +.. 
automodapi:: astropy.nddata + :no-inheritance-diagram: diff --git a/docs/nddata/nddata.rst b/docs/nddata/nddata.rst new file mode 100644 index 0000000..6410f31 --- /dev/null +++ b/docs/nddata/nddata.rst @@ -0,0 +1,158 @@ +NDData overview +=============== + +Initializing +------------ + +An `~astropy.nddata.NDData` object can be instantiated by passing it an +n-dimensional Numpy array:: + + >>> import numpy as np + >>> from astropy.nddata import NDData + >>> array = np.zeros((12, 12, 12)) # a 3-dimensional array with all zeros + >>> ndd = NDData(array) + +Note that the data in ``ndd`` is a reference to the original ``array``, so +changing the data in ``ndd`` will change the corresponding data in ``array`` +in most circumstances. + +An `~astropy.nddata.NDData` object can also be instantiated by passing it an +`~astropy.nddata.NDData` object: + + >>> ndd1 = NDData(array) + >>> ndd2 = NDData(ndd1) + +As above, the data in ``ndd2`` is a reference to the data in ``ndd1``, so +changes to one will affect the other. + +This object has a few attributes in common with Numpy: + + >>> ndd.ndim + 3 + >>> ndd.shape + (12, 12, 12) + >>> ndd.dtype + dtype('float64') + +The underlying Numpy array can be accessed via the ``data`` attribute:: + + >>> ndd.data + array([[[ 0., 0., 0., ... + +Mask +---- + +Values can be masked using the ``mask`` attribute, which should be a boolean +Numpy array with the same dimensions as the data, e.g.:: + + >>> ndd.mask = ndd.data > 0.9 + +A mask value of `True` indicates a value that should be ignored, while a mask +value of `False` indicates a valid value. + +Flags +----- + +Values can be assigned one or more flags. The ``flags`` attribute is used to +store either a single Numpy array (of any type) with dimensions matching that +of the data, or a `~astropy.nddata.FlagCollection`, which is +essentially a dictionary of Numpy arrays (of any type) with the same shape as +the data.
The following example demonstrates setting a single set of integer +flags:: + + >>> ndd.flags = np.zeros(ndd.shape) + >>> ndd.flags[ndd.data < 0.1] = 1 + >>> ndd.flags[ndd.data < 0.01] = 2 + +but one can also have multiple flag layers with different types:: + + >>> from astropy.nddata import FlagCollection + >>> ndd.flags = FlagCollection(shape=(12, 12, 12)) + >>> ndd.flags['photometry'] = np.zeros(ndd.shape, dtype=str) + >>> ndd.flags['photometry'][ndd.data > 0.9] = 's' + >>> ndd.flags['cosmic_rays'] = np.zeros(ndd.shape, dtype=int) + >>> ndd.flags['cosmic_rays'][ndd.data > 0.99] = 99 + +and flags can easily be used to set the mask:: + + >>> ndd.mask = ndd.flags['cosmic_rays'] == 99 + +Uncertainties +------------- + +`~astropy.nddata.NDData` objects have an ``uncertainty`` attribute that can be +used to set the uncertainty on the data values. This is done by using classes +to represent the uncertainties of a given type. For example, to set standard +deviation uncertainties on the pixel values, you can do:: + + >>> from astropy.nddata import StdDevUncertainty + >>> ndd.uncertainty = StdDevUncertainty(np.ones((12, 12, 12)) * 0.1) + +.. note:: For information on creating your own uncertainty classes, + see :doc:`subclassing`. 
+ +Arithmetic +---------- + +Provided that the world coordinate system (WCS) and shape match, and that the +units are consistent, two :class:`~astropy.nddata.NDData` instances can be +added, subtracted, multiplied or divided from each other, with uncertainty +propagation, creating a new :class:`~astropy.nddata.NDData` object:: + + ndd3 = ndd1.add(ndd2) + ndd4 = ndd1.subtract(ndd2) + ndd5 = ndd1.multiply(ndd2) + ndd6 = ndd1.divide(ndd2) + +The purpose of the :meth:`~astropy.nddata.nddata.NDData.add`, +:meth:`~astropy.nddata.nddata.NDData.subtract`, +:meth:`~astropy.nddata.nddata.NDData.multiply` and +:meth:`~astropy.nddata.nddata.NDData.divide` methods is to allow the +combination of two data objects that have common WCS and shape and units +consistent with the operation performed, with consistent behavior for masks, +and with a framework to propagate uncertainties. Currently any flags on the +operands are dropped so that the result of the operation always has no flags. +These methods are intended for use by sub-classes and functions that deal with +more complex combinations. + +Entries that are masked in either of the operands are also masked in the +result. + +.. warning:: Uncertainty propagation is still experimental, and does not take + into account correlated uncertainties. + +Meta-data +--------- + +The :class:`~astropy.nddata.NDData` class includes a ``meta`` attribute +that defaults to an empty dictionary, and can be used to set overall meta-data +for the dataset:: + + ndd.meta['exposure_time'] = 340.
+ ndd.meta['filter'] = 'J' + +Elements of the meta-data dictionary can be set to any valid Python object:: + + ndd.meta['history'] = ['calibrated', 'aligned', 'flat-fielded'] + +Converting to Numpy arrays +-------------------------- + +`~astropy.nddata.NDData` objects can also be easily converted to +numpy arrays:: + + >>> import numpy as np + >>> arr = np.array(ndd) + >>> np.all(arr == mydataarray) # doctest: +SKIP + True + +If a ``mask`` is defined, this will result in a `~numpy.ma.MaskedArray`, so +in all cases a useable `numpy.ndarray` or subclass will result. This allows +straightforward plotting of `~astropy.nddata.NDData` objects with 1- +and 2-dimensional datasets using Matplotlib:: + + >>> from matplotlib import pyplot as plt # doctest: +SKIP + >>> plt.plot(ndd) # doctest: +SKIP + +This works because the Matplotlib plotting functions automatically convert +their inputs using `numpy.array`. diff --git a/docs/nddata/subclassing.rst b/docs/nddata/subclassing.rst new file mode 100644 index 0000000..2360aa2 --- /dev/null +++ b/docs/nddata/subclassing.rst @@ -0,0 +1,58 @@ +Subclassing `~astropy.nddata.NDData` and `~astropy.nddata.NDUncertainty` +======================================================================================= + +Subclassing `~astropy.nddata.NDUncertainty` +--------------------------------------------------- + +New error classes should sub-class from `~astropy.nddata.NDUncertainty`, and +should provide methods with the following API:: + + class MyUncertainty(NDUncertainty): + + def propagate_add(self, other_nddata, result_data): + ... + result_uncertainty = MyUncertainty(...) + return result_uncertainty + + def propagate_subtract(self, other_nddata, result_data): + ... + result_uncertainty = MyUncertainty(...) + return result_uncertainty + + def propagate_multiply(self, other_nddata, result_data): + ... + result_uncertainty = MyUncertainty(...) + return result_uncertainty + + def propagate_divide(self, other_nddata, result_data): + ... 
+ result_uncertainty = MyUncertainty(...) + return result_uncertainty + +All error sub-classes inherit an attribute ``self.parent_nddata`` that is +automatically set to the parent `~astropy.nddata.NDData` object that they +are attached to. The arguments passed to the error propagation methods are +``other_nddata``, which is the `~astropy.nddata.NDData` object that is being +combined with ``self.parent_nddata``, and ``result_data``, which is a Numpy +array that contains the data array after the arithmetic operation. All these +methods should return an error instance ``result_uncertainty``, and should not +modify ``parent_nddata`` directly. For subtraction and division, the order of +the operations is ``parent_nddata - other_nddata`` and ``parent_nddata / +other_nddata``. + +To make it easier and clearer to code up the error propagation, you can use +variables with more explicit names, e.g.:: + + class MyUncertainty(NDUncertainty): + + def propagate_add(self, other_nddata, result_data): + + left_uncertainty = self.parent_nddata.uncertainty.array + right_uncertainty = other_nddata.uncertainty.array + + ... + +Note that the above example assumes that the errors are stored in an ``array`` +attribute, but this does not have to be the case. + +For an example of a complete implementation, see `~astropy.nddata.StdDevUncertainty`.
diff --git a/docs/nitpick-exceptions b/docs/nitpick-exceptions new file mode 100644 index 0000000..fb300c5 --- /dev/null +++ b/docs/nitpick-exceptions @@ -0,0 +1,55 @@ +# astropy.cosmology +py:class astropy.cosmology.Cosmology +py:class astropy.cosmology.core.Cosmology + +# astropy.io.votable +py:class astropy.io.votable.tree.Element +py:class astropy.io.votable.tree.SimpleElement +py:class astropy.io.votable.tree.SimpleElementWithContent + +# astropy.modeling +py:class astropy.modeling.projections.Zenithal +py:class astropy.modeling.projections.Cylindrical +py:class astropy.modeling.polynomial.PolynomialBase +py:class astropy.modeling.rotations.EulerAngleRotation +py:class astropy.modeling.projections.Projection + +# astropy.io.fits +py:class astropy.io.fits.hdu.base.ExtensionHDU + +# astropy.utils +py:class astropy.extern.six.Iterator +py:class type +py:class json.encoder.JSONEncoder + +# astropy.table +py:class astropy.table.column.BaseColumn +py:class astropy.table.groups.BaseGroups + +# astropy.time +py:class astropy.time.core.TimeUnique + +# numpy inherited docstrings +py:obj dtype +py:obj a +py:obj a.size == 1 +py:obj n +py:obj ndarray +py:obj args + +# other classes that cannot be linked to +py:class numpy.ma.core.MaskedArray +py:class numpy.core.records.recarray +py:class xmlrpclib.Fault +py:class xmlrpclib.Error + +# Pending on python docs links issue #11975 +py:class list +py:obj list.append +py:obj list.append +py:obj list.count +py:obj list.extend +py:obj list.index +py:obj list.insert +py:meth list.pop +py:obj list.remove \ No newline at end of file diff --git a/docs/overview.rst b/docs/overview.rst new file mode 100644 index 0000000..33860e1 --- /dev/null +++ b/docs/overview.rst @@ -0,0 +1,80 @@ +******** +Overview +******** + +Here we describe a broad overview of the Astropy project and its parts. + +Astropy Project Concept +======================= + +The "Astropy Project" is distinct from the ``astropy`` package. 
The +Astropy Project is a process intended to facilitate communication and +interoperability of python packages/codes in astronomy and astrophysics. +The project thus encompasses the ``astropy`` core package (which provides +a common framework), all "affiliated packages" (described below in +`Affiliated Packages`_), and a general community aimed at bringing +resources together and not duplicating efforts. + + +``astropy`` Core Package +======================== + +The ``astropy`` package (alternatively known as the "core" package) +contains various classes, utilities, and a packaging framework intended +to provide commonly-used astronomy tools. It is divided into a variety +of sub-packages, which are documented in the remainder of this +documentation (see :ref:`user-docs` for documentation of these +components). + +The core also provides this documentation, and a variety of utilities +that simplify starting other python astronomy/astrophysics packages. As +described in the following section, these simplify the process of +creating affiliated packages. + + +Affiliated Packages +=================== + +The Astropy project includes the concept of "affiliated packages." An +affiliated package is an astronomy-related python package that is not +part of the ``astropy`` core source code, but has requested to be included +in the general community effort of the Astropy project. Such a package +may be a candidate for eventual inclusion in the main ``astropy`` package +(although this is not required). Until then, however, it is a separate +package, and may not be in the ``astropy`` namespace. + +The authoritative list of current affiliated packages is available at +http://affiliated.astropy.org, including a machine-readable `JSON file +`_. + +If you are interested in starting an affiliated package, or have a +package you are interested in making more compatible with astropy, the +``astropy`` core package includes features that simplify and homogenize +package management. 
Astropy provides a `package template +`_ that provides a common +way to organize a package, to make your life simpler. You can use this +template either with a new package you are starting or an existing +package to give it most of the organizational tools Astropy provides, +including the documentation, testing, and Cython-building tools. See +the `usage instructions in the template `_ for further details. + +To then get your package listed on the registry, take a look at the +`guidelines for becoming an affiliated package +`_, and then post +your intent on the `astropy-dev mailing list`_. The Astropy +coordination committee, in consultation with the community, will provide +you feedback on the package, and will add it to the registry when it is +approved. + + +Community +========= + +Aside from the actual code, Astropy is also a community of astronomy- +associated users and developers that agree that sharing utilities is +healthy for the community and the science it produces. This community is +of course central to accomplishing anything with the code itself. We +follow the `Python Software Foundation Code of Conduct +`_ and welcome anyone who +wishes to contribute to the project. diff --git a/docs/rtd-pip-requirements b/docs/rtd-pip-requirements new file mode 100644 index 0000000..bfd058a --- /dev/null +++ b/docs/rtd-pip-requirements @@ -0,0 +1,4 @@ +-e git+http://github.com/astropy/astropy-helpers.git#egg=astropy_helpers +numpy>=1.5.0 +matplotlib +Cython diff --git a/docs/stability.rst b/docs/stability.rst new file mode 100644 index 0000000..7909132 --- /dev/null +++ b/docs/stability.rst @@ -0,0 +1,266 @@ +****************************** +Current status of sub-packages +****************************** + +Astropy has benefited from the addition of widely tested legacy code, as well +as new development, resulting in variations in stability across +sub-packages. 
This document summarizes the current status of the Astropy +sub-packages, so that users understand where they might expect changes in +future, and which sub-packages they can safely use for production code. + +Note that until version 1.0, even sub-packages considered *Mature* could +undergo some user interface changes as we work to integrate the packages +better. Thus, we cannot guarantee complete backward-compatibility between +versions at this stage. + +.. |planned| image:: _static/planned.png + +.. |dev| image:: _static/dev.png + +.. |stable| image:: _static/stable.png + +.. |mature| image:: _static/mature.png + +The classification is as follows: + +.. raw:: html + +
+ + + + + + + + + + + + + + + + +
Planned
Actively developed, be prepared for possible significant changes
Reasonably stable, no major changes likely
Mature
+ +The current planned and existing sub-packages are: + +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ Sub-Package + +   + + Comments +
+ astropy.constants + + dev + + Constants have been changed to Quantity objects in v0.2. +
+ astropy.convolution + + dev + + New top-level package in v0.3 (was previously part of + astropy.nddata). + No major changes in v0.4. +
+ astropy.coordinates + + dev + + New in v0.2, major changes in v0.4. Subsequent versions should + maintain a stable/backwards-compatible API. +
+ astropy.cosmology + + stable + + Incremental improvements since v0.1, but mostly stable API. + Pure functional interface deprecated in v0.4. +
+ astropy.io.ascii + + mature + + Originally developed as asciitable, and has maintained a stable API. +
+ astropy.io.fits + + mature + + Originally developed as pyfits, and retains an API consistent with the standalone version. +
+ astropy.io.misc + + mature + + The functionality that is currently present is stable, but this sub-package will likely see major additions in future. +
+ astropy.io.votable + + mature + + Originally developed as vo.table, and has a stable API. +
+ astropy.modeling + + dev + + New in v0.3 +
+ astropy.nddata + + dev + + In development, and does not yet contain much functionality apart from a base class for N-dimensional datasets. +
+ astropy.photometry + + planned + +   +
+ astropy.stats + + dev + + Still in development, and does not yet contain much functionality. +
+ astropy.table + + stable + + Incremental improvements since v0.1, but mostly stable API. +
+ astropy.time + + stable + + Incremental improvements since v0.1, but mostly stable API. +
+ astropy.units + + stable + + New in v0.2. Adapted from pnbody and integrated into Astropy. +
+ astropy.utils + + dev + + This sub-package contains mostly utilities destined for use in other parts of Astropy, and is not yet stable. +
+ astropy.vo + + dev + + Virtual Observatory service access and validation. Currently, only Simple Cone Search and SAMP are supported. +
+ astropy.wcs + + stable + + Originally developed as pywcs, and has a stable API for now. However, there are plans to generalize the WCS interface to accommodate non-FITS WCS transformations, and this may lead to small changes in the user interface. +
+ diff --git a/docs/stats/index.rst b/docs/stats/index.rst new file mode 100644 index 0000000..c561b88 --- /dev/null +++ b/docs/stats/index.rst @@ -0,0 +1,32 @@ +.. _stats: + +*************************************** +Astrostatistics Tools (`astropy.stats`) +*************************************** + +Introduction +============ + +The `astropy.stats` package holds statistical functions or algorithms used +in astronomy and astropy. + +Getting Started +=============== + +The current tools are fairly self-contained, and include relevant examples in +their docstrings. + + +See Also +======== + +* :mod:`scipy.stats` + This scipy package contains a variety of useful statistical functions and + classes. The functionality in `astropy.stats` is intended to supplement + this, *not* replace it. + + +Reference/API +============= + +.. automodapi:: astropy.stats diff --git a/docs/table/access_table.rst b/docs/table/access_table.rst new file mode 100644 index 0000000..4051739 --- /dev/null +++ b/docs/table/access_table.rst @@ -0,0 +1,415 @@ +.. _access_table: + +.. include:: references.txt + +Accessing a table +----------------- + +Accessing the table properties and data is straightforward and is generally consistent with +the basic interface for `numpy` structured arrays. + +Quick overview +^^^^^^^^^^^^^^ + +For the impatient, the code below shows the basics of accessing table data. +Where relevant there is a comment about what sort of object is returned and +how the data contained in that object relate to the original table data +(i.e. whether it is a copy or reference, see :ref:`copy_versus_reference`). +Except where noted, the table access returns objects that can be modified +in order to update table data or properties.
+ +**Make table** +:: + + from astropy.table import Table + import numpy as np + + arr = np.arange(15).reshape(5, 3) + t = Table(arr, names=('a', 'b', 'c'), meta={'keywords': {'key1': 'val1'}}) + +**Table properties** +:: + + t.columns # Dict of table columns + t.colnames # List of column names + t.meta # Dict of meta-data + len(t) # Number of table rows + +**Access table data** +:: + + t['a'] # Column 'a' + t['a'][1] # Row 1 of column 'a' + t[1] # Row obj for with row 1 values + t[1]['a'] # Column 'a' of row 1 + t[2:5] # Table object with rows 2:5 + t[[1, 3, 4]] # Table object with rows 1, 3, 4 (copy) + t[np.array([1, 3, 4])] # Table object with rows 1, 3, 4 (copy) + t['a', 'c'] # Table with cols 'a', 'c' (copy) + dat = np.array(t) # Copy table data to numpy structured array object + +**Print table or column** +:: + + print t # Print formatted version of table to the screen + t.pprint() # Same as above + t.pprint(show_unit=True) # Show column unit + t.pprint(show_name=False) # Do not show column names + t.pprint(max_lines=-1, max_width=-1) # Print full table no matter how long / wide it is + + t.more() # Interactively scroll through table like Unix "more" + + print t['a'] # Formatted column values + t['a'].pprint() # Same as above, with same options as Table.pprint() + t['a'].more() # Interactively scroll through column + + lines = t.pformat() # Formatted table as a list of lines (same options as pprint) + lines = t['a'].pformat() # Formatted column values as a list + + +Details +^^^^^^^ + +For all the following examples it is assumed that the table has been created as below:: + + >>> from astropy.table import Table, Column + >>> import numpy as np + + >>> arr = np.arange(15).reshape(5, 3) + >>> t = Table(arr, names=('a', 'b', 'c'), meta={'keywords': {'key1': 'val1'}}) + >>> t['a'].format = "%6.3f" # print as a float with 3 digits after decimal point + >>> t['a'].unit = 'm sec^-1' + >>> t['a'].description = 'unladen swallow velocity' + >>> print t + a b c + m 
sec^-1 + -------- --- --- + 0.000 1 2 + 3.000 4 5 + 6.000 7 8 + 9.000 10 11 + 12.000 13 14 + +Accessing properties +"""""""""""""""""""" + +The code below shows accessing the table columns as a |TableColumns| object, +getting the column names, table meta-data, and number of table rows. The table +meta-data is simply an ordered dictionary (OrderedDict_) by default. +:: + + >>> t.columns + + + >>> t.colnames + ['a', 'b', 'c'] + + >>> t.meta # Dict of meta-data + {'keywords': {'key1': 'val1'}} + + >>> len(t) + 5 + + +Accessing data +"""""""""""""" + +As expected one can access a table column by name and get an element from that +column with a numerical index:: + + >>> t['a'] # Column 'a' + + array([ 0, 3, 6, 9, 12]) + + >>> t['a'][1] # Row 1 of column 'a' + 3 + +When a table column is printed, either with ``print`` or via the ``str()`` +built-in function, it is formatted according to the ``format`` attribute (see +:ref:`table_format_string`):: + + >>> print(t['a']) + a + ------ + 0.000 + 3.000 + 6.000 + 9.000 + 12.000 + +Likewise a table row and a column from that row can be selected:: + + >>> t[1] # Row object corresponding to row 1 + + + >>> t[1]['a'] # Column 'a' of row 1 + 3 + +A |Row| object has the same columns and meta-data as its parent table:: + + >>> t[1].columns + + + >>> t[1].colnames + ['a', 'b', 'c'] + +Slicing a table returns a new table object which references to the original +data within the slice region (See :ref:`copy_versus_reference`). The table +meta-data and column definitions are copied. 
+:: + + >>> t[2:5] # Table object with rows 2:5 (reference) + + array([(6, 7, 8), (9, 10, 11), (12, 13, 14)], + dtype=[('a', '>> print t[[1, 3, 4]] # Table object with rows 1, 3, 4 (copy) + a b c + m sec^-1 + -------- --- --- + 3.000 4 5 + 9.000 10 11 + 12.000 13 14 + + + >>> print t[np.array([1, 3, 4])] # Table object with rows 1, 3, 4 (copy) + a b c + m sec^-1 + -------- --- --- + 3.000 4 5 + 9.000 10 11 + 12.000 13 14 + + + >>> print t['a', 'c'] # or t[['a', 'c']] or t[('a', 'c')] + ... # Table with cols 'a', 'c' (copy) + a c + m sec^-1 + -------- --- + 0.000 2 + 3.000 5 + 6.000 8 + 9.000 11 + 12.000 14 + +Finally, one can access the underlying table data as a native `numpy` +structured array by creating a copy or reference with ``np.array``:: + + >>> data = np.array(t) # copy of data in t as a structured array + >>> data = np.array(t, copy=False) # reference to data in t + + +Formatted printing +"""""""""""""""""" + +The values in a table or column can be printed or retrieved as a formatted +table using one of several methods: + +- `print` statement (Python 2) or `print()` function (Python 3). +- Table :meth:`~astropy.table.Table.more` or Column + :meth:`~astropy.table.Column.more` methods to interactively scroll + through table values. +- Table :meth:`~astropy.table.Table.pprint` or Column + :func:`~astropy.table.Column.pprint` methods to print a formatted version of + the table to the screen. +- Table :meth:`~astropy.table.Table.pformat` or Column + :func:`~astropy.table.Column.pformat` methods to return the formatted table + or column as a list of fixed-width strings. This could be used as a quick + way to save a table. + +These methods use :ref:`table_format_string` +if available and strive to make the output readable. +By default, table and column printing will +not print the table larger than the available interactive screen size. 
If the +screen size cannot be determined (in a non-interactive environment or on +Windows) then a default size of 25 rows by 80 columns is used. If a table is +too large then rows and/or columns are cut from the middle so it fits. For example:: + + >>> arr = np.arange(3000).reshape(100, 30) # 100 rows x 30 columns array + >>> t = Table(arr) + >>> print t + col0 col1 col2 col3 col4 col5 col6 ... col24 col25 col26 col27 col28 col29 + ---- ---- ---- ---- ---- ---- ---- ... ----- ----- ----- ----- ----- ----- + 0 1 2 3 4 5 6 ... 24 25 26 27 28 29 + 30 31 32 33 34 35 36 ... 54 55 56 57 58 59 + 60 61 62 63 64 65 66 ... 84 85 86 87 88 89 + 90 91 92 93 94 95 96 ... 114 115 116 117 118 119 + 120 121 122 123 124 125 126 ... 144 145 146 147 148 149 + 150 151 152 153 154 155 156 ... 174 175 176 177 178 179 + 180 181 182 183 184 185 186 ... 204 205 206 207 208 209 + 210 211 212 213 214 215 216 ... 234 235 236 237 238 239 + 240 241 242 243 244 245 246 ... 264 265 266 267 268 269 + ... ... ... ... ... ... ... ... ... ... ... ... ... ... + 2760 2761 2762 2763 2764 2765 2766 ... 2784 2785 2786 2787 2788 2789 + 2790 2791 2792 2793 2794 2795 2796 ... 2814 2815 2816 2817 2818 2819 + 2820 2821 2822 2823 2824 2825 2826 ... 2844 2845 2846 2847 2848 2849 + 2850 2851 2852 2853 2854 2855 2856 ... 2874 2875 2876 2877 2878 2879 + 2880 2881 2882 2883 2884 2885 2886 ... 2904 2905 2906 2907 2908 2909 + 2910 2911 2912 2913 2914 2915 2916 ... 2934 2935 2936 2937 2938 2939 + 2940 2941 2942 2943 2944 2945 2946 ... 2964 2965 2966 2967 2968 2969 + 2970 2971 2972 2973 2974 2975 2976 ... 2994 2995 2996 2997 2998 2999 + +more() method +''''''''''''' + +In order to browse all rows of a table or column use the Table +:meth:`~astropy.table.Table.more` or Column :func:`~astropy.table.Column.more` +methods. These let you interactively scroll through the rows much like the +linux ``more`` command. 
Once part of the table or column is displayed the +supported navigation keys are: + +| **f, space** : forward one page +| **b** : back one page +| **r** : refresh same page +| **n** : next row +| **p** : previous row +| **<** : go to beginning +| **>** : go to end +| **q** : quit browsing +| **h** : print this help + +pprint() method +''''''''''''''' + +In order to fully control the print output use the Table +:meth:`~astropy.table.Table.pprint` or Column +:func:`~astropy.table.Column.pprint` methods. These have keyword +arguments ``max_lines``, ``max_width``, ``show_name``, ``show_unit`` with +meaning as shown below:: + + >>> arr = np.arange(3000, dtype=float).reshape(100, 30) + >>> t = Table(arr) + >>> t['col0'].format = '%e' + >>> t['col1'].format = '%.6f' + >>> t['col0'].unit = 'km**2' + >>> t['col29'].unit = 'kg sec m**-2' + + >>> t.pprint(max_lines=8, max_width=40) + col0 ... col29 + km2 ... kg sec m**-2 + ------------ ... ------------ + 0.000000e+00 ... 29.0 + 3.000000e+01 ... 59.0 + ... ... ... + 2.940000e+03 ... 2969.0 + 2.970000e+03 ... 2999.0 + + + >>> t.pprint(max_lines=8, max_width=40, show_unit=True) + col0 ... col29 + km2 ... kg sec m**-2 + ------------ ... ------------ + 0.000000e+00 ... 29.0 + 3.000000e+01 ... 59.0 + ... ... ... + 2.940000e+03 ... 2969.0 + 2.970000e+03 ... 2999.0 + + >>> t.pprint(max_lines=8, max_width=40, show_name=False) + km2 ... kg sec m**-2 + ------------ ... ------------ + 0.000000e+00 ... 29.0 + 3.000000e+01 ... 59.0 + 6.000000e+01 ... 89.0 + ... ... ... + 2.940000e+03 ... 2969.0 + 2.970000e+03 ... 2999.0 + +In order to force printing all values regardless of the output length or width +set ``max_lines`` or ``max_width`` to ``-1``, respectively. 
For the wide +table in this example one sees 6 lines of wrapped output like the following:: + + >>> t.pprint(max_lines=6, max_width=-1) # doctest: +SKIP + col0 col1 col2 col3 col4 col5 col6 col7 col8 col + 9 col10 col11 col12 col13 col14 col15 col16 col17 col18 col19 col20 + col21 col22 col23 col24 col25 col26 col27 col28 col29 + ------------ ----------- ------ ------ ------ ------ ------ ------ ------ ---- + -- ------ ------ ------ ------ ------ ------ ------ ------ ------ ------ ----- + - ------ ------ ------ ------ ------ ------ ------ ------ ------ + 0.000000e+00 1.000000 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9 + .0 10.0 11.0 12.0 13.0 14.0 15.0 16.0 17.0 18.0 19.0 20. + 0 21.0 22.0 23.0 24.0 25.0 26.0 27.0 28.0 29.0 + 3.000000e+01 31.000000 32.0 33.0 34.0 35.0 36.0 37.0 38.0 39 + .0 40.0 41.0 42.0 43.0 44.0 45.0 46.0 47.0 48.0 49.0 50. + 0 51.0 52.0 53.0 54.0 55.0 56.0 57.0 58.0 59.0 + ... ... ... ... ... ... ... ... ... . + .. ... ... ... ... ... ... ... ... ... ... .. + . ... ... ... ... ... ... ... ... ... + 2.970000e+03 2971.000000 2972.0 2973.0 2974.0 2975.0 2976.0 2977.0 2978.0 2979 + .0 2980.0 2981.0 2982.0 2983.0 2984.0 2985.0 2986.0 2987.0 2988.0 2989.0 2990. + 0 2991.0 2992.0 2993.0 2994.0 2995.0 2996.0 2997.0 2998.0 2999.0 + +For columns the syntax and behavior of +:func:`~astropy.table.Column.pprint` is the same except that there is no +``max_width`` keyword argument:: + + >>> t['col3'].pprint(max_lines=8) + col3 + ------ + 3.0 + 33.0 + 63.0 + ... + 2943.0 + 2973.0 + +pformat() method +'''''''''''''''' + +In order to get the formatted output for manipulation or writing to a file use +the Table :meth:`~astropy.table.Table.pformat` or Column +:func:`~astropy.table.Column.pformat` methods. These behave just as for +:meth:`~astropy.table.Table.pprint` but return a list corresponding to each formatted line in the +:meth:`~astropy.table.Table.pprint` output. 
+ + >>> lines = t['col3'].pformat(max_lines=8) + >>> lines + [' col3 ', '------', ' 3.0', ' 33.0', ' 63.0', ' ...', '2943.0', '2973.0'] + +Multidimensional columns +'''''''''''''''''''''''' + +If a column has more than one dimension then each element of the column is +itself an array. In the example below there are 3 rows, each of which is a +``2 x 2`` array. The formatted output for such a column shows only the first +and last value of each row element and indicates the array dimensions in the +column name header:: + + >>> from astropy.table import Table, Column + >>> import numpy as np + >>> t = Table() + >>> arr = [ np.array([[ 1, 2], + ... [10, 20]]), + ... np.array([[ 3, 4], + ... [30, 40]]), + ... np.array([[ 5, 6], + ... [50, 60]]) ] + >>> t['a'] = arr + >>> t['a'].shape + (3, 2, 2) + >>> t.pprint() + a [2,2] + ------- + 1 .. 20 + 3 .. 40 + 5 .. 60 + +In order to see all the data values for a multidimensional column use the +column representation. This uses the standard `numpy` mechanism for printing +any array:: + + >>> t['a'] + + array([[[ 1, 2], + [10, 20]], + [[ 3, 4], + [30, 40]], + [[ 5, 6], + [50, 60]]]) diff --git a/docs/table/construct_table.rst b/docs/table/construct_table.rst new file mode 100644 index 0000000..a6879b9 --- /dev/null +++ b/docs/table/construct_table.rst @@ -0,0 +1,844 @@ +.. include:: references.txt + +.. _construct_table: + +Constructing a table +-------------------- + +There is great deal of flexibility in the way that a table can be initially +constructed. Details on the inputs to the |Table| +constructor are in the `Initialization Details`_ section. However, the +easiest way to understand how to make a table is by example. + +Examples +^^^^^^^^ + +Much of the flexibility lies in the types of data structures +which can be used to initialize the table data. 
The examples below show how to +create a table from scratch with no initial data, create a table with a list of +columns, a dictionary of columns, or from `numpy` arrays (either structured or +homogeneous). + +Setup +""""" +For the following examples you need to import the |Table| and |Column| classes +along with the `numpy` package:: + + >>> from astropy.table import Table, Column + >>> import numpy as np + +Creating from scratch +""""""""""""""""""""" +A Table can be created without any initial input data or even without any +initial columns. This is useful for building tables dynamically if the initial +size, columns, or data are not known. + +.. Note:: + Adding columns or rows requires making a new copy of the entire + table each time, so in the case of large tables this may be slow. + +:: + + >>> t = Table() + >>> t['a'] = [1, 4] + >>> t['b'] = Column([2.0, 5.0], unit='cm', description='Velocity') + >>> t['c'] = ['x', 'y'] + + >>> t = Table(names=('a', 'b', 'c'), dtype=('f4', 'i4', 'S2')) + >>> t.add_row((1, 2.0, 'x')) + >>> t.add_row((4, 5.0, 'y')) + + +List of columns +""""""""""""""" +A typical case is where you have a number of data columns with the same length +defined in different variables. These might be Python lists or `numpy` arrays +or a mix of the two. These can be used to create a |Table| by putting the column +data variables into a Python list. In this case the column names are not +defined by the input data, so they must either be set using the ``names`` +keyword or they will be auto-generated as ``col``. + +:: + + >>> a = [1, 4] + >>> b = [2.0, 5.0] + >>> c = ['x', 'y'] + >>> t = Table([a, b, c], names=('a', 'b', 'c')) + >>> t
+ array([(1, 2.0, 'x'), (4, 5.0, 'y')], + dtype=[('a', '>> Table([t['c'], t['a']]) +
+ array([('x', 1), ('y', 4)], + dtype=[('c', 'S1'), ('a', '>> Table([t['a']**2, t['b'] + 10]) +
+ array([(1, 12.0), (16, 15.0)], + dtype=[('a', '>> a = (1, 4) + >>> b = np.array([[2, 3], [5, 6]]) # vector column + >>> c = Column(['x', 'y'], name='axis') + >>> arr = (a, b, c) + >>> Table(arr) # Data column named "c" has a name "axis" that table +
+ array([(1, [2, 3], 'x'), (4, [5, 6], 'y')], + dtype=[('col0', '>> arr = {'a': [1, 4], + ... 'b': [2.0, 5.0], + ... 'c': ['x', 'y']} + >>> + >>> Table(arr) # doctest: +SKIP +
+ array([(1, 'x', 2.0), (4, 'y', 5.0)], + dtype=[('a', '>> Table(arr, names=('a', 'b', 'c'), dtype=('f4', 'i4', 'S2')) +
+ array([(1.0, 2, 'x'), (4.0, 5, 'y')], + dtype=[('a', '>> arr = {'a': (1, 4), + ... 'b': np.array([[2, 3], [5, 6]]), + ... 'c': Column(['x', 'y'], name='axis')} + >>> Table(arr, names=('a', 'b', 'c')) +
+ array([(1, [2, 3], 'x'), (4, [5, 6], 'y')], + dtype=[('a', '>> Table(arr, names=('a_new', 'b_new', 'c_new')) + Traceback (most recent call last): + ... + KeyError: 'a_new' + + +Row data +""""""""" +Row-oriented data can be used to create a table using the ``rows`` +keyword argument. + +**List of data records as list or tuple** + +If you have row-oriented input data such as a list of records, you +need to use the ``rows`` keyword to create a table:: + + >>> data_rows = [(1, 2.0, 'x'), + ... (4, 5.0, 'y'), + ... (5, 8.2, 'z')] + >>> t = Table(rows=data_rows, names=('a', 'b', 'c')) + >>> print(t) + a b c + --- --- --- + 1 2.0 x + 4 5.0 y + 5 8.2 z + +The data object passed as the ``rows`` argument can be any form which is +parsable by the ``np.rec.fromrecords()`` function. + +**List of dict objects** + +You can also initialize a table with row values. This is constructed as a +list of dict objects. The keys determine the column names:: + + >>> data = [{'a': 5, 'b': 10}, + ... {'a': 15, 'b': 20}] + >>> Table(rows=data) +
+ array([(5, 10), (15, 20)], + dtype=[('a', '>> t = Table(rows=[{'a': 5, 'b': 10}, {'a': 15, 'b': 30, 'c': 50}]) + Traceback (most recent call last): + ... + ValueError: Row 0 has no value for column c + +**Single row** + +You can also make a new table from a single row of an existing table:: + + >>> a = [1, 4] + >>> b = [2.0, 5.0] + >>> t = Table([a, b], names=('a', 'b')) + >>> t2 = Table(rows=t[1]) + +Remember that a |Row| has effectively a zero length compared to the +newly created |Table| which has a length of one. This is similar to +the difference between a scalar ``1`` (length 0) and an array like +``np.array([1])`` with length 1. + +.. Note:: + + In the case of input data as a list of dicts or a single Table row, it is + allowed to supply the data as the ``data`` argument since these forms + are always unambiguous. For example ``Table([{'a': 1}, {'a': 2}])`` is + accepted. However, a list of records must always be provided using the + ``rows`` keyword, otherwise it will be interpreted as a list of columns. + +NumPy structured array +"""""""""""""""""""""" +The structured array is the standard mechanism in `numpy` for storing heterogenous +table data. Most scientific I/O packages that read table files (e.g. +`PyFITS `_, +`vo.table `_, +`asciitable `_) +will return the table in an object that is based on the structured array. +A structured array can be created using:: + + >>> arr = np.array([(1, 2.0, 'x'), + ... (4, 5.0, 'y')], + ... dtype=[('a', 'i8'), ('b', 'f8'), ('c', 'S2')]) + +From ``arr`` it is simple to create the corresponding |Table| object:: + + >>> Table(arr) +
+ array([(1, 2.0, 'x'), (4, 5.0, 'y')], + dtype=[('a', '>> table = Table(arr) + >>> print table + a b c + --- --- --- + 1 2.0 x + 4 5.0 y + +**New column names** + +The column names can be changed from the original values by providing the +``names`` argument:: + + >>> Table(arr, names=('a_new', 'b_new', 'c_new')) +
+ array([(1, 2.0, 'x'), (4, 5.0, 'y')], + dtype=[('a_new', '>> Table(arr, dtype=('f4', 'i4', 'S4')) +
+ array([(1.0, 2, 'x'), (4.0, 5, 'y')], + dtype=[('a', '>> Table(arr, names=('a_new', 'b_new', 'c_new'), dtype=('f4', 'i4', 'S4')) +
+ array([(1.0, 2, 'x'), (4.0, 5, 'y')], + dtype=[('a_new', '`` where ```` is the column number. + +**Basic example with automatic column names** +:: + + >>> arr = np.array([[1, 2, 3], + ... [4, 5, 6]]) + >>> Table(arr) +
+ array([(1, 2, 3), (4, 5, 6)], + dtype=[('col0', '>> Table(arr, names=('a_new', 'b_new', 'c_new'), dtype=('f4', 'i4', 'S4')) +
+ array([(1.0, 2, '3'), (4.0, 5, '6')], + dtype=[('a_new', '>> t = Table(arr, copy=False) + +**Python arrays versus `numpy` arrays as input** + +There is a slightly subtle issue that is important to understand in the way +that |Table| objects are created. Any data input that looks like a Python list +(including a tuple) is considered to be a list of columns. In contrast an +homogeneous `numpy` array input is interpreted as a list of rows:: + + >>> arr = [[1, 2, 3], + ... [4, 5, 6]] + >>> np_arr = np.array(arr) + + >>> Table(arr) # Two columns, three rows +
+ array([(1, 4), (2, 5), (3, 6)], + dtype=[('col0', '>> Table(np_arr) # Three columns, two rows +
+ array([(1, 2, 3), (4, 5, 6)], + dtype=[('col0', '>> t = Table(names=('a', 'b', 'c')) + >>> t2 = t['c', 'b', 'a'] # Makes a copy of the data + >>> print t2 + c b a + --- --- --- + +An alternate way is to use the ``columns`` attribute (explained in the +`TableColumns`_ section) to initialize a new table. This lets you choose +columns by their numerical index or name and supports slicing syntax:: + + >>> Table(t.columns[0:2]) +
+ array([], + dtype=[('a', '>> Table([t.columns[0], t.columns['c']]) +
+ array([], + dtype=[('a', '". If provided the ``dtype`` list overrides the + base column types and must match the length of ``names``. + +**dict-like** + The keys of the ``data`` object define the base column names. The + corresponding values can be Column objects, numpy arrays, or list-like + objects. The ``names`` list (optional) can be used to select + particular fields and/or reorder the base names. The ``dtype`` list + (optional) must match the length of ``names`` and is used to override + the existing or default data types. + +**list-like** + Each item in the ``data`` list provides a column of data values and + can be a Column object, numpy array, or list-like object. The + ``names`` list defines the name of each column. The names will be + auto-generated if not provided (either from the ``names`` argument or + by Column objects). If provided the ``names`` argument must match the + number of items in the ``data`` list. The optional ``dtype`` list + will override the existing or default data types and must match + ``names`` in length. + +**list-of-dicts** + Similar to Python's builtin ``csv.DictReader``, each item in the + ``data`` list provides a row of data values and must be a dict. The + key values in each dict define the column names and each row must + have identical column names. The ``names`` argument may be supplied + to specify column ordering. If it is not provided, the column order will + default to alphabetical. The ``dtype`` list may be specified, and must + correspond to the order of output columns. If any row's keys do no match + the rest of the rows, a ValueError will be thrown. + + +**None** + Initialize a zero-length table. If ``names`` and optionally ``dtype`` + are provided then the corresponding columns are created. + +names +""""" + +The ``names`` argument provides a way to specify the table column names or +override the existing ones. 
By default the column names are either taken +from existing names (for ``ndarray`` or ``Table`` input) or auto-generated +as ``col``. If ``names`` is provided then it must be a list with the +same length as the number of columns. Any list elements with value +``None`` fall back to the default name. + +In the case where ``data`` is provided as dict of columns, the ``names`` +argument can be supplied to specify the order of columns. The ``names`` list +must then contain each of the keys in the ``data`` dict. If ``names`` is not +supplied then the order of columns in the output table is not determinate. + +dtype +""""" + +The ``dtype`` argument provides a way to specify the table column data +types or override the existing types. By default the types are either +taken from existing types (for ``ndarray`` or ``Table`` input) or +auto-generated by the ``numpy.array()`` routine. If ``dtype`` is provided +then it must be a list with the same length as the number of columns. The +values must be valid ``numpy.dtype`` initializers or ``None``. Any list +elements with value ``None`` fall back to the default type. + +In the case where ``data`` is provided as dict of columns, the ``dtype`` argument +must be accompanied by a corresponding ``names`` argument in order to uniquely +specify the column ordering. + +meta +"""" + +The ``meta`` argument is simply an object that contains meta-data associated +with the table. It is recommended that this object be a dict or +OrderedDict_, but the only firm requirement is that it can be copied with +the standard library ``copy.deepcopy()`` routine. By default ``meta`` is +an empty OrderedDict_. + +copy +"""" + +By default the input ``data`` are copied into a new internal ``np.ndarray`` +object in the Table object. In the case where ``data`` is either an +``np.ndarray`` object or an existing ``Table``, it is possible to use a +reference to the existing data by setting ``copy=False``. 
This has the +advantage of reducing memory use and being faster. However one should take +care because any modifications to the new Table data will also be seen in the +original input data. See the `Copy versus Reference`_ section for more +information. + + +.. _copy_versus_reference: + +Copy versus Reference +^^^^^^^^^^^^^^^^^^^^^ + +Normally when a new |Table| object is created, the input data are *copied* into +a new internal array object. This ensures that if the new table elements are +modified then the original data will not be affected. However, when creating a +table from a numpy ndarray object (structured or homogeneous), it is possible to +disable copying so that instead a memory reference to the original data is +used. This has the advantage of being faster and using less memory. However, +caution must be exercised because the new table data and original data will be +linked, as shown below:: + + >>> arr = np.array([(1, 2.0, 'x'), + ... (4, 5.0, 'y')], + ... dtype=[('a', 'i8'), ('b', 'f8'), ('c', 'S2')]) + >>> print arr['a'] # column "a" of the input array + [1 4] + >>> t = Table(arr, copy=False) + >>> t['a'][1] = 99 + >>> print arr['a'] # arr['a'] got changed when we modified t['a'] + [ 1 99] + +Note that when referencing the data it is not possible to change the data types +since that operation requires making a copy of the data. In this case an error +occurs:: + + >>> t = Table(arr, copy=False, dtype=('f4', 'i4', 'S4')) + Traceback (most recent call last): + ... + ValueError: Cannot specify dtype when copy=False + +Another caveat in using referenced data is that you cannot add new row to the +table. This generates an error because of conflict between the two references +to the same underlying memory. Internally, adding a row may involve moving +the data to a new memory location which would corrupt the input data object. 
+`numpy` does not allow this:: + + >>> t.add_row([1, 2, 3]) + Traceback (most recent call last): + File "", line 1, in + File "astropy/table/table.py", line 760, in add_row + self._data.resize((newlen,), refcheck=False) + ValueError: cannot resize this array: it does not own its data + + +Column and TableColumns classes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +There are two classes, |Column| and |TableColumns|, that are useful when +constructing new tables. + +Column +"""""" + +A |Column| object can be created as follows, where in all cases the column +``name`` should be provided as a keyword argument and one can optionally provide +these values: + +``data`` : list, ndarray or None + Column data values +``dtype`` : numpy.dtype compatible value + Data type for column +``description`` : str + Full description of column +``unit`` : str + Physical unit +``format`` : str or function + `Format specifier`_ for outputting column values +``meta`` : dict + Meta-data associated with the column + +Initialization options +'''''''''''''''''''''' + +The column data values, shape, and data type are specified in one of two ways: + +**Provide a ``data`` value but not a ``length`` or ``shape``** + + Examples:: + + col = Column([1, 2], name='a') # shape=(2,) + col = Column([[1, 2], [3, 4]], name='a') # shape=(2, 2) + col = Column([1, 2], name='a', dtype=float) + col = Column(np.array([1, 2]), name='a') + col = Column(['hello', 'world'], name='a') + + The ``dtype`` argument can be any value which is an acceptable + fixed-size data-type initializer for the numpy.dtype() method. See + ``_. + Examples include: + + - Python non-string type (float, int, bool) + - Numpy non-string type (e.g. np.float32, np.int64, np.bool) + - Numpy.dtype array-protocol type strings (e.g. 'i4', 'f8', 'S15') + + If no ``dtype`` value is provided then the type is inferred using + ``np.array(data)``. When ``data`` is provided then the ``shape`` + and ``length`` arguments are ignored. 
+ +**Provide ``length`` and optionally ``shape``, but not ``data``** + + Examples:: + + col = Column(name='a', length=5) + col = Column(name='a', dtype=int, length=10, shape=(3,4)) + + The default ``dtype`` is ``np.float64``. The ``shape`` argument is the array shape of a + single cell in the column. The default ``shape`` is () which means a single value in + each element. + +.. note:: + + After setting the type for a column, that type cannot be changed. + If data values of a different type are assigned to the column then they + will be cast to the existing column type. + +.. _table_format_string: + +Format specifier +'''''''''''''''' + +The format specifier controls the output of column values when a table or column +is printed or written to an ASCII table. In the simplest case, it is a string +that can be passed to python's built-in `format +`_ function. For more +complicated formatting, one can also give "old-style" or "new-style" +format strings, or even a function: + +**Plain format specification** + +This type of string specifies directly how the value should be formatted, +using a `format specification mini-language +`_ that is +quite similar to C. + + ``".4f"`` will give four digits after the decimal in float format, or + + ``"6d"`` will give integers in 6-character fields. + +**Old-style format string** + +This corresponds to syntax like ``"%.4f" % value`` as documented in +`String formatting operations `_. + + ``"%.4f"`` to print four digits after the decimal in float format, or + + ``"%6d"`` to print an integer in a 6-character wide field. + +**New-style format string** + +This corresponds to syntax like ``"{:.4f}".format(value)`` as documented in +`format string syntax +`_. + + ``"{:.4f}"`` to print four digits after the decimal in float format, or + + ``"{:6d}"`` to print an integer in a 6-character wide field. 
+ +Note that in either format string case any Python string that formats exactly +one value is valid, so ``{:.4f} angstroms`` or ``Value: %12.2f`` would both +work. + +**Function** + +The greatest flexibility can be achieved by setting a formatting function. This +function must accept a single argument (the value) and return a string. In the +following example this is used to make a LaTeX ready output:: + + >>> t = Table([[1,2],[1.234e9,2.34e-12]], names = ('a','b')) + >>> def latex_exp(value): + ... val = '{0:8.2}'.format(value) + ... mant, exp = val.split('e') + ... # remove leading zeros + ... exp = exp[0] + exp[1:].lstrip('0') + ... return '$ {0} \\times 10^{{ {1} }}$' .format(mant, exp) + >>> t['b'].format = latex_exp + >>> t['a'].format = '.4f' + >>> import sys + >>> t.write(sys.stdout, format='latex') + \begin{table} + \begin{tabular}{cc} + a & b \\ + 1.0000 & $ 1.2 \times 10^{ +9 }$ \\ + 2.0000 & $ 2.3 \times 10^{ -12 }$ \\ + \end{tabular} + \end{table} + + +TableColumns +"""""""""""" + +Each |Table| object has an attribute ``columns`` which is an ordered dictionary +that stores all of the |Column| objects in the table (see also the `Column`_ +section). Technically the ``columns`` attribute is a |TableColumns| object, +which is an enhanced ordered dictionary that provides easier ways to select +multiple columns. There are a few key points to remember: + +- A |Table| can be initialized from a |TableColumns| object (copy is always True). +- Selecting multiple columns from a |TableColumns| object returns another + |TableColumns| object. +- Select one column from a |TableColumns| object returns a |Column|. 
+ +So now look at the ways to select columns from a |TableColumns| object: + +**Select columns by name** +:: + + >>> t = Table(names=('a', 'b', 'c', 'd')) + + >>> t.columns['d', 'c', 'b'] + + +**Select columns by index slicing** +:: + + >>> t.columns[0:2] # Select first two columns + + + >>> t.columns[::-1] # Reverse column order + + +**Select column by index or name** +:: + + >>> t.columns[1] # Choose columns by index + + array([], dtype=float64) + + >>> t.columns['b'] # Choose column by name + + array([], dtype=float64) + + +.. _subclassing_table: + +Subclassing Table +^^^^^^^^^^^^^^^^^ + +For some applications it can be useful to subclass the |Table| class in order +to introduce specialized behavior. In addition to subclassing |Table| it is +frequently desirable to change the behavior of the internal class objects which +are contained or created by a Table. This includes rows, columns, formatting, +and the columns container. In order to do this the subclass needs to declare +what class to use (if it is different from the built-in version). This is done by +specifying one or more of the class attributes ``Row``, ``Column``, +``MaskedColumn``, ``TableColumns``, or ``TableFormatter``. + +The following trivial example overrides all of these with do-nothing +subclasses, but in practice you would override only the necessary subcomponents:: + + >>> from astropy.table import Table, Row, Column, MaskedColumn, TableColumns, TableFormatter + + >>> class MyRow(Row): pass + >>> class MyColumn(Column): pass + >>> class MyMaskedColumn(MaskedColumn): pass + >>> class MyTableColumns(TableColumns): pass + >>> class MyTableFormatter(TableFormatter): pass + + >>> class MyTable(Table): + ... """ + ... Custom subclass of astropy.table.Table + ... """ + ... Row = MyRow # Use MyRow to create a row object + ... Column = MyColumn # Column + ... MaskedColumn = MyMaskedColumn # Masked Column + ... TableColumns = MyTableColumns # Ordered dict holding Column objects + ... 
TableFormatter = MyTableFormatter # Controls table output + + +Example +"""""""" + +As a more practical example, suppose you have a table of data with a certain set of fixed +columns, but you also want to carry an arbitrary dictionary of keyword=value +parameters for each row and then access those values using the same item access +syntax as if they were columns. It is assumed here that the extra parameters +are contained in a numpy object-dtype column named ``params``:: + + >>> from astropy.table import Table, Row + >>> class ParamsRow(Row): + ... """ + ... Row class that allows access to an arbitrary dict of parameters + ... stored as a dict object in the ``params`` column. + ... """ + ... def __getitem__(self, item): + ... if item not in self.colnames: + ... return self.data['params'][item] + ... else: + ... return self.data[item] + ... + ... def keys(self): + ... out = [name for name in self.colnames if name != 'params'] + ... params = [key.lower() for key in sorted(self.data['params'])] + ... return out + params + ... + ... def values(self): + ... return [self[key] for key in self.keys()] + +Now we put this into action with a trivial |Table| subclass:: + + >>> class ParamsTable(Table): + ... Row = ParamsRow + +First make a table and add a couple of rows:: + + >>> t = ParamsTable(names=['a', 'b', 'params'], dtype=['i', 'f', 'O']) + >>> t.add_row((1, 2.0, {'x': 1.5, 'y': 2.5})) + >>> t.add_row((2, 3.0, {'z': 'hello', 'id': 123123})) + >>> print(t) # doctest: +SKIP + a b params + --- --- ---------------------------- + 1 2.0 {'y': 2.5, 'x': 1.5} + 2 3.0 {'z': 'hello', 'id': 123123} + +Now see what we have from our specialized ``ParamsRow`` object:: + + >>> t[0]['y'] + 2.5 + >>> t[1]['id'] + 123123 + >>> t[1].keys() + ['a', 'b', 'id', 'z'] + >>> t[1].values() + [2, 3.0, 123123, 'hello'] + +To make this example really useful you might want to override +``Table.__getitem__`` in order to allow table-level access to the parameter +fields. 
This might look something like:: + + class ParamsTable(table.Table): + Row = ParamsRow + + def __getitem__(self, item): + if isinstance(item, six.string_types): + if item in self.colnames: + return self.columns[item] + else: + # If item is not a column name then create a new MaskedArray + # corresponding to self['params'][item] for each row. This + # might not exist in some rows so mark as masked (missing) in + # those cases. + mask = np.zeros(len(self), dtype=np.bool) + item = item.upper() + values = [params.get(item) for params in self['params']] + for ii, value in enumerate(values): + if value is None: + mask[ii] = True + values[ii] = '' + return self.MaskedColumn(name=item, data=values, mask=mask) + + # ... and then the rest of the original __getitem__ ... diff --git a/docs/table/index.rst b/docs/table/index.rst new file mode 100644 index 0000000..3632bc6 --- /dev/null +++ b/docs/table/index.rst @@ -0,0 +1,263 @@ +.. include:: references.txt + +.. _astropy-table: + +***************************** +Data Tables (`astropy.table`) +***************************** + +Introduction +============ + +`astropy.table` provides functionality for storing and manipulating +heterogeneous tables of data in a way that is familiar to `numpy` users. A few +notable features of this package are: + +* Initialize a table from a wide variety of input data structures and types. +* Modify a table by adding or removing columns, changing column names, + or adding new rows of data. +* Handle tables containing missing values. +* Include table and column metadata as flexible data structures. +* Specify a description, units and output formatting for columns. +* Interactively scroll through long tables similar to using ``more``. +* Create a new table by selecting rows or columns from a table. +* Perform :ref:`table_operations` like database joins and concatenation. +* Manipulate multidimensional columns. 
+* Methods for :ref:`read_write_tables` to files +* Hooks for :ref:`subclassing_table` and its component classes + +Currently `astropy.table` is used when reading an ASCII table using +`astropy.io.ascii`. Future releases of AstroPy are expected to use +the |Table| class for other subpackages such as `astropy.io.votable` and `astropy.io.fits` . + +Getting Started +=============== + +The basic workflow for creating a table, accessing table elements, +and modifying the table is shown below. These examples show a very simple +case, while the full `astropy.table` documentation is available from the +:ref:`using_astropy_table` section. + +First create a simple table with three columns of data named ``a``, ``b``, +and ``c``. These columns have integer, float, and string values respectively:: + + >>> from astropy.table import Table + >>> a = [1, 4, 5] + >>> b = [2.0, 5.0, 8.2] + >>> c = ['x', 'y', 'z'] + >>> t = Table([a, b, c], names=('a', 'b', 'c'), meta={'name': 'first table'}) + +If you have row-oriented input data such as a list of records, use the ``rows`` +keyword:: + + >>> data_rows = [(1, 2.0, 'x'), + ... (4, 5.0, 'y'), + ... (5, 8.2, 'z')] + >>> t = Table(rows=data_rows, names=('a', 'b', 'c'), meta={'name': 'first table'}) + +There are a few ways to examine the table. You can get detailed information +about the table values and column definitions as follows:: + + >>> t +
+ array([(1, 2.0, 'x'), (4, 5.0, 'y'), (5, 8..., 'z')], + dtype=[('a', '>> t['b'].unit = 's' + >>> t +
+ array([(1, 2.0, 'x'), (4, 5.0, 'y'), (5, 8..., 'z')], + dtype=[('a', '>> print(t) + a b c + s + --- --- --- + 1 2.0 x + 4 5.0 y + 5 8.2 z + +If you do not like the format of a particular column, you can change it:: + + >>> t['b'].format = '7.3f' + >>> print(t) + a b c + s + --- ------- --- + 1 2.000 x + 4 5.000 y + 5 8.200 z + +For a long table you can scroll up and down through the table one page at +time:: + + >>> t.more() # doctest: +SKIP + +You can also display it as an HTML-formatted table in the browser:: + + >>> t.show_in_browser() # doctest: +SKIP + +or as an interactive (searchable & sortable) javascript table:: + + >>> t.show_in_browser(jsviewer=True) # doctest: +SKIP + +Now examine some high-level information about the table:: + + >>> t.colnames + ['a', 'b', 'c'] + >>> len(t) + 3 + >>> t.meta + {'name': 'first table'} + +Access the data by column or row using familiar `numpy` structured array syntax:: + + >>> t['a'] # Column 'a' + + array([1, 4, 5]) + + >>> t['a'][1] # Row 1 of column 'a' + 4 + + >>> t[1] # Row obj for with row 1 values + + + >>> t[1]['a'] # Column 'a' of row 1 + 4 + +One can retrieve a subset of a table by rows (using a slice) or +columns (using column names), where the subset is returned as a new table:: + + >>> print(t[0:2]) # Table object with rows 0 and 1 + a b c + s + --- ------- --- + 1 2.000 x + 4 5.000 y + + >>> print(t['a', 'c']) # Table with cols 'a', 'c' + a c + --- --- + 1 x + 4 y + 5 z + +Modifying table values in place is flexible and works as one would expect:: + + >>> t['a'] = [-1, -2, -3] # Set all column values + >>> t['a'][2] = 30 # Set row 2 of column 'a' + >>> t[1] = (8, 9.0, "W") # Set all row values + >>> t[1]['b'] = -9 # Set column 'b' of row 1 + >>> t[0:2]['b'] = 100.0 # Set column 'b' of rows 0 and 1 + >>> print(t) + a b c + s + --- ------- --- + -1 100.000 x + 8 100.000 W + 30 8.200 z + +Add, remove, and rename columns with the following:: + + >>> t['d'] = [1, 2, 3] + >>> del t['c'] + >>> 
t.rename_column('a', 'A') + >>> t.colnames + ['A', 'b', 'd'] + +Adding a new row of data to the table is as follows:: + + >>> t.add_row([-8, -9, 10]) + >>> len(t) + 4 + +Lastly, one can create a table with support for missing values, for example by setting +``masked=True``:: + + >>> t = Table([a, b, c], names=('a', 'b', 'c'), masked=True) + >>> t['a'].mask = [True, True, False] + >>> t +
+ masked_array(data = [(--, 2.0, 'x') (--, 5.0, 'y') (5, 8..., 'z')], + mask = [(True, False, False) (True, False, False) (False, False, False)], + fill_value = (999999, 1e+20, 'N'), + dtype = [('a', '>> print(t) + a b c + --- --- --- + -- 2.0 x + -- 5.0 y + 5 8.2 z + +.. _using_astropy_table: + +Using ``table`` +=============== + +The details of using `astropy.table` are provided in the following sections: + +Construct table +--------------- + +.. toctree:: + :maxdepth: 2 + + construct_table.rst + +Access table +--------------- + +.. toctree:: + :maxdepth: 2 + + access_table.rst + +Modify table +--------------- + +.. toctree:: + :maxdepth: 2 + + modify_table.rst + +Table operations +----------------- + +.. toctree:: + :maxdepth: 2 + + operations.rst + +Masking +--------------- + +.. toctree:: + :maxdepth: 2 + + masking.rst + +I/O with tables +---------------- + +.. toctree:: + :maxdepth: 2 + + io.rst + +Reference/API +============= + +.. automodapi:: astropy.table diff --git a/docs/table/io.rst b/docs/table/io.rst new file mode 100644 index 0000000..b88e7eb --- /dev/null +++ b/docs/table/io.rst @@ -0,0 +1,65 @@ +.. doctest-skip-all + +.. _read_write_tables: + +Reading and writing Table objects +=================================== + +Astropy provides a unified interface for reading and writing data +in different formats. For many common cases this will +simplify the process of file I/O and reduce the need to master +the separate details of all the I/O packages within Astropy. For details and +examples of using this interface see the :ref:`table_io` +section. + +Getting started +---------------- + +The :class:`~astropy.table.Table` class includes two methods, +:meth:`~astropy.table.Table.read` and +:meth:`~astropy.table.Table.write`, that make it possible to read from +and write to files. 
A number of formats are automatically supported (see +:ref:`built_in_readers_writers`) and new file formats and extensions can be +registered with the :class:`~astropy.table.Table` class (see +:ref:`io_registry`). + +To use this interface, first import the :class:`~astropy.table.Table` class, then +simply call the :class:`~astropy.table.Table` +:meth:`~astropy.table.Table.read` method with the name of the file and +the file format, for instance ``'ascii.daophot'``:: + + >>> from astropy.table import Table + >>> t = Table.read('photometry.dat', format='ascii.daophot') + +It is possible to load tables directly from the Internet using URLs. For example, +download tables from Vizier catalogues in CDS format (``'ascii.cds'``):: + + >>> t = Table.read("ftp://cdsarc.u-strasbg.fr/pub/cats/VII/253/snrs.dat", + ... readme="ftp://cdsarc.u-strasbg.fr/pub/cats/VII/253/ReadMe", + ... format="ascii.cds") + +For certain file formats, the format can be automatically detected, for +example from the filename extension:: + + >>> t = Table.read('table.tex') + +Similarly, for writing, the format can be explicitly specified:: + + >>> t.write(filename, format='latex') + +As for the :meth:`~astropy.table.Table.read` method, the format may +be automatically identified in some cases. + +Any additional arguments specified will depend on the format. For examples of this see the +section :ref:`built_in_readers_writers`. This section also provides the full list of +choices for the ``format`` argument. + +Supported formats +------------------ + +The :ref:`table_io` has built-in support for the following data file formats: + +* :ref:`table_io_ascii` +* :ref:`table_io_hdf5` +* :ref:`table_io_fits` +* :ref:`table_io_votable` diff --git a/docs/table/masking.rst b/docs/table/masking.rst new file mode 100644 index 0000000..29b6ceb --- /dev/null +++ b/docs/table/masking.rst @@ -0,0 +1,176 @@ +.. 
include:: references.txt + +Masking and missing values +-------------------------- + +The `astropy.table` package provides support for masking and missing +values in a table by wrapping the ``numpy.ma`` masked array package. +This allows handling tables with missing or invalid entries in much +the same manner as for standard (unmasked) tables. It +is useful to be familiar with the `masked array +`_ +documentation when using masked tables within `astropy.table`. + +In a nutshell, the concept is to define a boolean mask that mirrors +the structure of the table data array. Wherever a mask value is +`True`, the corresponding entry is considered to be missing or invalid. +Operations involving column or row access and slicing are unchanged. +The key difference is that arithmetic or reduction operations involving +columns or column slices follow the rules for `operations +on masked arrays +`_. + +.. Note:: + + Reduction operations like `numpy.sum` or `numpy.mean` follow the + convention of ignoring masked (invalid) values. This differs from + the behavior of the floating point ``NaN``, for which the sum of an + array including one or more ``NaN's`` will result in ``NaN``. + See ``_ for a very + interesting discussion of different strategies for handling + missing data in the context of `numpy`. + +Table creation +^^^^^^^^^^^^^^^ + +A masked table can be created in several ways: + +**Create a new table object and specify masked=True** :: + + >>> from astropy.table import Table, Column, MaskedColumn + >>> t = Table([(1, 2), (3, 4)], names=('a', 'b'), masked=True) + >>> t +
+ masked_array(data = [(1, 3) (2, 4)], + mask = [(False, False) (False, False)], + fill_value = (999999, 999999), + dtype = [('a', ' + +Notice the table attributes ``mask`` and ``fill_value`` that are +available for a masked table. + +**Create a table with one or more columns as a MaskedColumn object** + + >>> a = MaskedColumn([1, 2], name='a') + >>> b = Column([3, 4], name='b') + >>> t = Table([a, b]) + +The |MaskedColumn| is the masked analog of the |Column| class and +provides the interface for creating and manipulating a column of +masked data. The |MaskedColumn| class inherits from +`numpy.ma.MaskedArray`, in contrast to |Column| which inherits from +`numpy.ndarray`. This distinction is the main reason there are +different classes for these two cases. + +**Create a table with one or more columns as a numpy MaskedArray** + + >>> from numpy import ma # masked array package + >>> a = ma.array([1, 2]) + >>> b = [3, 4] + >>> t = Table([a, b], names=('a', 'b')) + +**Add a MaskedColumn object to an existing table** + + >>> t = Table([[1, 2]], names=['a']) + >>> b = MaskedColumn([3, 4], mask=[True, False]) + >>> t['b'] = b + INFO: Upgrading Table to masked Table. Use Table.filled() to convert to unmasked table. [astropy.table.table] + +Note the INFO message because the underlying type of the table is modified in this operation. + +**Add a new row to an existing table and specify a mask argument** + + >>> a = Column([1, 2], name='a') + >>> b = Column([3, 4], name='b') + >>> t = Table([a, b]) + >>> t.add_row([3, 6], mask=[True, False]) + INFO: Upgrading Table to masked Table. Use Table.filled() to convert to unmasked table. 
Nearly all of the standard methods
To perform the actual replacement +operation the ``filled()`` method is called. This takes an optional +argument which can override the default column ``fill_value`` +attribute. +:: + + >>> t['a'].fill_value = -99 + >>> t['b'].fill_value = 33 + + >>> print t.filled() + a b + --- --- + 1 33 + -99 4 + + >>> print t['a'].filled() + a + --- + 1 + -99 + + >>> print t['a'].filled(999) + a + --- + 1 + 999 + + >>> print t.filled(1000) + a b + ---- ---- + 1 1000 + 1000 4 diff --git a/docs/table/modify_table.rst b/docs/table/modify_table.rst new file mode 100644 index 0000000..2b01e68 --- /dev/null +++ b/docs/table/modify_table.rst @@ -0,0 +1,177 @@ +.. _modify_table: + +.. include:: references.txt + +Modifying a table +----------------- + +The data values within a |Table| object can be modified in much the same manner +as for `numpy` structured arrays by accessing columns or rows of data and +assigning values appropriately. A key enhancement provided by the |Table| class +is the ability to easily modify the structure of the table: one can add or +remove columns, and add new rows of data. + +Quick overview +^^^^^^^^^^^^^^ + +The code below shows the basics of modifying a table and its data. + + +**Make a table** +:: + + >>> from astropy.table import Table + >>> import numpy as np + >>> arr = np.arange(15).reshape(5, 3) + >>> t = Table(arr, names=('a', 'b', 'c'), meta={'keywords': {'key1': 'val1'}}) + +**Modify data values** +:: + + >>> t['a'] = [1, -2, 3, -4, 5] # Set all column values + >>> t['a'][2] = 30 # Set row 2 of column 'a' + >>> t[1] = (8, 9, 10) # Set all row values + >>> t[1]['b'] = -9 # Set column 'b' of row 1 + >>> t[0:3]['c'] = 100 # Set column 'c' of rows 0, 1, 2 + +Note that ``table[row][column]`` assignments will not work with +`numpy` "fancy" ``row`` indexing (in that case ``table[row]`` would be +a *copy* instead of a *view*). "Fancy" `numpy` indices include a +`list`, `numpy.ndarray`, or `tuple` of `numpy.ndarray` (e.g. 
the +return from `numpy.where`):: + + >>> t[[1, 2]]['a'] = [3., 5.] # doesn't change table t + >>> t[np.array([1, 2])]['a'] = [3., 5.] # doesn't change table t + >>> t[np.where(t['a'] > 3)]['a'] = 3. # doesn't change table t + +Instead use ``table[column][row]`` order:: + + >>> t['a'][[1, 2]] = [3., 5.] + >>> t['a'][np.array([1, 2])] = [3., 5.] + >>> t['a'][np.where(t['a'] > 3)] = 3. + +**Add a column or columns** + +A single column can be added to a table using syntax like adding a dict value. +The value on the right hand side can be a list or array +of the correct size, or a scalar value that will be broadcast:: + + >>> t['d1'] = np.arange(5) + >>> t['d2'] = [1, 2, 3, 4, 5] + >>> t['d3'] = 6 # all 5 rows set to 6 + +For more explicit control the :meth:`~astropy.table.Table.add_column` and +:meth:`~astropy.table.Table.add_columns` methods can be used to add one or multiple +columns to a table. In both cases the new columns must be specified as |Column| or +|MaskedColumn| objects with the ``name`` defined:: + + >>> from astropy.table import Column + >>> aa = Column(np.arange(5), name='aa') + >>> t.add_column(aa, index=0) # Insert before the first table column + + # Make a new table with the same number of rows and add columns to original table + >>> t2 = Table(np.arange(25).reshape(5, 5), names=('e', 'f', 'g', 'h', 'i')) + >>> t.add_columns(t2.columns.values()) + +Finally, columns can also be added from +:class:`~astropy.units.Quantity` objects, which automatically sets the +``.unit`` attribute on the column: + + >>> from astropy import units as u + >>> t['d'] = np.arange(1., 6.) 
**Sort by one or more columns**
As an example, imagine trying to set two table elements +using column selection with ``t['a', 'c']`` in combination with row index selection:: + + >>> t = Table([[1, 2], [3, 4], [5, 6]], names=('a', 'b', 'c')) + >>> t['a', 'c'][1] = (100, 100) + >>> print t + a b c + --- --- --- + 1 3 5 + 2 4 6 + +This might be surprising because the data values did not change and there +was no error. In fact what happened is that ``t['a', 'c']`` created a +new temporary table in memory as a *copy* of the original and then updated +row 1 of the copy. The original ``t`` table was unaffected and the new +temporary table disappeared once the statement was complete. The takeaway +is to pay attention to how certain operations are performed one step at +a time. diff --git a/docs/table/operations.rst b/docs/table/operations.rst new file mode 100644 index 0000000..7d770a1 --- /dev/null +++ b/docs/table/operations.rst @@ -0,0 +1,771 @@ +.. include:: references.txt +.. |join| replace:: :func:`~astropy.table.join` + +.. _table_operations: + +Table operations +----------------- + +In this section we describe higher-level operations that can be used to generate a new +table from one or more input tables. This includes: + +======================= + +.. list-table:: + :header-rows: 1 + :widths: 28 52 20 + + * - Documentation + - Description + - Function + * - `Grouped operations`_ + - Group tables and columns by keys + - `~astropy.table.Table.group_by` + * - `Stack vertically`_ + - Concatenate input tables along rows + - `~astropy.table.vstack` + * - `Stack horizontally`_ + - Concatenate input tables along columns + - `~astropy.table.hstack` + * - `Join`_ + - Database-style join of two tables + - `~astropy.table.join` + + +.. _grouped-operations: + +Grouped operations +^^^^^^^^^^^^^^^^^^ + +Sometimes in a table or table column there are natural groups within the dataset for which +it makes sense to compute some derived values. 
M82 2012-02-14 16.2 14.5 << Third group (index=7, key='M82')
+ +The initial argument (``keys``) for the `~astropy.table.Table.group_by` function +can take a number of input data types: + +- Single string value with a table column name (as shown above) +- List of string values with table column names +- Another |Table| or |Column| with same length as table +- Numpy structured array with same length as table +- Numpy homogeneous array with same length as table + +In all cases the corresponding row elements are considered as a tuple of values which +form a key value that is used to sort the original table and generate +the required groups. + +As an example, to get the average magnitudes for each object on each observing +night, we would first group the table on both ``name`` and ``obs_date`` as follows:: + + >>> print obs.group_by(['name', 'obs_date']).groups.keys + name obs_date + ---- ---------- + M101 2012-01-02 + M101 2012-02-14 + M101 2012-03-26 + M31 2012-01-02 + M31 2012-02-14 + M82 2012-02-14 + M82 2012-03-26 + + +Manipulating groups +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Once you have applied grouping to a table then you can easily access the individual +groups or subsets of groups. In all cases this returns a new grouped table. 
+For instance to get the sub-table which corresponds to the second group (index=1) +do:: + + >>> print obs_by_name.groups[1] + name obs_date mag_b mag_v + ---- ---------- ----- ----- + M31 2012-01-02 17.0 17.5 + M31 2012-01-02 17.1 17.4 + M31 2012-02-14 16.9 17.3 + +To get the first and second groups together use a slice:: + + >>> groups01 = obs_by_name.groups[0:2] + >>> print groups01 + name obs_date mag_b mag_v + ---- ---------- ----- ----- + M101 2012-01-02 15.1 13.5 + M101 2012-02-14 15.0 13.6 + M101 2012-03-26 15.1 13.5 + M101 2012-03-26 14.8 14.3 + M31 2012-01-02 17.0 17.5 + M31 2012-01-02 17.1 17.4 + M31 2012-02-14 16.9 17.3 + >>> print groups01.groups.keys + name + ---- + M101 + M31 + +You can also supply a numpy array of indices or a boolean mask to select particular +groups, e.g.:: + + >>> mask = obs_by_name.groups.keys['name'] == 'M101' + >>> print obs_by_name.groups[mask] + name obs_date mag_b mag_v + ---- ---------- ----- ----- + M101 2012-01-02 15.1 13.5 + M101 2012-02-14 15.0 13.6 + M101 2012-03-26 15.1 13.5 + M101 2012-03-26 14.8 14.3 + +One can iterate over the group sub-tables and corresponding keys with:: + + >>> from itertools import izip + >>> for key, group in izip(obs_by_name.groups.keys, obs_by_name.groups): + ... print('****** {0} *******'.format(key['name'])) + ... print group + ... print + ... 
+ ****** M101 ******* + name obs_date mag_b mag_v + ---- ---------- ----- ----- + M101 2012-01-02 15.1 13.5 + M101 2012-02-14 15.0 13.6 + M101 2012-03-26 15.1 13.5 + M101 2012-03-26 14.8 14.3 + ****** M31 ******* + name obs_date mag_b mag_v + ---- ---------- ----- ----- + M31 2012-01-02 17.0 17.5 + M31 2012-01-02 17.1 17.4 + M31 2012-02-14 16.9 17.3 + ****** M82 ******* + name obs_date mag_b mag_v + ---- ---------- ----- ----- + M82 2012-02-14 16.2 14.5 + M82 2012-02-14 15.2 15.5 + M82 2012-03-26 15.7 16.5 + +Column Groups +~~~~~~~~~~~~~~ + +Like |Table| objects, |Column| objects can also be grouped for subsequent +manipulation with grouped operations. This can apply both to columns within a +|Table| or bare |Column| objects. + +As for |Table|, the grouping is generated with the +`~astropy.table.Table.group_by` method. The difference here is that +there is no option of providing one or more column names since that +doesn't make sense for a |Column|. + +Examples:: + + >>> from astropy.table import Column + >>> import numpy as np + >>> c = Column([1, 2, 3, 4, 5, 6], name='a') + >>> key_vals = np.array(['foo', 'bar', 'foo', 'foo', 'qux', 'qux']) + >>> cg = c.group_by(key_vals) + + >>> for key, group in izip(cg.groups.keys, cg.groups): + ... print('****** {0} *******'.format(key)) + ... print group + ... print + ... + ****** bar ******* + a + --- + 2 + ****** foo ******* + a + --- + 1 + 3 + 4 + ****** qux ******* + a + --- + 5 + 6 + + +Aggregation +~~~~~~~~~~~~~~ + +Aggregation is the process of applying a +specified reduction function to the values within each group for each +non-key column. This function must accept a numpy array as the first +argument and return a single scalar value. Common function examples are +`numpy.sum`, `numpy.mean`, and `numpy.std`. 
+ +For the example grouped table ``obs_by_name`` from above we compute the group means with +the `~astropy.table.groups.TableGroups.aggregate` method:: + + >>> obs_mean = obs_by_name.groups.aggregate(np.mean) # doctest: +SKIP + WARNING: Cannot aggregate column 'obs_date' [astropy.table.groups] + >>> print obs_mean # doctest: +SKIP + name mag_b mag_v + ---- ----- ------ + M101 15.0 13.725 + M31 17.0 17.4 + M82 15.7 15.5 + +It seems the magnitude values were successfully averaged, but what +about the WARNING? Since the ``obs_date`` column is a string-type +array, the `numpy.mean` function failed and raised an exception. +Any time this happens then `~astropy.table.groups.TableGroups.aggregate` +will issue a warning and then +drop that column from the output result. Note that the ``name`` +column is one of the ``keys`` used to determine the grouping so +it is automatically ignored from aggregation. + +From a grouped table it is possible to select one or more columns on which +to perform the aggregation:: + + >>> print obs_by_name['mag_b'].groups.aggregate(np.mean) + mag_b + ----- + 15.0 + 17.0 + 15.7 + + >>> print obs_by_name['name', 'mag_v', 'mag_b'].groups.aggregate(np.mean) + name mag_v mag_b + ---- ------ ----- + M101 13.725 15.0 + M31 17.4 17.0 + M82 15.5 15.7 + +A single column of data can be aggregated as well:: + + >>> c = Column([1, 2, 3, 4, 5, 6], name='a') + >>> key_vals = np.array(['foo', 'bar', 'foo', 'foo', 'qux', 'qux']) + >>> cg = c.group_by(key_vals) + >>> cg_sums = cg.groups.aggregate(np.sum) + >>> for key, cg_sum in izip(cg.groups.keys, cg_sums): + ... print 'Sum for {0} = {1}'.format(key, cg_sum) + ... + Sum for bar = 2 + Sum for foo = 8 + Sum for qux = 11 + +If the specified function has a `numpy.ufunc.reduceat` method, this will be called instead. +This can improve the performance by a factor of 10 to 100 (or more) for large unmasked +tables or columns with many relatively small groups. 
It also allows for the use of +certain numpy functions which normally take more than one input array but also work as +reduction functions, like `numpy.add`. The numpy functions which should take advantage of +using `numpy.ufunc.reduceat` include: + +`numpy.add`, `numpy.arctan2`, `numpy.bitwise_and`, `numpy.bitwise_or`, `numpy.bitwise_xor`, +`numpy.copysign`, `numpy.divide`, `numpy.equal`, `numpy.floor_divide`, `numpy.fmax`, +`numpy.fmin`, `numpy.fmod`, `numpy.greater_equal`, `numpy.greater`, `numpy.hypot`, +`numpy.left_shift`, `numpy.less_equal`, `numpy.less`, `numpy.logaddexp2`, +`numpy.logaddexp`, `numpy.logical_and`, `numpy.logical_or`, `numpy.logical_xor`, +`numpy.maximum`, `numpy.minimum`, `numpy.mod`, `numpy.multiply`, `numpy.not_equal`, +`numpy.power`, `numpy.remainder`, `numpy.right_shift`, `numpy.subtract` and `numpy.true_divide`. + +As special cases `numpy.sum` and `numpy.mean` are substituted with their +respective reduceat methods. + + +Filtering +~~~~~~~~~~ + +Table groups can be filtered by means of the +`~astropy.table.groups.TableGroups.filter` method. This is done by +supplying a function which is called for each group. The function +which is passed to this method must accept two arguments: + +- ``table`` : |Table| object +- ``key_colnames`` : list of columns in ``table`` used as keys for grouping + +It must then return either `True` or `False`. As an example, the following +will select all table groups with only positive values in the non-key columns:: + + >>> def all_positive(table, key_colnames): + ... colnames = [name for name in table.colnames if name not in key_colnames] + ... for colname in colnames: + ... if np.any(table[colname] < 0): + ... return False + ... return True + +An example of using this function is:: + + >>> t = Table.read(""" a b c + ... -2 7.0 0 + ... -2 5.0 1 + ... 1 3.0 -5 + ... 1 -2.0 -6 + ... 1 1.0 7 + ... 0 0.0 4 + ... 3 3.0 5 + ... 3 -2.0 6 + ... 
`~astropy.table.groups.ColumnGroups.filter` method, but
This is the default behavior and corresponds to +``join_type='outer'``. There are two other allowed values for the ``join_type`` argument, +``'inner'`` and ``'exact'``:: + + >>> print vstack([obs1, obs2], join_type='inner') + name obs_date logLx + ------- ---------- ----- + M31 2012-01-02 42.5 + M82 2012-10-29 43.5 + M101 2012-10-31 44.5 + NGC3516 2011-11-11 42.1 + M31 1999-01-05 43.1 + M82 2012-10-30 45.0 + + >>> print vstack([obs1, obs2], join_type='exact') + Traceback (most recent call last): + ... + TableMergeError: Inconsistent columns in input arrays (use 'inner' + or 'outer' join_type to allow non-matching columns) + +In the case of ``join_type='inner'``, only the common columns (the intersection) are +present in the output table. When ``join_type='exact'`` is specified then +`~astropy.table.vstack` requires that all the input tables +have exactly the same column names. + +More than two tables can be stacked by supplying a list of table objects:: + + >>> obs3 = Table.read("""name obs_date mag_b logLx + ... M45 2012-02-03 15.0 40.5""", format='ascii') + >>> print vstack([obs1, obs2, obs3]) + name obs_date mag_b logLx + ------- ---------- ----- ----- + M31 2012-01-02 17.0 42.5 + M82 2012-10-29 16.2 43.5 + M101 2012-10-31 15.1 44.5 + NGC3516 2011-11-11 -- 42.1 + M31 1999-01-05 -- 43.1 + M82 2012-10-30 -- 45.0 + M45 2012-02-03 15.0 40.5 + +See also the sections on `Merging metadata`_ and `Merging column +attributes`_ for details on how these characteristics of the input tables are merged in +the single output table. Note also that you can use a single table row instead of a +full table as one of the inputs. + +.. _stack-horizontally: + +Stack horizontally +^^^^^^^^^^^^^^^^^^^^^ + +The |Table| class supports stacking tables horizontally (in the column-wise direction) with the +`~astropy.table.hstack` function. It corresponds roughly +to the `numpy.hstack` function. 
+ +For example, suppose one has the following two tables:: + + >>> from astropy.table import Table, hstack + >>> t1 = Table.read("""a b c + ... 1 foo 1.4 + ... 2 bar 2.1 + ... 3 baz 2.8""", format='ascii') + >>> t2 = Table.read("""d e + ... ham eggs + ... spam toast""", format='ascii') + +Now we can stack these two tables horizontally:: + + >>> print hstack([t1, t2]) + a b c d e + --- --- --- ---- ----- + 1 foo 1.4 ham eggs + 2 bar 2.1 spam toast + 3 baz 2.8 -- -- + +As with `~astropy.table.vstack`, there is an optional ``join_type`` argument +that can take values ``'inner'``, ``'exact'``, and ``'outer'``. The default is +``'outer'``, which effectively takes the union of available rows and masks out any missing +values. This is illustrated in the example above. The other options give the +intersection of rows, where ``'exact'`` requires that all tables have exactly the same +number of rows:: + + >>> print hstack([t1, t2], join_type='inner') + a b c d e + --- --- --- ---- ----- + 1 foo 1.4 ham eggs + 2 bar 2.1 spam toast + + >>> print hstack([t1, t2], join_type='exact') + Traceback (most recent call last): + ... + TableMergeError: Inconsistent number of rows in input arrays (use 'inner' or + 'outer' join_type to allow non-matching rows) + +More than two tables can be stacked by supplying a list of table objects. The example +below also illustrates the behavior when there is a conflict in the input column names +(see the section on `Column renaming`_ for details):: + + >>> t3 = Table.read("""a b + ... M45 2012-02-03""", format='ascii') + >>> print hstack([t1, t2, t3]) + a_1 b_1 c d e a_3 b_3 + --- --- --- ---- ----- --- ---------- + 1 foo 1.4 ham eggs M45 2012-02-03 + 2 bar 2.1 spam toast -- -- + 3 baz 2.8 -- -- -- -- + + +The metadata from the input tables is merged by the process described in the `Merging +metadata`_ section. Note also that you can use a single table row instead of a +full table as one of the inputs. + +.. 
_table-join: + +Join +^^^^^^^^^^^^^^ + +The |Table| class supports the `database join `_ +operation. This provides a flexible and powerful way to combine tables based on the +values in one or more key columns. + +For example, suppose one has two tables of observations, the first with B and V magnitudes +and the second with X-ray luminosities of an overlapping (but not identical) sample:: + + >>> from astropy.table import Table, join + >>> optical = Table.read("""name obs_date mag_b mag_v + ... M31 2012-01-02 17.0 16.0 + ... M82 2012-10-29 16.2 15.2 + ... M101 2012-10-31 15.1 15.5""", format='ascii') + >>> xray = Table.read(""" name obs_date logLx + ... NGC3516 2011-11-11 42.1 + ... M31 1999-01-05 43.1 + ... M82 2012-10-29 45.0""", format='ascii') + +The |join| method allows one to merge these two tables into a single table based on +matching values in the "key columns". By default the key columns are the set of columns +that are common to both tables. In this case the key columns are ``name`` and +``obs_date``. We can find all the observations of the same object on the same date as +follows:: + + >>> opt_xray = join(optical, xray) + >>> print opt_xray + name obs_date mag_b mag_v logLx + ---- ---------- ----- ----- ----- + M82 2012-10-29 16.2 15.2 45.0 + +We can perform the match only by ``name`` by providing the ``keys`` argument, which can be +either a single column name or a list of column names:: + + >>> print join(optical, xray, keys='name') + name obs_date_1 mag_b mag_v obs_date_2 logLx + ---- ---------- ----- ----- ---------- ----- + M31 2012-01-02 17.0 16.0 1999-01-05 43.1 + M82 2012-10-29 16.2 15.2 2012-10-29 45.0 + +This output table has all observations that have both optical and X-ray data for an object +(M31 and M82). Notice that since the ``obs_date`` column occurs in both tables it has +been split into two columns, ``obs_date_1`` and ``obs_date_2``. The values are taken from +the "left" (``optical``) and "right" (``xray``) tables, respectively. 
+ +The table joins so far are known as "inner" joins and represent the strict intersection of +the two tables on the key columns. + +If one wants to make a new table which has *every* row from the left table and includes +matching values from the right table when available, this is known as a left join:: + + >>> print join(optical, xray, join_type='left') + name obs_date mag_b mag_v logLx + ---- ---------- ----- ----- ----- + M101 2012-10-31 15.1 15.5 -- + M31 2012-01-02 17.0 16.0 -- + M82 2012-10-29 16.2 15.2 45.0 + +Two of the observations do not have X-ray data, as indicated by the "--" in the table. +When there are any missing values the output will be a masked table. You might be +surprised that there is no X-ray data for M31 in the output. Remember that the default +matching key includes both ``name`` and ``obs_date``. Specifying the key as only the +``name`` column gives:: + + >>> print join(optical, xray, join_type='left', keys='name') + name obs_date_1 mag_b mag_v obs_date_2 logLx + ---- ---------- ----- ----- ---------- ----- + M101 2012-10-31 15.1 15.5 -- -- + M31 2012-01-02 17.0 16.0 1999-01-05 43.1 + M82 2012-10-29 16.2 15.2 2012-10-29 45.0 + +Likewise one can construct a new table with every row of the right table and matching left +values (when available) using ``join_type='right'``. + +Finally, to make a table with the union of rows from both tables do an "outer" join:: + + >>> print join(optical, xray, join_type='outer') + name obs_date mag_b mag_v logLx + ------- ---------- ----- ----- ----- + M101 2012-10-31 15.1 15.5 -- + M31 1999-01-05 -- -- 43.1 + M31 2012-01-02 17.0 16.0 -- + M82 2012-10-29 16.2 15.2 45.0 + NGC3516 2011-11-11 -- -- 42.1 + + +Identical keys +~~~~~~~~~~~~~~ + +The |Table| join operation works even if there are multiple rows with identical key +values. 
For example the following tables have multiple rows for the key column ``x``:: + + >>> from astropy.table import Table, join + >>> left = Table([[0, 1, 1, 2], ['L1', 'L2', 'L3', 'L4']], names=('key', 'L')) + >>> right = Table([[1, 1, 2, 4], ['R1', 'R2', 'R3', 'R4']], names=('key', 'R')) + >>> print left + key L + --- --- + 0 L1 + 1 L2 + 1 L3 + 2 L4 + >>> print right + key R + --- --- + 1 R1 + 1 R2 + 2 R3 + 4 R4 + +Doing an outer join on these tables shows that what is really happening is a `Cartesian +product `_. For each matching key, every +combination of the left and right tables is represented. When there is no match in either +the left or right table, the corresponding column values are designated as missing. + +.. doctest-skip:: win32 + + >>> print join(left, right, join_type='outer') + key L R + --- --- --- + 0 L1 -- + 1 L2 R1 + 1 L2 R2 + 1 L3 R1 + 1 L3 R2 + 2 L4 R3 + 4 -- R4 + +.. note:: + + The output table is sorted on the key columns, but when there are rows with identical + keys the output order in the non-key columns is not guaranteed to be identical across + installations. In the example above the order within the four rows with ``key == 1`` + can vary. + +An inner join is the same but only returns rows where there is a key match in both the +left and right tables: + +.. doctest-skip:: win32 + + >>> print join(left, right, join_type='inner') + key L R + --- --- --- + 1 L2 R1 + 1 L2 R2 + 1 L3 R1 + 1 L3 R2 + 2 L4 R3 + +Conflicts in the input table names are handled by the process described in the section on +`Column renaming`_. See also the sections on `Merging metadata`_ and `Merging column +attributes`_ for details on how these characteristics of the input tables are merged in +the single output table. + +Merging details +^^^^^^^^^^^^^^^^^^^^ + +When combining two or more tables there is the need to merge certain +characteristics in the inputs and potentially resolve conflicts. This +section describes the process. 
+ +Column renaming +~~~~~~~~~~~~~~~~~ + + +In cases where the input tables have conflicting column names, there +is a mechanism to generate unique output column names. There are two +keyword arguments that control the renaming behavior: + +``table_names`` + Two-element list of strings that provide a name for the tables being joined. + By default this is ``['1', '2', ...]``, where the numbers correspond to + the input tables. + +``uniq_col_name`` + String format specifier with a default value of ``'{col_name}_{table_name}'``. + +This is most easily understood by example using the ``optical`` and ``xray`` tables +in the |join| example defined previously:: + + >>> print join(optical, xray, keys='name', + ... table_names=['OPTICAL', 'XRAY'], + ... uniq_col_name='{table_name}_{col_name}') + name OPTICAL_obs_date mag_b mag_v XRAY_obs_date logLx + ---- ---------------- ----- ----- ------------- ----- + M31 2012-01-02 17.0 16.0 1999-01-05 43.1 + M82 2012-10-29 16.2 15.2 2012-10-29 45.0 + + +Merging metadata +~~~~~~~~~~~~~~~~~~~ + +|Table| objects can have associated metadata: + +- ``Table.meta``: table-level metadata as an ordered dictionary +- ``Column.meta``: per-column metadata as an ordered dictionary + +The table operations described here handle the task of merging the metadata in the input +tables into a single output structure. Because the metadata can be arbitrarily complex +there is no unique way to do the merge. 
The current implementation uses a simple +recursive algorithm with four rules: + +- `dict` elements are merged by keys +- Conflicting `list` or `tuple` elements are concatenated +- Conflicting `dict` elements are merged by recursively calling the merge function +- Conflicting elements that are not both `list`, `tuple`, or `dict` will follow the following rules: + - If both metadata values are identical, the output is set to this value + - If one of the conflicting metadata values is `None`, the other value is picked + - If both metadata values are different and neither is `None`, the one for the last table in the list is picked + +By default, a warning is emitted in the last case (both metadata values are not +`None`). The warning can be silenced or made into an exception using the +``metadata_conflicts`` argument to :func:`~astropy.table.hstack`, +:func:`~astropy.table.vstack`, or +:func:`~astropy.table.join`. The ``metadata_conflicts`` option can be set to: + +- ``'silent'`` - no warning is emitted, the value for the last table is silently picked +- ``'warn'`` - a warning is emitted, the value for the last table is picked +- ``'error'`` - an exception is raised + +Merging column attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In addition to the table and column ``meta`` attributes, the column attributes ``unit``, +``format``, and ``description`` are merged by going through the input tables in +order and taking the first value which is defined (i.e. is not None). For example:: + + >>> from astropy.table import Column, Table, vstack + >>> col1 = Column([1], name='a') + >>> col2 = Column([2], name='a', unit='cm') + >>> col3 = Column([3], name='a', unit='m') + >>> t1 = Table([col1]) + >>> t2 = Table([col2]) + >>> t3 = Table([col3]) + >>> out = vstack([t1, t2, t3]) # doctest: +SKIP + WARNING: MergeConflictWarning: In merged column 'a' the 'unit' attribute does + not match (cm != m). 
Using m for merged output [astropy.table.operations] + >>> out['a'].unit # doctest: +SKIP + Unit("m") + +The rules for merging are as for `Merging metadata`_, and the +``metadata_conflicts`` option also controls the merging of column attributes. diff --git a/docs/table/references.txt b/docs/table/references.txt new file mode 100644 index 0000000..383dab7 --- /dev/null +++ b/docs/table/references.txt @@ -0,0 +1,6 @@ +.. |Row| replace:: :class:`~astropy.table.Row` +.. |Table| replace:: :class:`~astropy.table.Table` +.. |Column| replace:: :class:`~astropy.table.Column` +.. |MaskedColumn| replace:: :class:`~astropy.table.MaskedColumn` +.. |TableColumns| replace:: :class:`~astropy.table.TableColumns` +.. _OrderedDict: http://docs.python.org/library/collections.html#collections.OrderedDict diff --git a/docs/table/table_repr_html.png b/docs/table/table_repr_html.png new file mode 100644 index 0000000000000000000000000000000000000000..b57ea0f31476ea9b8dbbdb45d19f04ef6a322dd3 GIT binary patch literal 9278 zcmb_i2UJtpx@IgR3If7VhAP%kqzwuxO;G_yB`Q*sCKfsdL5h^%AVoyMj!22ihRO%vy8bdw1Pg_ug+I!6cmQv-keL@^4}fT9_?gCcjKb zNND+9^WBGqgccr!pUWi{z<2XR6&-k3N;Ea~gGX_Au|#O`y0mfhq>uSgcv3_X{rj$| zlf*?&2KelB6%s;MTWl!Q`Kueey1H)f?wv=1+8CYZeLl5R8vBqV*Q!daz9wf~b0Th) z^etVz1rA>Bhujo4KHup0c9nmUN6Sk8AK7XP#eOb)e%s?qTiy?C3m$$pQMVsGd?-Ec zy9Em_k6dqzTzlMB@`gc`aRcL>UyR6s)BDCN9K%mh{L)x=CQ`J%wo|vQm%M8F@WVH0 z7>v-44)eyv=m!a6Z3Olh`0qZ-4@Lw@VOM@}af!IHK@fL#>}!2OYcs;PAK~H`*lSfW ztVQ_UfB4f=yW;rWtFD{>rrT<2V*Z=dS2K4Er!gGEa01~$+IMh)OsA|T2yfpY+}rHl z^xNK>lNj3UJtfa}ER~K}uoL?vjZ^xn^rcAzMy|9-U8>e;+m*ube4|!m3Z3S#&SDe# zpU_#ezxkN4^JRpZO^$u3pX(aQ*efb3q-jQbF)Np371*VvuCC4~rJtaU{_*R=C2!si zlW5WEIfT^h{zJ_h!gbN-OgTJkOZj0zwjUWykbosZX27<}AigkyysEm>A-;U?I~4?WFV90!;fm(j2<@%h%+IL<6* zQ8jKNfC3kFyCScolu)J)A!X~?-pN7LUD2YYe0Bzz#2zH^8UkGQ0Q{%! 
z@4WcEy6v^=fE&-wPYk>lH=K+y=JtpO(s*ywax0$s5AV~goNS`;1B&z`pO{>68%68x zQ`)>y<{iMR%Ru+tvN3Bnabs%5#(-Z~MAAeu&8w@UGGvCSS=sy?rVVb~r)Ifc#pL14 zqVfs{E!T#WZKb6N#{Ai@+qRMUlNrXHMT{>E9eK_*(F>PchI@PacK!alT0Niaax16X zJnf1^ZD0+~bkL0w$z*aBiT5&yD)Ebqw6wI6zO|*LrPNi&d;Gl?2x9&b>A#0@&}JaS z_lPfrvEC<@?)DUkmM<&yqcM{)HbLS@LF?S*`bZT=| z(d0C1CjKNNs>bXLop`TT>^||DKpMaK^qArJtJMjoTw~bfmKheQ@S&X4mI7C5T2j(u zz=34SXtjtYw_h_of;HMx6GKQ7Rlf~rX>st&?1>jzUv!JsqE|O;kiTl0Thfc_oj}gvq~5-9<3>}-Sj1uxnU1sdy)WZriC^HU z8{;DdO8NmKvp31L7LVe0Hxv!Ns-O8?Gi-2Qs(#EXt&7+V}E$j%DWtn`UU#*RNj>HuhAvE^|D)=dK6ZKAope zw*V4#%@6F~zn@D#c6kdxV1IH2x*$8#I=>t0TkKY%$~ND=SC1{z*Ls~_(N;rcw=Z@M zs~PpHk5|%;jjqrldviD^Zv%IcD6x~gJK;*tdwCthBb)(_}I#ocFr zWd^Mqt)sjctrKawpb4mN0M*0!ImGW zH;(rn?1Gy}biC}n)U&IlUgI08>%osyFF~~|Wi)D=Ujvu15s;N-dPPqA#OoV|A=4w< zf;X!H)V-%iz7+WREfJN|y6@Qe|bl{$6PN_>r$=R=USCes~#Ya%RX!PcKLYl$( zCbATaVn^7D$41KGu(R(l*_|IsdC2EdZcfS>APaY zZYLuAqf{Odj^2@H^~Cz=vBJyq8hLp!x{sC2ySqiv$Np{k`nxjs=le3^>mok59ROIh zJcbEs;b$3!ScCkH&(T+3H($f$z9+H~=`p#EmKl#go?cRV#=b(4Zauyd86=+#EgNM- zRXf{>f>FfplY{ehzu6l$EQvqW>J-kqqYl%N9O&%i$?8J?pCa=&!!pkGy+;0l8_3cM z4RUsN?&*F}CE&5b!eEOZMsdcY zfRWGnCli!co5d@h=S|IYg|I4F6*{Ni+}e=eT4yn6=$GX+l9BI!KK=BE2dL=8)io)1 zVercOW7E|EFT9K2C+WCau(~jbOiIEeGOS2v-!HmepYwWV$9e&jFiaw0SV$sktkT1E zi49aLwMx#(N_ed?I}`9zGf|#`_GPGnV3mx4Zyy+%+3x1YH8J8UXIhy8$Bf)C4vEK} zQq$nSb>fdF`VrEfUR|QHNkc;;KGjA4L@hEFXF%IEZCqQLk62k*MSZQUeFUpf0>rkO1Ka+R-MF>kBu{0t49>|jK>3<-9LPpt?8t9gMJi}i_jAxT`Fwcw2%Xat zL#&+oWapYT0Y6dZ29mwoPPw|e)?2@hy0A$xrjv9scYA;rQm2OL4)ZnI0L9k#j4i9K zM*?KjF~QvZz*%#k$lK4?*H^=|@HkEcN`_@fDV^2iYGl!!B+%hbRMW}_AH}GUW_t=f z(Jpar&1rQEVHBRMpGL-IV&Dt-4$@5LHot*)rE>%ID&gF*&z^S0%nx9TB*e~i86b<6 zZlX2PXLoS4fXo|lg?hei6EM{L*m5h0RT%{ZEBa(A%59`NVCQG2t=vs&P>J>Njik|T zU=Y2S>CY1hieHdWws`Y;?u9sM^y0z7;?TKoFs_)`8h3Y0j`}sJDck&95;ZP)ik2<-H?ad$_ol|+D|&ITBT)g`DH8`WNaVvTvS$m81C zxq<0r$C(|DmBCZn3wVK5989Er6gjnd?_U88o~XUFqCTDh6G!myu(r1T1VsuvGa$oa z;rF6)$B!c!IGDKS!3TC(9W7~kE=Z16{d7fX;|A0GImIMr{6-sqMozops9 z>r=J{+>m>F5dk(%w3q{$o3c^skBo=S;bgIGPsDPj`5MX6 
zZ>zaY27AbG86nv_f^!)?VrN#|H9|vbShW-FAE6x;D@2f9?CAK{kJWn+BUo8U$EFkD zS1y$8_&*wLp0W|RL_A@+dW zqSYdCXWgJ+O@~dWeRPCCEUTWmD!;{}Ir9+aWV|_`E=XcB(1fy|k0G|6a`=rIxx(ky zC4msVQv4&s$Kss0y|Pua?BU!hK6@AmQ&Q3w4ElkXGeZ34-PVjEf?@CvG6p zD1(6iYD6J--GO_PSX6Ti7jS3-{!En(1}Y-KY|u7rWc@=oY5eKpDjtK#DjUuzXO^jf zpn{}+vcnib51W=~abxaijkvLBPd5@Ri<3Hb8c$RL2DgV;u^mPwn(D7oHI_jbdxEE@OQ9bpk|FM8Q2l zxc@`N1lmap9)4Qf6|5GSoZdMLTj2uRVvqRpKp!$>$jC4xgrJ6!9^V(A=|q>tf>_De ztLsSJmbyl}7dUTcRB>+UJX!n}gC)vOhrxOy6~u&8EmxF&1cQBrVH}lLgw5}BcifR3 z#Sm01!a-DgU*n*hJ|iwH-od;i5anOFN5V$LSpyp@VEKaE~DQ$F8!-3*1D8#gq%{G3R0{fSPbZHk)6 zsXW-zGh3%XtQf0P&~!{a?rjgI2b@pXR9Q+ySDL?){?qV_Rt z5b*iy6luP1)r&iP-=amxvbau3HJsw+$clU?U!e%-HX<>EH#}aA|Jh8L48FQ-1veWWi zh*TNvGkC@1DG*2Q1?T(w(TU@IuOS9>A^W3lcNkK!rRDkqSVty=SxQfXZxvy_tV+y{ zIIq5ih%h@Fxq93DRn1x!pc3G@2u5>#Hwi}Ws71CT%+*dvp4m-Vqu|t>%h)oB!n8!D z%p!QWP_p`OK1#(xYs>A3S)&kEfx9Pb`g>_IaEZ*f@`DTHi{g|lqDJ#6yN@RhcrShi z&mWvg`tO-(!B=cn(!B6u(HKcZIRh3ILI(hf7?Kp`Ew)TyT@+?HYLvk~YwMt6-7*zy zae8X-ZG8+Ut&Ftbp~o3%Stz(a9y3I>TBs_sCg=7Y1Py#=m13A*-Ketgl6LRn&H(s! 
z8l>OVRTlYBY3UyjkXO2OEAnoxZlK5*4K?A;bRM%sC_$~CC)SvyiCCHCN&(J4mKZZnaVMz80`A0*7_b{J9LCA(Im*0h)+rpLbV(mP z2#MJEBSJ{jKqS-hq*aPwFjgx?!1h!IHCqMu7ZBf@WLgm+O2=7;OntNkI@kOS)K0cK| zHkkf=GQn{AGiuzO>6mTv#JWg0AyTn)r^JT%g3)#q<2($syUFu6k?vmi6|0{|-d==- z>G3}3>`=+Z;Yg(JsO!&!RcA%y&s^bc&OT!6h(O}A6th;;+@j|xpRgrrH?Lt^0Dazf z){2$WD1$5v8nnIE8)q`J%-=+Y7e>eaL8t8U`XR`LD|@VZ-5R=}XfFoUMfT9uVowbt zV>CW4iNK9Ry1YEnV`s^w>U$R;o#;`aqRH>~r(?4$}(YjihYibyrk!N_w-jQdMu=ZraDsJcj}3S20ti=N49=KrzhIVK2+ z6ktO{i3L<6&PV|@`u2ClsjAais9ke!i(1N$$ErQTFdCWZ(g$6J9#hPgMe~HT!Sd1m zq@PcNTUu>;Wh=RO`HOz!-V&5MQ6GZG!CNOUos(~&#ZuD$6o@tSFJgD;vQo8r7cjZSN z-CAL(7<3V6sNmdARNRc>-bJu#TF3toqG`e{H#-H0{N5wR-1ki746n`K26IKf?yZHF zge3lD&wWJZml~xGNZyhk#1rGf?ckU0YrrOQ`>YYLRl2&KE%~d%y<+KCKn*)OT_v^A zRcn>8bF|c{S<`p|3tn=tFyB*QW*b#(pMXJ-zc2Z5``t z!x2lB`Fn!526v4wr%c34Tv#})DwOSKyb(9jA03m(L4hb?)Ns2L}Z@E~-!Olo%QASC)*RE;1gnjsC zQDky(!nVbP}}pR z9I+TZic`r zMLYIlDMp0hyu4Oo3xzxWR$YuG;w3Lr|1{LVN#x=m~)Boat;M);%~T0q+~4 zn3f3%qc-)%jn%b%D4M}B8U!xLH)VO{I%xm;18M#t_z8pX$CR2i{Y#XCx_@1w+=z~3 z8%OVcGXttzw_+G|r@J>D`6u}@pDzokquqGs**FoM7&M>0dhWaB$&4);y!K_Mkg_s$ zV&&ikq272~G`9cb@mk3=a3&(o>jkggBNz%YusnK49d*H>!lBIqh?7CZrVS|stcMIV z(Pd>@2aIq5^2Xe~1&V^3-EZ7O2%euns7r8>_y1zla5_~$Cl%LM7pL_gUZRHcCnib- zsBum+^;e9hRW`9U{gC$XMEIp>5qEddPF0g zXWZd=PK;JKQ_>Ty00drRY4K9{&;U{@(H|v^c+D9`Q;!K@Z~j+6W?+_6i8F6g&(9YI zmbU0=S`IU~xizbaq|Hm~(m+liS#lHZq*XnxcoQ)b_8#w#L?1k4%b{sxui1JYi*v|- z!Vx1%bZ|C;r~j!jcHe%2F>mrC!MGA9^$Ol}JF$uvfDvcg+@t}_;X&D1LLj6!qXeIg z*`EUE$q7wzc_1;91EV;62;l=hJy)UC0g?MWhIAQCMHcP|;LHm397<^$_kRuJ&(X=@ z{5e_2pq?mMN8yi`$)V$~*ONoxkQika%E7RoK{KFt9glZP@D_~&w9BAj_L7M^?NKsw zbH|IY1|JWsGvU2V+UL)J&%yuJz}*zXRwKJ_0nCiZ;2J!#O}&Fit#rbcHrnC2Ai9M| z0TB!54Lga?w_FN-)>REVHL%l*twlT)znjB*svY#9xi`^&K-!!GBv%n znWPM1*~_6iI20$H`KDHKDh718k!A!3s?d^7rCQ44tteyOAR(Onk^BjaKLiLZX+5uw z6lU`eR{tBH$a S!steX_U^IRonvx5;(q{#fgZyE literal 0 HcmV?d00001 diff --git a/docs/time/index.rst b/docs/time/index.rst new file mode 100644 index 0000000..95063aa --- /dev/null +++ b/docs/time/index.rst @@ -0,0 +1,799 @@ +.. 
include:: references.txt + +.. _astropy-time: + +**************************************************** +Time and Dates (`astropy.time`) +**************************************************** + +.. |Quantity| replace:: :class:`~astropy.units.Quantity` +.. |Longitude| replace:: :class:`~astropy.coordinates.Longitude` +.. |EarthLocation| replace:: :class:`~astropy.coordinates.EarthLocation` + +Introduction +============ + +The `astropy.time` package provides functionality for manipulating times and +dates. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI, +UT1, TDB) and time representations (e.g. JD, MJD, ISO 8601) that are used in +astronomy and required to calculate, e.g., sidereal times and barycentric +corrections. +It uses Cython to wrap the C language `ERFA`_ time and calendar +routines, using a fast and memory efficient vectorization scheme. + +All time manipulations and arithmetic operations are done internally using two +64-bit floats to represent time. Floating point algorithms from [#]_ are used so +that the |Time| object maintains sub-nanosecond precision over times spanning +the age of the universe. + +.. [#] `Shewchuk, 1997, Discrete & Computational Geometry 18(3):305-363 + `_ + +Getting Started +=============== + +The basic way to use `astropy.time` is to create a |Time| +object by supplying one or more input time values as well as the `time format`_ and +`time scale`_ of those values. The input time(s) can either be a single scalar like +``"2010-01-01 00:00:00"`` or a list or a `numpy` array of values as shown below. +In general any output values have the same shape (scalar or array) as the input. + + >>> from astropy.time import Time + >>> times = ['1999-01-01T00:00:00.123456789', '2010-01-01T00:00:00'] + >>> t = Time(times, format='isot', scale='utc') + >>> t +
+ +To repeat the above and suppress *all* the screen outputs (not recommended): + +>>> import warnings +>>> with warnings.catch_warnings(): +... warnings.simplefilter('ignore') +... result = vos_catalog.call_vo_service( +... 'conesearch_good', +... kwargs={'RA': c.ra.degree, 'DEC': c.dec.degree, 'SR': sr.value}, +... catalog_db='The PMM USNO-A1.0 Catalogue (Monet 1997) 1', +... verbose=False) + +You can also use custom VO database, say, ``'my_vo_database.json'`` from +:ref:`VO database examples `: + +>>> import os +>>> from astropy.vo.client.vos_catalog import BASEURL +>>> with BASEURL.set_temp(os.curdir): +... try: +... result = vos_catalog.call_vo_service( +... 'my_vo_database', +... kwargs={'RA': c.ra.degree, 'DEC': c.dec.degree, +... 'SR': sr.value}) +... except Exception as e: +... print(e) +Trying http://ex.org/cgi-bin/cs.pl? +Downloading http://ex.org/cgi-bin/cs.pl?SR=0.5&DEC=-72.0814444&RA=6.0223292 +|===========================================| 1.8k/1.8k (100.00%) 00s +None of the available catalogs returned valid results. + + +.. _vo-sec-client-scs: + +Simple Cone Search +------------------ + +`astropy.vo.client.conesearch` supports VO Simple Cone Search capabilities. + +Available databases are generated on the server-side hosted by STScI +using :ref:`vo-sec-validator-validate`. The database used is +controlled by `astropy.vo.Conf.conesearch_dbname`, which can be +changed in :ref:`vo-sec-scs-config` below. Here are the available +options: + +#. ``'conesearch_good'`` + Default. Passed validation without critical warnings and exceptions. +#. ``'conesearch_warn'`` + Has critical warnings but no exceptions. Use at your own risk. +#. ``'conesearch_exception'`` + Has some exceptions. *Never* use this. +#. ``'conesearch_error'`` + Has network connection error. *Never* use this. + +In the default setting, it searches the good Cone Search services one by one, +stops at the first one that gives non-zero match(es), and returns the result. 
+Since the list of services is extracted from a Python dictionary, the search +order might differ from call to call. + +There are also functions, both synchronous and asynchronous, available to +return *all* the Cone Search query results. However, this is not recommended +unless one knows what one is getting into, as it could potentially take up +significant run time and computing resources. + +:ref:`vo-sec-scs-examples` below shows how to use non-default search behaviors, +where the user has more control of which catalog(s) to search, et cetera. + +.. note:: + + Most services currently fail to parse when ``pedantic=True``. + +.. warning:: + + When Cone Search returns warnings, you should decide + whether the results are reliable by inspecting the + warning codes in `astropy.io.votable.exceptions`. + +.. _vo-sec-scs-config: + +Configurable Items +^^^^^^^^^^^^^^^^^^ + +These parameters are set via :ref:`astropy_config`: + +* `astropy.vo.Conf.conesearch_dbname` + Cone Search database name to query. + +Also depends on +:ref:`General VO Services Access Configurable Items `. + +.. 
_vo-sec-scs-examples: + +Examples +^^^^^^^^ + +>>> from astropy.vo.client import conesearch + +Shows a sorted list of Cone Search services to be searched: + +>>> conesearch.list_catalogs() +[u'Guide Star Catalog 2.3 1', + u'SDSS DR7 - Sloan Digital Sky Survey Data Release 7 1', + u'SDSS DR7 - Sloan Digital Sky Survey Data Release 7 2', + u'SDSS DR7 - Sloan Digital Sky Survey Data Release 7 3', + u'SDSS DR7 - Sloan Digital Sky Survey Data Release 7 4', + u'SDSS DR8 - Sloan Digital Sky Survey Data Release 8 1', + u'SDSS DR8 - Sloan Digital Sky Survey Data Release 8 2', + u'The HST Guide Star Catalog, Version 1.1 (Lasker+ 1992) 1', + u'The HST Guide Star Catalog, Version 1.2 (Lasker+ 1996) 1', + u'The HST Guide Star Catalog, Version GSC-ACT (Lasker+ 1996-99) 1', + u'The PMM USNO-A1.0 Catalogue (Monet 1997) 1', + u'The USNO-A2.0 Catalogue (Monet+ 1998) 1', + u'Two Micron All Sky Survey (2MASS) 1', + u'Two Micron All Sky Survey (2MASS) 2', + u'USNO-A2 Catalogue 1', + u'USNO-A2.0 1'] + +To inspect them in detail, do the following and then refer to the examples in +:ref:`vo-sec-client-db-manip`: + +>>> from astropy.vo.client import vos_catalog +>>> good_db = vos_catalog.get_remote_catalog_db('conesearch_good') + +Select a catalog to search: + +>>> my_catname = 'The PMM USNO-A1.0 Catalogue (Monet 1997) 1' + +By default, pedantic is ``False``: + +>>> from astropy.io.votable import conf +>>> conf.pedantic +False + +Perform Cone Search in the selected catalog above for 0.5 degree radius +around 47 Tucanae with minimum verbosity, if supported. +The ``catalog_db`` keyword gives control over which catalog(s) to use. +If running this for the first time, a copy of the catalogs database will be +downloaded to local cache. 
To run this again without +using cached data, set ``cache=False``: + +>>> from astropy import coordinates as coord +>>> from astropy import units as u +>>> c = coord.SkyCoord.from_name('47 Tuc') +>>> c + +>>> sr = 0.5 * u.degree +>>> sr + +>>> result = conesearch.conesearch(c, sr, catalog_db=my_catname) +Trying http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/243/out& +Downloading ... +WARNING: W22: ... The DEFINITIONS element is deprecated in VOTable 1.1... + +To run the command above using custom timeout of +30 seconds for each Cone Search service query: + +>>> from astropy.utils import data +>>> with data.conf.set_temp('remote_timeout', 30): +... result = conesearch.conesearch(c, sr, catalog_db=my_catname) + +To suppress *all* the screen outputs (not recommended): + +>>> import warnings +>>> with warnings.catch_warnings(): +... warnings.simplefilter('ignore') +... result = conesearch.conesearch(c, sr, catalog_db=my_catname, +... verbose=False) + +Extract Numpy array containing the matched objects. 
See +`numpy` for available operations: + +>>> cone_arr = result.array.data +>>> cone_arr +array([(0.499298, 4.403473, -72.124045, '0150-00088188'), + (0.499075, 4.403906, -72.122762, '0150-00088198'), + (0.499528, 4.404531, -72.045198, '0150-00088210'), ..., + (0.4988, 7.641731, -72.113156, '0150-00225965'), + (0.499554, 7.645489, -72.103167, '0150-00226134'), + (0.499917, 7.6474, -72.0876, '0150-00226223')], + dtype=[('_r', '>> cone_arr.dtype.names +('_r', '_RAJ2000', '_DEJ2000', 'USNO-A1.0') +>>> cone_arr.size +36184 +>>> ra_list = cone_arr['_RAJ2000'] +>>> ra_list +array([ 4.403473, 4.403906, 4.404531, ..., 7.641731, 7.645489, 7.6474 ]) +>>> cone_arr[0] # First row +(0.499298, 4.403473, -72.124045, '0150-00088188') +>>> cone_arr[-1] # Last row +(0.499917, 7.6474, -72.0876, '0150-00226223') +>>> cone_arr[:10] # First 10 rows +array([(0.499298, 4.403473, -72.124045, '0150-00088188'), + (0.499075, 4.403906, -72.122762, '0150-00088198'), + (0.499528, 4.404531, -72.045198, '0150-00088210'), + (0.497252, 4.406078, -72.095045, '0150-00088245'), + (0.499739, 4.406462, -72.139545, '0150-00088254'), + (0.496312, 4.410623, -72.110492, '0150-00088372'), + (0.49473, 4.415053, -72.071217, '0150-00088494'), + (0.494171, 4.415939, -72.087512, '0150-00088517'), + (0.493722, 4.417678, -72.0972, '0150-00088572'), + (0.495147, 4.418262, -72.047142, '0150-00088595')], + dtype=[('_r', '>> import numpy as np +>>> sep = cone_arr['_r'] +>>> i_sorted = np.argsort(sep) +>>> cone_arr[i_sorted] +array([(0.081971, 5.917787, -72.006075, '0150-00145335'), + (0.083181, 6.020339, -72.164623, '0150-00149799'), + (0.089166, 5.732798, -72.077698, '0150-00137181'), ..., + (0.499981, 7.024962, -72.477503, '0150-00198745'), + (0.499987, 6.423773, -71.597364, '0150-00168596'), + (0.499989, 6.899589, -72.5043, '0150-00192872')], + dtype=[('_r', '>> from astropy import units as u +>>> ra_field = result.get_field_by_id('_RAJ2000') +>>> ra_field.title +u'Right ascension (FK5, Equinox=J2000.0) (computed by 
VizieR, ...)' +>>> ra_field.unit +Unit("deg") +>>> ra_field.unit.to(u.arcsec) * ra_list +array([ 15852.5028, 15854.0616, 15856.3116, ..., 27510.2316, + 27523.7604, 27530.64 ]) + +Perform the same Cone Search as above but asynchronously using +`~astropy.vo.client.conesearch.AsyncConeSearch`. Queries to +individual Cone Search services are still governed by +`astropy.utils.data.Conf.remote_timeout`. Cone Search is forced to run +in silent mode asynchronously, but warnings are still controlled by +:py:mod:`warnings`: + +>>> async_search = conesearch.AsyncConeSearch(c, sr, catalog_db=my_catname) + +Check asynchronous search status: + +>>> async_search.running() +True +>>> async_search.done() +False + +Get search results after a 30-second wait (not to be confused with +`astropy.utils.data.Conf.remote_timeout` that governs individual Cone +Search queries). If search is still not done after 30 seconds, +``TimeoutError`` is raised. Otherwise, Cone Search result is returned +and can be manipulated as above. If no ``timeout`` keyword given, it +waits until completion: + +>>> async_result = async_search.get(timeout=30) +>>> cone_arr = async_result.array.data +>>> cone_arr.size +36184 + +Estimate the execution time and the number of objects for +the Cone Search service URL from above. The prediction naively +assumes a linear model, which might not be accurate for some cases. +It also uses the normal :func:`~astropy.vo.client.conesearch.conesearch`, +not the asynchronous version. This example uses a custom +timeout of 30 seconds and runs silently (except for warnings): + +>>> result.url +u'http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/243/out&' +>>> with data.conf.set_temp('remote_timeout', 30): +... t_est, n_est = conesearch.predict_search( +... result.url, c, sr, verbose=False, plot=True) +WARNING: W22: ... The DEFINITIONS element is deprecated in VOTable 1.1... +# ... 
+>>> t_est # Predicted execution time +10.757875269998323 +>>> n_est # Predicted number of objects +37340 + +.. image:: images/client_predict_search_t.png + :width: 450px + :alt: Example plot from conesearch.predict_search() for t_est + +.. image:: images/client_predict_search_n.png + :width: 450px + :alt: Example plot from conesearch.predict_search() for n_est + +For debugging purposes, one can obtain the actual execution time +and number of objects, and compare them with the predicted values +above. The INFO message shown is controlled by `astropy.logger`. +Keep in mind that running this for every prediction +would defeat the purpose of the prediction itself: + +>>> t_real, tab = conesearch.conesearch_timer( +... c, sr, catalog_db=result.url, verbose=False) +INFO: conesearch_timer took 11.5103080273 s on AVERAGE for 1 call(s). [...] +>>> t_real # Actual execution time +9.33926796913147 +>>> tab.array.size # Actual number of objects +36184 + +One can also search in a list of catalogs instead of a single one. +In this example, we look for all catalogs containing ``'guide*star'`` in their +titles and only perform Cone Search using those services. +The first catalog in the list to successfully return a non-zero result is used. +Therefore, the order of catalog names given in ``catalog_db`` is important: + +>>> gsc_cats = conesearch.list_catalogs(pattern='guide*star') +>>> gsc_cats +[u'Guide Star Catalog 2.3 1', + u'The HST Guide Star Catalog, Version 1.1 (Lasker+ 1992) 1', + u'The HST Guide Star Catalog, Version 1.2 (Lasker+ 1996) 1', + u'The HST Guide Star Catalog, Version GSC-ACT (Lasker+ 1996-99) 1'] +>>> gsc_result = conesearch.conesearch(c, sr, catalog_db=gsc_cats) +Trying http://gsss.stsci.edu/webservices/vo/ConeSearch.aspx?CAT=GSC23& +WARNING: W50: ... Invalid unit string 'pixel' [...] +WARNING: W48: ... Unknown attribute 'nrows' on TABLEDATA [...] 
+>>> gsc_result.array.size +74276 +>>> gsc_result.url +u'http://gsss.stsci.edu/webservices/vo/ConeSearch.aspx?CAT=GSC23&' + +To repeat the Cone Search above with the services listed in a +different order: + +>>> gsc_cats_reordered = [gsc_cats[i] for i in (3, 1, 2, 0)] +>>> gsc_cats_reordered +[u'The HST Guide Star Catalog, Version GSC-ACT (Lasker+ 1996-99) 1', + u'The HST Guide Star Catalog, Version 1.1 (Lasker+ 1992) 1', + u'The HST Guide Star Catalog, Version 1.2 (Lasker+ 1996) 1', + u'Guide Star Catalog 2.3 1'] +>>> gsc_result = conesearch.conesearch(c, sr, catalog_db=gsc_cats_reordered) +Trying http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/255/out& +Downloading ... +WARNING: W22: ... The DEFINITIONS element is deprecated in VOTable 1.1... +>>> gsc_result.array.size +2997 +>>> gsc_result.url +u'http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/255/out&' + +To obtain results from *all* the services above: + +>>> all_gsc_results = conesearch.search_all(c, sr, catalog_db=gsc_cats) +Trying http://gsss.stsci.edu/webservices/vo/ConeSearch.aspx?CAT=GSC23& +Downloading ... +Trying http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/220/out& +Downloading ... +WARNING: W22: ... The DEFINITIONS element is deprecated in VOTable 1.1... +Trying http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/254/out& +Downloading ... +WARNING: W22: ... The DEFINITIONS element is deprecated in VOTable 1.1... +Trying http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/255/out& +Downloading ... +>>> len(all_gsc_results) +4 +>>> for url, tab in all_gsc_results.items(): +... 
print('{0} has {1} results'.format(url, tab.array.size)) +http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/254/out& has 2998 results +http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/255/out& has 2997 results +http://gsss.stsci.edu/webservices/vo/ConeSearch.aspx?CAT=GSC23& has 74276 results +http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/220/out& has 2997 results + +To repeat the above asynchronously: + +>>> async_search_all = conesearch.AsyncSearchAll(c, sr, catalog_db=gsc_cats) +>>> async_search_all.running() +True +>>> async_search_all.done() +False +>>> all_gsc_results = async_search_all.get() + +If one is unable to obtain any results using the default +Cone Search database, ``'conesearch_good'``, that only contains +sites that cleanly passed validation, one can use :ref:`astropy_config` +to use another database, ``'conesearch_warn'``, containing sites with +validation warnings. One should use these sites with caution: + +>>> from astropy.vo import conf +>>> conf.conesearch_dbname = 'conesearch_warn' +>>> conesearch.list_catalogs() +Downloading http://stsdas.stsci.edu/astrolib/vo_databases/conesearch_warn.json +|===========================================| 87k/ 87k (100.00%) 00s +[u'2MASS All-Sky Catalog of Point Sources (Cutri+ 2003) 1', + u'2MASS All-Sky Point Source Catalog 1', + u'Data release 7 of Sloan Digital Sky Survey catalogs 1', + u'Data release 7 of Sloan Digital Sky Survey catalogs 2', + u'Data release 7 of Sloan Digital Sky Survey catalogs 3', + u'Data release 7 of Sloan Digital Sky Survey catalogs 4', + u'Data release 7 of Sloan Digital Sky Survey catalogs 5', + u'Data release 7 of Sloan Digital Sky Survey catalogs 6', + u'The 2MASS All-Sky Catalog 1', + u'The 2MASS All-Sky Catalog 2', + u'The USNO-B1.0 Catalog (Monet+ 2003) 1', + u'The USNO-B1.0 Catalog 1', + u'USNO-A V2.0, A Catalog of Astrometric Standards 1', + u'USNO-B1 Catalogue 1'] +>>> result = conesearch.conesearch(c, sr) +Trying 
http://vizier.u-strasbg.fr/viz-bin/votable/-A?-source=I/284/out& +Downloading ... +WARNING: W22: ... The DEFINITIONS element is deprecated in VOTable 1.1... +>>> result.array.data.size +50000 + +You can also use custom Cone Search database, say, ``'my_vo_database.json'`` +from :ref:`VO database examples `: + +>>> import os +>>> from astropy.vo.client.vos_catalog import BASEURL +>>> BASEURL.set(os.curdir) +>>> conesearch.CONESEARCH_DBNAME.set('my_vo_database') +>>> conesearch.list_catalogs() +[u'My Catalog 1'] +>>> result = conesearch.conesearch(c, sr) +Trying http://ex.org/cgi-bin/cs.pl? +Downloading ... +|===========================================| 1.8k/1.8k (100.00%) 00s +# ... +VOSError: None of the available catalogs returned valid results. diff --git a/docs/vo/conesearch/images/astropy_vo_flowchart.png b/docs/vo/conesearch/images/astropy_vo_flowchart.png new file mode 100644 index 0000000000000000000000000000000000000000..dea5962babeacb494a2843a7a3d6af2f17aa48ac GIT binary patch literal 20057 zcmeIacTkhvyEY0K5o|Q+(wovD^o~kzA{|5!Fp=ItniNF^DFOlpLJ^eSLlF?cLJvW@ z^xh#TRrtRoTWR9hlh7TP4&ht zJiJp-JUsk*A_DM}X?8|?@DINGEtP9{MVQNr;6D(!qLv~aUTO5XgS)4}|Cy~-Z)xG- z`Eugn1wF>Y+XFueTEN5e6vV?@vc$uadWDBa?Sib)mBGW)8c@5TsQ=*3YRY)Kmx|}n z#-YXD(@l0s)$vor#@EP{R7t~Q)2%4g-P6ygUEOY=x)x3}#46THclF6FGveHEj&v&4 zx5T|SRWpi9yKY#MM~rMjVVY@DkCL&$rc3VX-+~g;{^*5 zO`w}{p!ts#7zrd$GPlFd;uA$^;K9Y5R!t-!7>Wi&3S1R}+kggv1auFdfK-($2&w^Z zP9Y7#gKUFuFPtM%U{1lq@TM?$U&A9HZ9sILA`J>kz{6;^$KA($N&=dB20Y3GJix!NaMN3hPNApA-ly27M4%Ux%5Ep#)!zS5SjfaVQQ*LB;(HS0LcbQk<5a zu!mYVo=7>7B@p711Y2QvuPE|R)_i~9kd#yzjC-Imb0cE)gLDx2TgsqX=C2~b!WakJ zcEfjuQI1Dg1I$XEPcoJ@7A%$k$w(QbN>d4oy5Vjlo9G&4MOgoE)2@#ki2-M{BEIAW zQ(1Hrf7n}#&{dyx`{o&5o9$aFLrVhA%RGb_>}-k}3y_d^*N-t9nHeK}>6x<(J0|L68EB!2*apP~W6y;1_>{r@ zDTB25Khlu7L=DOY2IC-M4gxOrT^r%M$~sNlC(uDodizhe|_Wq({^*Bsq1(I zqok^?u=d7bdE#W3b*6(ET3|oQHY|}qZCczmK1}Id#QVTuwY7og)a^v(8QjK824#FW`*lFXx&-pDa}l zaeRZ&bHg4m{|0xf2fhm)K^-RHL`E)WAfye%S{$EG=y-5WS 
zGqPsYME-rgf)qZ|L`oX__}@+52NEck2_yVB0{k6JeuW8qK71r1fvXFr?cdK|1gBkG zc}MYYnD}2I{a*{ofq%%{dvw+}C|{ePNUFp060tPSKnOtqJyB3CKi1>I*G8$m+mJrn z@*)Y8@;Q|u(wq=HOhLHt?Zn6SLE`W1I0PL#lf|9y3^1dS*A&*!5+g+$#>oZDCG6?G z$*1568MO?fkD&Q|@pXtqJ3p`95=UMo@F_XR%yk<{G5kI`1GReYw9YF5=sY7;0%mtV&`Kd)lz_XHswwOiKcUgJ2GNFMu<7)n)Oe9$ z3EK&@sb5B{0OS%xKb9{78nv=rehX<*ShUjdUVj*s9pz}nCjo8Li=rXD9~q=x7V4UG zQ#1B9Sp#Ac{q2$)feF&2BSqZ_9-+ykBVPP>y!{x$v zjUrRxoXYv2$bi*YS6Fqb|CdmOj*|_z7hV$fqXW+Idk4@$@pn83An&N$wMrGpY58^q z`Ax@8DrwXr(iK)HtJ_LVaP~x$wx()NwHV=s`8fZ;fUPlxYZ3>y6XU)HzdX2V`!-P;I)AOG?1ls?RjN9#!p0Rw{o#hC9g#xAHHC$o z)PQBFTzmesII?$#0tyElL(+^q)Q|*#1x=q=1!l5Xc zMw(N>V<#Jf+psu!uO>>P*qYrpPn~K{c`ps4OLv2%UOjt1+|I9ZI4u$u1?4G`enmjg zccLooOggCtL!l{%MrQ^Sp7!MEjaSC-O8Yrp#WM9re?-+@BS3U9SpDP_+3yd8rhN#C z@bB=sG584YY^@&jSRM~VH)(pWKJ?yFIzDDRef8S1cpNalT{)I(mw>6gCDHBCdJL`AMP zRoU@(uxJ^$waSsh*wxxokKwS5QpEUa<<*g#k9n(udd*^`R7ey3G$;*03MtMKwJE~Y zVtELtrJ$X|p-0}_-aFsBWpc+lMr5CAwD6gD=nuzL3TUPW!YlW~xvRUHtA&1&oo$~C zBsJ>j3*(sZ(f%oUn?Mog{h<}ni|i0P7uYbv%aw<7^c-t2UtY_b^c<)3hxNL-A*B=x zBM6Q)oz|lJQmO?4Y6>YX8ftG!A>GJ915%vhTPN}<_g@D#FA#paM|)6PG|jW{Lj^-U zT{XSS(UjOqm(iS0H0(TQLUfv)Uw@DDBbyK?&u)RUZIN*^skB^2^GkMPm=ZBcmHEV$ zNIO;?5Wtq;_A-q2m3p;=PWz*8cZUzLUJY%C!q^`=F`aQ*F$TjZR90c2BnTo{Cseb% zFXCRb;7W_JN9lC*W;x@;J;RrR&ml3msI!!)Av{8!(~Q{F=?zhFQuM2eFPJDDDK{{h z(<}>&R0N{J08!z?=K{N?Z4XBeCnO8*IKIwgTexT3E>tn!Wq+Iizol_eL%p{6d&vBE z#tTUhp&a|8#Ms*W?_X|H+4I~|E=d^Z;kkVz6H2;5DQ>rE_X_vmv-V6qYDAaZVYUHs zKh5)2ui8?Ed?VIhMMqtXGb)aZih3@KaoGPRozy;U9pVHVAmNMuWW^j3#vuVc_syK` zUN#wWYQfXSrOqq+15aD(7ha9Agw8k}y2s4;t)G=~y7L;evXKQ@JofO_)n~tj+Z_bm zkS1v_ZNv76+`k`%dBFSh%8`(wdnIyc;aIzAgwa=N@a3Q;@+rmbH81|?Q_^{5^^RVA zp4Y61Q7fr(zL)*W((`dneWC~n)Qqh0gmT~ZN`dXNg?1kXz5ZoOzC1)n@3=H!oW+DO z<0IUz0Chg~^D4}9Qz<%aLjwAIQZ;{+tjLn3vWJ5mVX6G+Nbc3Ez+1JA(FQ)a7~Ct6 zX!Brl9&II!{1Fvo=hwX(N-Q#sj&Oc>I_;${$x1qlgxf>xGSxXsndDN5coC9v-ug{Xit_#e;n*o_+Hf?BOY$|#>Z9}lm9ZJ`LS*^D{kdW{c>4x08H`PjZ-of45h{z zvOG&sSOuGYHpJj=6OqA~qj;S(f=UKDJQOqWW}c*>Qu*Tojf_{1^{vSM>=Nj?O2b~V zr26qn_uUH?f>j%PMIatuu5}P9!JSzC 
z;WLh3A3H+Jzm@gfcqnf9cp|#G>|PV}b-Kk!hfM#7F@|VWKhbc_-#4MV1)ghBX3)Jh zbv~myg|CMaOL>yi;BYOoV>dd0&ZsLci^($!xWZKWn>Sv{s(+i(kNTAQctVJkDlSQ^ zsqD^c=th-cLE$D-Ag9EHeyMn}32(0Rn%lH?+zL3R`CH5@2QW0XVudV6;Wbj}U_#_z_w0)4h0dVD>r%c& zYlMTQk*g({TC-^jEp$#Pw*^bzQkaKsNkiq0GbznZn+#rUzf?=%Bcaj2s9WLJ6MsDB zl>~3lwQzJibTST$!r5Rmzga}VQ;=ZGn4qV19XoyZ9A9YWdBKL^$;ULUYr|34QAU+j zx&3(oI;MnDLMzjP@p)ovtW@4T|9a?f>NzL(bK0eTFDgB`YP~jEhOh)9r|C5XRGE1m zv$f~na>1SDLEKVbFW;pS!_y9~IIHUL=q@$plvm0yMc;Cg{=)&92>%H*(Zb%P0M!&HNm? z>~>uf)?Q!UCw634E)$f1;*7;N)A~o+E-+DN9=Asd{3j=) zz2Pox#`y>TV3I12W+%{>R&~x3l`Efs$*pz{TI@qiLB$x;6^p*@Hyh^wvb<2PVZsf8 z{Uw|Rgh@wU#%~YBVYI*S=&z`D`_y~10T2FfcVh=YtwN&}G;n@iI016M^)@%~lIi$o z^SJ;1UU%Y$&98HN`}N2BJ`E%fzn?0+mit?wF0i3U1refh#sAo1WRyVcTa+ONCb_d! z8Xd5bvgV&X%uqGvyUbJ{FzP)XkT!HX?dKP+IsB^ehqbk{3~LXXpUQ2rP|uV$GwzQc zAC5!A@_);EMRV9)0Al*fOg;@Ydys=_fUoe?@8xF&0(Wc253wkp}?Qk$HV3x;1$W-KC3HpTr`Q;>^ ztW4N7f=O7UOUG(c!*?Q`{ilP^FuGh^{LFOpeO~@}H#_a2uP6W9SF$H1GSJbX4Xr5@$THR$iA3KJS)Y{R}4Yw_NT7b;$w2nKUvu=ferIiv(J~d~3pVWMN&} z3x?uyXPUR5752Wiy^(f&upV3eNF?!zMw3Z-@x!frv%svO)Wl*BT$4acMp=!V!0MMz zX!-9O7uyHzLRUr)rlrSsF&^5L>o!=5Jq$5Npa-u-BiHk}?&QP4f%+p)qlQSw3k|nL+=h=ry%Sh41p{0H%OOG0 zC(4uR=~ql}pKiLaL!7<%IBT&zz_LByV*e|vZJcnxdm$=eHk~o8)wYUBK)Gc?3H=i5~uC$Isi%Xr^n0Q1|6EOKuT6sCu zVYYK*JYp$#P}%q#reLv|r}a8>3?9Y-5+G3BG|Pn?J3iWx4&06lFV0T&wUgpXmKog4 z-yR)*=du6E99zrLB}5G|gp^63!gttmL1V-jw8VZ0wkTRIc8+Zoc~|RQnRimRL5KFNK>ua4 zadl&fDg}jWcyL3kNsA_F4lW_FrbB*^x*V;j$nV8$D2yM>iH|<5LMr)F6h8Xo6Fuz1iU<7L6_!uHsu!1?*uaPbG z{>F!+y=5%w1=iV3nrri$b#yfwkGIQg5eR3GuyvT?fK~fznR8h#Y0mC^Cc$Yr$YFvQhH zT-bR?cJ#3H`1IKFdk}~clvl@(z@Njy_CwB#$J@KOhPmyVS|sIuocwNFYk2ke_~a)k z@wubQ?sYzm9cS?h4NEZ_sE@bIT;-$MA7g%Np2sf7p2uFt@1~=qla2~$r^^ikdHSTG zJI~K}TLgYrB2q|aB*4;g?kcHVdpeNzzAVF>ptx>333%+p5(tLLf&6Y_b{NX)_QQK` zd{FPw*j<>&+!Otv9!D7)cY9Q$DcwJgUUFg;Pf7L=M{uS2uTMT@^7@>$R`at*euO?H zT4wzj^w}jcf?48)l)>^Rea|V6s%#0|6GiII#*TBi-12aNap6b#*Y4X#V{f({DjDyH zS0^ev_T^x?h(}bujLeRmzkr_6)xel;st- za=*!p%E{&`N}vEk5Wp!&FS^4b=xrlvp<-TldD=-0WyNzO0i`P=B&bH3T(D<5Ly6z3 
zu2fbpA%QxwlUfh|S`(9R(K+M_+v?32)4ZAQYwzv}`ZxJJ4F~E3_fujyOK*Xe-mTV1 z4<7Jeg@RuttuuT0uw3Kc>baU?mHN4+DK*fx~RNS<2`L+P2 z08`T&PibdhBx~132shLdZJFi8FURsKVvWG&uU^?^JXNt&_h}Kdgg|E#vqzNfxz%U$ zCBg;9W+>~`kz>GH>e@7oZpz&v;!#>w&T%`m2Ynff%w_0}!lGiuLfQ$mUEz73x10&9Bq-reK{8}$ylcbF3&+WpGf1!mn}H(-$I zS!Y;T=sAfV${P?J%rG2qalUJaG&yI$s=|X`j!_SHl>~=(>t5v$v+L8~*jjEtEF#E% zNDxc&;yjuIxaM1&m7iC=Rcy>Fi8>F8)6f$Hb>Qu>h7N7+04iUQuR zoYK;*j8}+qwbH)>@cVoJ{by08Te{7c^Fg;OT^*ed_GuIYALGDB(jJm>2APJ^MoJOD z#q4JHZ;P7_z|GCfC;^lSH;)mp^E|yJHunWlq03cPPNuNzfTW2}1f3v(ak-34!+>6) zW^NalWDfZTH`u4a3?k7Rx)>}$kYjMUI7WFIt(@m|Y_GJuSL}L@X9ism%59#)X6Na7 zdMSkoZs_dT!kda`O5GVDy$Md|n{}vkqi`=j*G@?SN~t#0tAJOS|N87q5a4@0L2l*? zX6mX;pp^^#)0UN&_X_Oo^FJXf3`=>|l?sc}$C{85D6yp*31YyIA}KQ0K?I$h9aiz^ z&5M?rGd_*sJ!4>XM+vX6He67Lk z1NN&2x+=hG*Jq(+!1D=_t4f|tNO0f$f&udxB(3cKta7~7@ZiewkBQR_?^<&(xdJXKr>>)0a64m}Rs5n=T1dOy20RX;;mW{m7@Yl2H5C>PGX=OwG-M zedn7IeP;B+&$)v78==z|WkG&QwGAZG%|EisGbO`lD#c2O0@t$3A&>;h3I3l|&I#Gb zqKQ{=7E0#{!8r7`?NmFJ7j@QDR&pav>?vzmEqa&EE;*tBV#w6Cv1JSDACwT{767T^ z&WC^-DusJF)=>t)J8yE#834`Yw?GBfVBM!&fmur2VT)`+MCz{|4TSe^Sxi$Iwjh94 z*GQ@KE-jtym6pd8j19dok-;zLPo?!<0H(L8x&Qo`>6VdRU$_g5JN8R%FrKN=b;=fe zxH8HAta8kO7j%)_1@8E&D~An4CBqM$o|&Ej{S_4klW5cYZ_fLQj z*lJ;m)}(SY%x`82JUH54^BxbSgKz?(N9r{fh(M`%gIH4+*lIt&^IYJTQQ&NnwO@7d ze9Ef@Er10?4fta(;D}yUQSf))%}_dbmowH?qh7;pi0Wb&OGE35qIP1q7lR+HAE0Xx z6@^m4Zc+yYy&a-EBn0WRe)fk>p$p#73=jZ~oikXHS3#N_i)!DQT>2Jq3S|?^F#Zg> z{O;W;{tIw1%PyYV!kB_j8Tv1~*KxlNKdYz3ErGIAS7apsJI23p4G9pRbY#gg-bN|y z8JPKiWPs}l*KEQBmJbl66=FjUJ~N0m-SBW|3O2b^?n?MOZ!{tzZ_e%wJcH)fD^tT4 zqB@*L-hGU=xpVu8X*RHbQ`&WP7C?8uZj!YFPZI5_nyR+wso8ID_-S#okwweK8azbTp=p?%A<@42))A?7f^pOSoL(wgm2x?2w_g`7h0QR!{=dH_dPJh)y zv6r9s2#pFMSy}Zy8?b~S5-JZH_>)Gaf6)hO1gf zrj$Fnv%=H4Ujf>s>U_0gx;X)No!%^WJ8v82>Qn$+sZ z2|vYg7IdCrMPuC=!U^n@qSTlIL!)ij9X_YtoNXRS687JGo7s~ZvsWRno^{@abT+fPcW6Zo&J9O<$@3p3U*pm z=16l8LF$?OT?TR|`#2$`jGogQ45oA&bH9p|LM81ZajcW_=`<*G(Ryjs5=ewn@g8Sc zxwgHcfuJjBB+!CJLM=GB!L4Lhn92QVu)`*BVqV8QqCD`Ne4q9g^pGKr^zJL6cTBB8 z7vxPw)8y|D{SwQ6=pk`|I}NI5ILVRhq3Sw 
zm1~1Nf^*2@r+>kkPKtJknq)YZ>|WvXO3e&9lCbq|oHK;^{}D3)IB9`1B*9WZr|R6u z1Bike(z(;10U=2dHXYmfV?5~RPl54dH_3U;cc&Z~CDeRfJ+|J$;mK4}!B1f(uNb8o+D z*q=ut;`MvqeptwcjgfSH4>``{TTY=L zJOdLfl0-QoOXTLW?lAS00V$M$doi-3e__QmZL~Ha?ZLSxql}A#TtmL1lTnVYncTH{ zbQ-|xI9FV*0k?$W$1AIa#&sg%zf}K34(4aryFpbx9*n18O(RQ3tlsm`aC!TKV(6Op?oFo-A|6Gv{8LXc})U}!E zK(Njdf60IMu2)+g?#`UXT(YK8xQExv=NnwX-kma?8YY$9A3O23$p7#ec zw8ph*iu6QDQWyZM<&f8sP4iNX8r+L4P0Lg@6PW3Fw<<@({@UwNjtF;lRFSniEFsj3 z7Vb4udJs@tV?Ti4aHzf~2Mlxd6*NbE%LQhulO=4%$yar5je7#kT%70Z6w4jimYXSo zV)ZcRIk@JbWnOzhj7fs8zJlRBgO>=59xmxDs6Eu{)ZG`EAi>pD`A+wFxnmpqiS*(M z9~^D71xPSeFA|+Dx%4Vyk^Ea+r`D~vx>xsJ`I5k4xijx~T2ltsBkO1DjYb1S90sgA zEF$Xc#ql*>3_dL+TuZvvNwc3i&M^2_m@F^C9O`SdFRj;}wY)0WI7`#c{v!E}dgdix z+sI%jDmI+Xr~$#SiZH*9^5gfk82}SQN%x~HcS7^&=O6PWvwZ+SZ0%=iA4%S$#O6T<*)t3o1mqw;l z+{)=kHl)O;lnNAlY}|9>%x+Unj>a46NP5w;Nw#cDmN~GT+sF|mG_#UE@y{i`K5lEZ z%X!uMu_<3eN66W~PAMCBX30c>4eV_cAMX`=kGM9&_HV8RJ<-x<YP;U)}7cI|YYvP>N#T+i-cah!g&s z%g}Zyh)26v@A~1l;ucK1Eh-;C56h?fx^nx(&g#Z*c|G`hdcZ+lK$=OOR#dA^a5%e?k7s4f?nx_gfYZ=p?^tvYs; z{Jv|ZVJ(BcpuHTlc4y4b2-^`b+sc*p(!sqQ<>HMtz@Q&!PDx6YA!mOE=zOJ|L{}P) z^(eV(JsN#Aao-h2ywp*PApZKPN|l{nFFq`&P4n@-We^$nD^b98*CM>feCDU4<>e^1 z55z3w=Dj*g0sjY^FWtJL%SEafHxo_eq583^=s-n6b4q4w zbnITM&CVsL@!^EIa&qbfih{~hUoT!c{-rz8glg#GG7p$H00)5C>M7l%#3^QD}f?Ak)L2lN_jxVSnF)0H+0s@_!wJr4#~oR7!^k<;kXB>rW{2mUvQ81! 
z9T1;WE=OeEs?<=~=`}2&vUMTV+{CU*n)8{5J*%v%jFad#zfR7# zGf4H!5UVSf8xRQ6Ip^ysi7Vgctly_7{K*rB%V?Pw?8E*8mmy5wOS9Q(vt|~20%ur4 zjw}a07Tns}e}ARQdnP{w<1j$(7(+NvDnNW$QxU1dmoQ?Tu zS^M5uB~L{Cbz(n_1e7N?$CbQQuIKs4G7E?~k#_a5j|>E#dcw9G`o8IffBUgj@KW>1 zFMt^D4UPW>qC7I+kbuug|DLzHbuJD9*scC}`JOZT-g~HBi4f;!<`<7ZI(+kqmZ%8< zY)1lhnO>6;^(Uyf!t%DYqpXF@^2Oi8&QB?WiNgQDGn44>)tbj)e{>Gmpx1E~RbQl_ zt1CAiDR2Uy0TksUt{27tPq2+v`vg)-{OHxlw*cV9ZHg%Zh)DS>n|nhwru_Z*0q9x+ zNBbADJZw6RPt>agVjrjCr3+J|nq)2G7p6=4NAs`42mVSVuXos=KUEO>54?RD!#m7R z0daE8RIadS{Rm(=R1r=ACIY{a-n8xIsA-!=zx)QE=<@ZyFx9`nJ@zbD8k8r@{f8vV zYyaIo8enB3k5XH09B$v)k2-yAg8XkFwJwR$*uLHU<_`uO%z^qLev>SJCA9fJoBypq zSJ?G6H7b%|hhJ5|x+Ijt3yBC!Je&_anin7caD-hu!oIFs21HYG^TE$%c`B!2D)GEB z>9?i&07-wk$aM+AEJ$(F_NBhWF49B|`MeWRDg+~t|M`&w+2Mv=zRtAwReJ2l#VVb# zjntikAHxHuNMyd})-9*{VG+$EzRQK!A`<_JQ-QmQfxB-70v79!mg_MS0D0Z%YRr|wL~@rJzj8EZhsRiUgbj@A?F_ea;TQl(6X3n~KdJ*&OB1NXLPkK7vaoz3fiHVVw) zSh43ls@@&1Q+?)W;k}Ih-xc9U$PM>nPq?k}^Zq|RMDGa`a0SrP6G-tN!!YOR9nGIw z`S2ss`JFgN3XA)i{KIWj+NdmzyWt9#{BD?E{B*@%qQ{9VTDb1N}l z!gqN@=taFP=d^nbsT_@x{*)&a@Fwd^1f(sB|A9Q0ah%tjcJ|1b-6}2{%!}$BrqSUT z-7oBqQZ(G}xY%|b0XFU{!|OsXKVaXVxKv1SmKy~p7Xp*t@yfGbJ2OF{%3J=6?8??_ zCwZauWF_9{PAh?k@PbVjU=hrA_~+ZwD&Fa`;lHFW3IP_KEPtU@9=I*8=*UCn9qkR^ zmSE_Y^4ruj1ay~omU~$|mwVcdTiQtAbfkD$x(s!1Kvu_Y<6zq1wER&f8*p#gr!3Q> zxGEXy7x#}@wOz?f{(7<kG@+@aXI z_Bols{xq$$7wrc7F;jW^ByhvkN0DE^C_C=Uy`D%D^M}wf3j{ZU~O}3wmVL1-iA+3RHHXul1hFwfL!^x?3`N>oH~ve!dwDoiM?(i zrRxLM;n?}hqt0;8z{VDna-E0kaabeW+Z8FC*SEof7h1Y zc`zT)awu)a(NxqM*BWqTD~D1dfa~SsNPDneV!G7x?M0oyp+jH69X$d}#$j{d!PB}v zOoYdi6&%C1&qou-;vPutZ`(4GU0j*QW&b1Bq^2knlYOg>%I`efOZnM5YJz*9NX&Z! 
z=m+)gqt)9WiWHBe&3ZB3-;>?KWQ+#vj26+XffzI5mx8^balaQtp`n2f+kk-t1R0Zu z6WSd6RN1Kh^k)PL5ZO<$YYzZL$N`S9kyH~Up0B=Tx>AaRMdj7No|A~*jqml2kW5db z`5z{L?XR~BNIw^=z*tAi&W$a5sF-ME&T01HLeTNP*+5nOp`-UqrQc!a@j<6ZlX=?y z$b5p~3J9pXz1ebG3z^Y1k7{O#2W?tLbRuv(9wRBxgh0`hAdfW~|obaOFsP zZhikw#c=ioH4#ifla=J>8E__yt(?a2DD;F0efjDKWh%-F`=$d?|9Ctw7@A+IvD88g zH&kJ0nc~IgiU2ju(`ES1I3z|!T6X8Ur6L}@UqM<^*>R7r0+X+mxSw;>UsRoVdIJx> zGV)V|r!pi2YM-vbuTBQ{B72ZUN%7$42E8Sw44n8VYuM^exW@BPfy;c@g)7m_{!D6 zP~~1z2OxKxR#$nN5rZa?V&rf!(?)5tH4jU#`4n%t&9B$fcG2+L^0mh`fw(*~*8@xG zt9cD3TeY1{~rBfwT&l-=sp+j#c2zuFl@5%-Bl3O?Y7>*(QR zGI;-;4)uExP#&cuA9c{qSHB61A_Yy;XQVjS)#a&~cm`fy7+VO#Gab+q9X1DS`RGu5 z`{fGm=9&rwj(dsl4*;ygUM3+}k}mJ2-^Cn*RmvKEdE_8q!Ax}U3VWa&Rx)5o8PpG~ zLSa*WwPz#1i&vW}09^$5hQoQ#>aA~@H+p880{>9spLXiCi;&7LAcrShi}bVO2E;K6mJ_n6eP)jj!MZ6h)Qi;Te_F*GHM-{SawVSHXgM04OmTDE8|~`+u97 ztYWD+D-YD7`s`tPr|`>*o^Zc71zw9GqEIOO6rs!z45hDF4=AE9Tt*cY7M_Lon>H4; zP*>E1_|m&ip@(uPOoo6C#Iz&YKyFA#bY|+(&6itxWlagH0{t)_O28X)ucK=L*OAHe zKTAEXJ8{aAs^C?GPm6ZnM-Hm?VFL^An;(lh-3YuFLceAj9<*xY}=O;rRK;o1awRA)9z=LgTxYbxCo8U!Tr z2PO7lt6N{Mli;5Pt!pH0$5#sc)JL)gsL=`+SijS1860~NZHPfLys4fpU!j01N`uI< z+8eI<8T<>fVv7#&#xz$H^j&9MArc|2YVRe=o*D|czH^2lcWwX}2h!^y1K2j*CzS~{ z(0g;hjF8clFqeQARx&w!!QacdNvlPt;oxEFHb=dBIvtn@rdD3X_Z8rUpkK)axQogK z@H|od*GreN^~VnfII!OfmUz{$tUxZ$8=#I<;9p*2VmiQTxd9ky} zOp50s2tlE=LK$EeC@;G{_bgv{a3r4e1urOo<;A3N=@^?DmfL_)p8?h7~sm&*DP-kTvHJ}q>Q;TI(X4(({uG%;pZflub1%S`%_-N z%NcfsX;sj)UQ@WC@Ye8?gl?O`XUi%tT~VS__(U_feo1W7BWQcygObZ-TKMy z4#*~~i`%0H@FiXHEBfDWh$*vN0@ z#tUkzO~#=^q4civC3F1%$f~o_H8BIqrY6FlC7n6_7SG+wnXo?Sd3e^s4J!p^0bC)W z&xkY`OJ$aVEV1K0fQkbofK|JT=83SE`$y=&Db;R# zrU4VYeks3OBZZG$Ba#d{1*^7eo@;u6@#E$HuSEHR7qcSD4@S|P{yoRY&@#X9AP``2 zE~q`$C(!qA6Gf<=m<>m$jxaUG;Y-TrBr{waEbKlFfTgC(!t0?yAwlZ@oCAr-pa{bc ze*a4!U^vDvWPvm%?&JgNXVYvzk1M54%aGKV9CU1Mh(ss?-xGu=pJ@EINr+8h?sgQC zfdW@;7FgY#S@2r>66YV2XfLe8vc@q$n7`-!-zcWwN}9z!i`@C8eA-5Y zLEjbIRnI>2(@2y4C7TPW#aF6(41u{t>7tfsy{e(@qwKpw(iSrW7zdFR;$^LO92Oe5 zQVxi7ed{_`jP;K3%7dp@Clj 
zoVhr!Y3%%oZ#SU;xOrPoEHpX1?tNv&2*bqgl*2j04;|3<5%&b4_VcnGcE<^UyS6>M zANk#fJKW=MxuETW>a(a?HeQX^)g=d#W^(apoI)cYT&7+VBac7I$roGwF>s7D%0|nq zq9f1U>Rv~8cWMB`h<85z@XZ9rGWwY-y>_Se(k|7O`Miw1=RCCK@4fG5C`B!0h=1@{ zHh@O|(`btdBqX#_M$2lT=$B!Y14`uF$V(NGk z6x4)B=@ls~FxhUzqQu%lR@zeT+12Zr%uXDSPaIvCn|9^a^<_kV$xx!wQq@D5v$?jeb(a9!k6p^qT z803Yc&v3mPGhsOJ^Q49QwF8bR^qFH2W$S2$Kpe4{X2>-@XWf6|l;;>CrAtIdg)ezS z^$zboD~oKj!~7y3v>X`n!J|vYe1h9#%>JgdiS>?i#*2Et zd}+u9RvS-*RXBF4@|Y>=5FVq6A1i9%uYa(K+?KD8%pKYqD7*6fEm2cx!GN&tR}<5j z?u>|!l#Y}g77Wu%R!Vk^#s5U+l?bSGu`N~cnoktrY~#;~azBD;Dhcw>B08kCLVQWK zgUKHyu7yeK5vVZ{@+7}EKDU%kn1?g+@_96PucLOhviL#Nw!6#xPYeCiY5fU_oUIA> z2ZU3Bwn}PgGHEa&wS~IhX)UW-%!t}25*|lO{6y^6x{AX>@-N!G2>3T%#6vZD%JY$k z_I~G|Q7_$-OJ;)^^i6BSojC4mjPa9wteX=LO{h3iaYgLR@q>1@302F(PU(Fs>vjhA z3Buor2MjD2^sGpgj0yq%IsUxK`+7K+nUr(~;`B#fWTaA~jNQTyse~=l7p8eGGv_ir z&cb2($jrOiVcN=90{6k44fzUoC8TDFRkHpWW!6=?O|7=feSUbG%$cLdW|p@;w~76?e3)4$y+OZTf6z_+@`&T z;Hu?8dhi{r`sxtH9n|SWipwRnV-x0zq155-Hwg_+QaCQ}P;QCF`_OuN$!t(pS&vlB z2{0;sgV9~KMf!%zkldMFV=0fUKVK1Q8O%WMj#wps28kj|cP*M&Kim~ao_V%mhi*NH z{>EQ>FWJBaQF=-rU>57%0^cXq70p&V3kKILPVcC?)WmD<&gvK0)&_jYpJ5y|b2S@v zQY?>{2#k8bK9xfeRX$MWr6K(B;>c;?{I6t8(V9NOBu}>F>ko1XjeJFLO}+SR4tS-9 zWbG=Jw!~9aWj|27rBbWv^MSPc{-`BEzFA0^597SblFOxy`)0nCKe&6@@?E!N86TZ=Utu zEhaZ!Ea=kpVf71Vyxn{2O~Q-Aif{2H=~Bh&UcEBJ>*X~{|Kyf$v~bQYk@$i_G3S!y z@}#+e?wQraf!$25P+hx9-`U_w@)x6L!jHY4_dXE$xM3`^J=x_O&3O6fq7Z-o@eP!H zIu2RsKr!e$Pe#}tm3hw0xN#{T2t%ys(=`oCQGbw@)FRqSBf;6C0?c`oaS9|@& zl^$JupUS0Vqf#Hyb8}kS28H(=YuWutUr*d%_e~9weGQ^Uj|;gcErnHh&W_Ps{)2Kt z?wOO6$h=C_qLRfI@6~GP#`7$m=vetDOdgYkZ2z(P;j-D?Vf;rWebXuwSZVc;f#JK!O>$^!LXR?&l~7YrZ$jzM5b z5HrTfL&S?4|H2g)03>PL2IF0$|DL0|V&GS`bmtZhOMXOjy(6Uo)fd33zo6*gGwG+I zRmy*GeMSGhnREE3|2>BUREdlH6oJn#eE|Hn4e4UJ$#wF9-;hnp@7FCA{~+RpyZ<2O zU;6xmm;wHZYeXEr{1^D2Y~l~_*ZCD5L4^}}?!T{aK*{=-sD6R+e?^7U2N8zXS%;K? 
z!1JUKI{~F&$`e)9dKO%;^t-t5rTKrV(|{;-ewCho$}D&L;@TFQGx$UYVD90h!Z|5B z|9H@Usc;~my_Ohrav#P&v3U}GDMj@>(F_=A1d;#+=U-O$+pPYzI-Fs~HX`a;{=0%x z>BO(=ptY{kPtLWSa7q(^d=1<#Tz)f>qu<#miBQ`e?y5xhA$3)v7-tUVFxiOD-xl zkVzDKa=jH^+P;n9VefidbN0hV(?4WZzN)SuwsQiyagijLxMZ_8l52|D2gr1!JHRX25^3NRqv)Dt$o~OT?j+y< literal 0 HcmV?d00001 diff --git a/docs/vo/conesearch/images/client_predict_search_n.png b/docs/vo/conesearch/images/client_predict_search_n.png new file mode 100644 index 0000000000000000000000000000000000000000..e24e98d0356fd138cf714c95edd2d831f8672a30 GIT binary patch literal 35668 zcmeFZXH-_(wk7%*FrdgbfTDm~1(m2GK~Tb0ktBlTj3_}eN|KCjBO)jgB#003WZjW{C7=^XtXhf z@+U>={9j79LPk3sw3KF7$|iqqEoUEgH;wx0b=ab>&GtZDIhO#tLh@B(?m)-nD1VO^ zdplJl z${PI9ld%c6N1=q!-=@MJ?hOZ1$X~5A{_ltXTNaF3{EB*x)$7lO=}SvXqgi7exr%yv zdQ;QW&p%wf?9D7}^Xtb~tulw{DgnA(yI77LJEokdq5S-aoXUj@_qB@~qUGXsi(T0j zqdc8mT$wKa7$Mj(UCWLEDUsoSbTVmbu@H7cs5rMy9jFP3)>kA@52{GYu-~ zgNuR%ESs-+3rA7Yv0ul(1@W2s^W*Ze%-e@Q`EgK9-zWR)-ouLHTTAQ2JDya(JYCt@ zsU0cpSHE$;bJ&*M+-ljvlka!76*#23E({%^JEo)MV>9~YL{rS=bB83nl+4T$SlIaZ zlz(=+6)((=L}zz7R_3k7Cl|#{_LiB}gbK%L=V!4PcNI7|uFMb2HmT~y8aG7wM11~y zu_;+s_rv4esyA<@*{&Xz4H~SElv%)%=<^#Hl_nQ0NYjcgrC=SS|N46a-DvPSTxiaz zfKa_h-rg5ty^qO?qTouE~I_s!9 zV(}c~F1-|T2vzY53SyU<`uQ`v{rkkkvC`u+Bgw^aTDdo+Wo7-MqoWH73&$2TIO#{! 
ze0=@={RdvOF#j|xE4KSQ@>1AI<(2C~?AmqftP?lmtE(rlnnE}G9x}VkT=wJEZma9h z;SAWyx!?K6?dBxy_=%#$>bl5|)RALW%`%>)kg0h zZVwW)jtZ|7WB#>rO0-C#+I!;JbgxriN73qv)9iR>`$FqB`;Yx=E3wIK7iNrQUx{er z{AIUWq^dP$nyPZ?7HM^Lb+u)h257(P%(suf@MQ1Qj~`EG285n#=i9|fJwM#+dPFW% z$;2dXxH-|(e9N|N)z%lKnu`Q=?Z?_8fBdk@FKY2}C=97p%C|G;Gif|p<7$1^{>7MG)=%H58YBPLqeqXVrKQtEcI@0aI~eZ$sj^Z= zN=hn-$H1ebqoXm`+9**i^=$F-T(IrxwK6ZZ6DKa?^0Kq;J$O)wx8AsMgTKt<@#E(e z`U)*c+KJ|!dA1EJ%Zr)j?fj;i2s7V$d$9>m`W@5N@li^>D$ZwIe|~au^3$hJX>+od zE;Zj?x6#at7SHWZdv1Z!Z$m?|I2-Mm$8-wRu)9r~;`}nX4j;boWS@Xa^re@)cB5+A zc{Wnm?Mkg_2D3klS3BEkuBKjJ`ZXDJT<^}(@^Vv>_G$C(BJC3=PV7B?#r@8mI|(j` zPszE~1MCM6o=r_nB>|6|%fC0$nv~wB;pZ!yYcn-zNqTwk;6dJ7Kd!tKanZo7D&stK zvig#Z{>3_%!N1_e#l!jcMl(&C6ciMEJm@&05YO1z*`@hRn;ZSPG)J4C7R=PkkbNNX zt7o0?!eDsNY5Sy)PxeLQx7%i>fBdM$U+DJkjngZ=A1f2UEo3|Fi@S}+hudbba+xCY zeLG*Cx^Wh_ShwX^0SR*^y&s>jt0btsE!d6=5j*hh=1EQm^@3Ze^v86}TDoE`A99&% zHOl7wn4(w8YxX52c#cCeE1`4fn#1NTTNKsQzHrUnGkxUgX=AfFXHAjlzwgVXU(BzM z9%pFjKFGW++j`*Ay}Nghva;U0XM-Q%_NcL$53dc`K|o5_4>sQDyL8GMX_*`EZQ0GJ zy}{#@sHiC7XK9@QQvBJ6CEwm{V?Gmu&@7a_Pp0qKzdsq< z`gG3TUCM&?lLk?E*t^!Rl{p}feM)~5%Nr(twN4m6^e5U0^j6OF)`WE%w{^_ks@_3=@$mC-8GNDDrnOtBX-bc0SbaS^GHtrLXz>R@ zF^msN?wq}bOeEwu6GNiDyE}zTJ?%V#hOLb{-jK6pJ$*;5537t#U->hwY$m~*=Vkmk zbF-;BOYTr@VUfZQDJAO^+I;`akf@Q#-clsGx|FVS>njyWD>%mNIt4jr&z|LVUAiSj zHVYE87t`q|BpfxllP80#JUl#nSWKO&r$;i48!i~8k9mJV2oF$%uHNsG4~Y4&0p6~gJLA8v|Q zj#uHrhWxzu;P;wWmM!*WUQBVi#YNvg2Zm$Wj1j2z3faU+2XH42HAKt!#3{wT863Rs z$EmKsD&yZ6A@w}L+oChC)zdy)@ncfAe^5|dfR3Y7U|^t!r)Oi%jow|`x4#y4nu}44 zzSLX`kmqhT?!`^1<~n{{{xLn5!s^O$=JmRRcuE?j($&^RDYquC8tCiC0B86_V5#Gg z4BIk{pKseQVy1n{t^X69`1$jLOY_r?tII#>wr|%vSskkw%_2p6=7;;6@bFB0Jg~}8 zHssU^-Qk&^KUx%r`d&(WYbE2E+L1h2yWdX zv-ITvUDq4(IIupFdTx=`yZ&ggD2- z)}rp^bU;v0-TTbc!?d)t#w>G9+p#ufR%zd-t!B*v-Ol0OA`6m3LqpqQX(&BQ{N`;D zHN^|kBdnQ=;bWVn&z4rx!yryAk8rL21X-jzKUlG>AB2|kNpQ;Zq} zv|1RBPsU!p7Hkf9+AOi(#iqq}4TU;6xp+lmWqh2vvb@%err}UqL|$9`5mqiyyNgCO zp~}cNOxlN^CU4@*dEG((WgzR1?#nmFGpB0L(Z*s|=gz5Tn)o#*>n8V>dB;0UfA@*N 
z$s7X0DjNEQ4TX&N$u{`I)n~rKi#CPbZmZf(KgTmoMFEsQ1st>7t-^xbIvF)fp?G9r zhmSZLMjaj&m#s5hcyJ*;X>M3TD)fU#MNLgj9qZb8FGK=hTI;0OEJrfTx(Zrf-!^!A zhl(fgVvvAkD6`|>Ypq3JZfy-@I}H>FAHDY{mz0QcX=YtSepdWB-f6xNdd5KkE14}q zyJ=Hxk;@{$`vH6V_Wip_bWhUO)XXxg!#0mV3Rcg4!gN|?_c5J}d=r^_)O0f+cI#fp zN#~D#w>%)aT8FSR@yi8aoo4&??R)2zPRLfgh`a1jcT~2;P-@l6xTv#Gf`JDW#h zMTb&)?kDN)zL|*Yd2rt^B7%kK%q@99`^u9ElJzHR&V+yYqQ<3>Az4>fr%~jb+m>ya zO!6!*uM#kTzuumbN=V?G9cypdtG2SxQq0c5Ap?}yg80}!F_B!h-%Sf~R~ePvsG*wU z%;4*vT~2c383PX>cp-f6Ow=vbLCmkTrP{!Q~a{~o%hA(C^`1^4Xm`ZkBSX) zH@HfCr!^!szt-L8=x`3oxbXLF1RDKo2H;ZP6M+b~m4-THO7-7_G3a6Gx^-@$y*r|} zj7fWQ&zavm!KEwTKaZyH*d_29c!bBV3E4oQ>Yf=To&h$7k$p0Iny^X z690hNQF04CXUb4ZvaTqSQN9G~+0l;$jP$F5wDx--+Pl}Gqz zhqaK)LWYj(yn(5%rKKh7;lpjC6JulM0^gq=IJXPX3n4EC^^3HmWGxbz?bmlyI0w~j z6Q2W5nzW`xu3ft}13A{g(Qz2Htul)E43|YaQZ}~z?6mJzekJ0X@6~reay7c33 za;zqbS9MUDQvImjC4Bg~6wYa`g=uZrZJ^Yc<;9=z`V~I8<69^tg-@Ry$j$GI<}R7k znv#e{FeKPTRx{sjY^Fg@)OclSp8xiTsw!F8Al}@sw-60MK|JO)Z{uA?-U(m<(3Xyk zjU|-`){W)yhl5g%%d;)0G2{CB^p)e3qhD1&$%#b{h)uJewh(DzkDVlG*Y@-fE>D zEc5g8vABMXbi*gecq~#C6%{1&cD?{oQo>Wt1WssoyPad6o2XYIsC|*ZuB*cL5(toG z-${{|ES&Y{wTWp=P@iMx~Xmh=N z`?h%uFtY{Wltl^=H+@JoLG1(;6_t8+Z&%LTOfrBVAeA(7{YygE{LZ4Y92Cm@*H^2) z_qSFQo#^vq=rE$nQRHS)+d#{lFh4yY^GRgZ6o_swzu600CrQbNB(ylr4p->&u(7d8 zAvm?=+z8LI?B>?h)s<2}`3hV+_VMTJtRFD0f}$d;RPo9}{MOR)Ov_7}_~$s)WQ)!t z*+uk^?|tZ+e}8=IhxhNlRD~(WDv0A;R@uJ7RWgYZ zyIYI04<0`JF)0z~&|w2=3+npfm02S-hhCa@(w9?lM3HkpM}e5L-WC^Sbz`VRAy$EP zM8gO9f#BLe9&vJwE8$d-fhRxQvFKa%c29SO2CJsYRS^0u*(2>J;$VAFb6n>YExoIp;rq z^UWp-B|tv)jZIZ29i?v_n5lCQwqcJ(;tZdhUtHt>r^um_P>Zl0j!U@=_J|al8@BEV zd-!k@DHHGOT?arX5luPULR)xoG`)H9ro~wXCMGTc0RiEe+H)Gku7&iRYGEjd+7b>i zfN)(wV2lSf5p>2O4cTeezJ2i=s!3`s)sn3N)yrj~E3ZXd7OvNZ{pC0u8*THW=dULW zCohb(XR9vGjZO4x-XF=y&6UcV>`y|v>$%0vpoSuKX0F|GBr&_Ies$%isNd1cXKD8G z$~!w3gt{&mODR0s!P?i`8;(mDMvMi%q~5x<5mW<+j$^HzW3C`Z?-u@1vTj=vVIdrDRFu>gySdFybg$lb;iP}8lBi)@ zc`Xn8osf_QQjoxyW~PZU9_Tm!k1n|RnMOsq&YMg*0`K0vBXN_gFL<+HP>{&Xqifmm zrHt|NokPqy%2=QlPll5$QYQ^R*WxY{Yt~S50#{dB>gXuqz$Dxp92_H20leF`ZdFoI 
z@u%m~(9bppF#u@#3dL0dARCTuOc)782OrZ*HN{D}XzZ@+zL)ibF5)3};(Jbi5k2T@4egVhSu9KQ#S?&GNmJQO&;o~q;IQVyO~ zhnSX@of^X_`JmGQDQi1Sm&);C-vL(f;cnf%$HjsKyi?Ymx+Bx|0Qjj#=X#^$!jE%9 zlVzJZ+{6DMm~H9`k5p%#U>&_X;ugHsH_Y-PVL~XLSn%)_whv?OBlQl9(06I0L?Pv z?Y30AZYvJTNm{Lc`L&X*DJJF~UWwHJHjF48{`C! zcjU%tMRPuj4z;49BGjiI zw#B3bgiH`eY8C|fN&ppnsunmgVB|)UR_+^B9R~t#o(BX3#Hl3y=3$WhKV_~yX}&uO z#sCpi^~#m7f`S62G<_-1E@#sXt9)2M3sfwXSvH}x3Re6Ih-&t60dznU2z?Ez%e53mZt3v!2D8?c6KmjT5}_QPYDii%b}fV8?MN)FxUlwBtt z(C%*rnKE0ljy49tUtmlWfb9ut7#9{<{|vAx$rpnF^1Y1TzPm@IGttSjweV}BxW$&f9);0qDI&ihz{{sfDeWpM?!C% zMW#>zV&OCUay&9R+O)!lbpc_^GX5iZJH9m!U>r9AbaRaSI|QYmlSW?1=}AcO?AR_~ zgAuuuid~lz7PE~TJ}z{Ft*QqPq>QB_^aRi@1c;#{SUqE)UFl1gvIJSjDvuQ_p~{3v z(^%+auZ*k5Bu76OxkPQ+pm@kMMOmG@$vNa^>rRH+H?N~5=^+! z&++>#BZo4SIRGVg3Urj!l$HGo*5A`|#!%`$AQEUW=hAD21_lhE;Xf8QOnc&nzT^Nu z$&M5-9e!Scqh&Xy>pCB)#~)0CywE#X7k*!F&jlNs3_YGePe3#Vg3CKafQ|S-d!7KT z6+GwZxiwrf+v0w!8Gq6I_kE^|`4dH^fDYk+u@^v)WeNj0#3;x6lkWf&2~NFf@&yuE z<`~L?3yAq!ku|T4G$-2GBC6Mq^XZCOcyfD>7 zSiB74vDA!zA98T!kf>2LqC=z!i6q<-tRRTthD+ zXj#Tqr(|Xa+nOxs50(L!+I;=l-L#lD*Yzn>Px8{G>giixm=vC0c(RMI=Fpjt{Nl93 z@4I5dI6I2BZY`C+c+nH$TLca<00E-aGYJ-{Ll-<#G>dt_<&#P#iVMjYnggkKHfY&z zcf~pqHV@}_s6MiJ4oP0dN5^g1*5TGIwRSH0$5j_US8;+5-p#F5i#lxmDj9WOfYEDuC z2UcKx2nha(!qpr|wCj4s?4a$C+MR0i~c_OStIXHr53W54o^NhG(22D`c} z3=Rl8Wn4}@Q@a~N$!{-!LX6(^=m-%|O_~GMZRu{sw!#-&%hgNqM4@3dLd+GO{(RDL zVJLcR0Rg!q$s8B}6cqRXRcJ6Q`D5zp>ON^Lfqh?ct)u^qJjh?kwq{qpG>D?1lxyO7 z08uf0W?GG2^*bg903Q@LbHw5d%7ZH?&?_YLD~O}(gdU+!7;-}A_3;I{UO=pK6((3%{wNhqWW6Ia6#kE!z z~roZ)~PR|0R1C>W_4%iy6{*;NL}JbPBJC(AjZp_wi~TAO)m5|eTDoq2!6kL07bV?L|wQ9hp=#GTd2Q()aIRs zJEl^xqvLn5N)J8}=&D7zXPmvn{ zPHYZHsxm}v7FqdqEw~7@O&uhH2^)dURPOsKHgm{{uyLusb5U`BcZZKt{5+}Omr7|Z z7ezSjO`xqqmaW}q{r&o<{>mvUd)4HwLcYZ+fi0si?61KmA$03U7?J#b{knDU{{1+R zArQZtvb$a50W(KXv1jVo626)gPb$eeiO3^ykOD~p6IuFkj>w~s+6p3jH>_Vz!qaeT zdNjDgNRXT{5N3!j4U@wxZm!*v$U}qyL;|GVwhe$v1tGgB%|M3S;J>LcFn&ZpREWdA z#!+hgTKa$(D^Q`db4Iu!^%A+1^{`EUqBSUJR3jMsVX15`RAn*{5z!dl4 
zIPlWmX7ZajetG2W#{G@HY>Gq~2zK#5rke!Hm*icbsXPe#jR5+xFOL7Eqob3pg|dn% zA^r3|0ShHiIPj^EYUv51V13j?f_ggOV#bZ$3&av}U+<`pP+J~%B(ebEcnv?lc>VtU zd6$_wiJR8e4eMyngkcT8XbUES69&YIC#2bkF-i6fP=o!RJ`Drzm}&01La_>?75ywi zHTvkt3-vYBhn~NELLX9%Rj1pr`thFyPGF2rrmH)!tYMZ-8+#20f~y!&*Ek&e%XE z-9?2!+AOrRwDSuK!>=DO*Fz&6UBJWqS7s!a|1)MJOQh3#FQTJo-tW9iM`^{igowK7 zpnj~I8EWwJ^^E}1iPb4|bR5qe;I$oAGA$;21G4_7^77iL{z@WJGM%y_l1L*0v-!u6 z|2B`(&kbuRJ@4AG1HScc=Af)x*>w6U?m#stMA277}7+URhp#oZ0!uU*Rw%@h=KkbbN^c z1@kO0(Aazt1q}^yO~GulrbCez_U$>JXFfhR_a!r9?W*rW<1b}3Qk%M(s+O?&5`;{q zX-k}hH#5<$GYkJ0KBQa!vkwVLB{s7PG%k3OW*8Q1zP;ZpRZnzUrqgyYkZE~>SDl05 z<4OV&Xk*-u9~T?f5=z-~|21GLq|Qs0#mia525~w3f|L)z;Mzsb>V%(cKL(abbmdq0 zY(@f@RJHbGPi?P&MF^DU7+7-z^D=^gNd(4DUVXm_{QRdVu74y+DrJ<+Ki}us*2i3~zfYrZ zkJ>9~Lyi-Op9WC1W^D*617RlAy92gy;$uMbek@Y2U%%!Ft|GK|sMEM0)%x}Db#2ep z(a;FOQ@5>>K{Z<6&|v#d{n|Qf-jS0!>mVj37JBWwS@E2yj*E3xcR6r8NT%@MxpkBh zzXZ)Awqxw~9>`?>82Q6$bMEqG2PjU|n>XK(7ssowcGwj@efo!ifx!^6ElSNGuq4K{wke2{9lY1w*8Sd66P(>&YC zZ5%)fZcFSN+|SU(mEcb4!S%!<1>Ll_SH(|fLNi`Q0oN=7sY>zc)i-(e6Bg!MXlYUE z27LSW&Ftgj|9wl5*yX2Bp9-hf7YXZ8l_XkMD89SDjfqIZvFhoD%~YfRFM6)dF;5wF z1fFODYFgvbFR6s%hfWZKYagC)Tg|sU^Z3IXVhMbAcQXubs$e&XPy%lHdWriQYHI4V zIf(eh->YB7gWrSJ7YAB{XtBf^L$iu%T1>!&GdHXjpfe}>|th`--{02(c z@x_@2(b7*F_y3_n>DaKbd*gSn0<6&myY>dUEKKiE~6e`B!rwN@FtbXmWPv4)0*O1TFqu=sR>vLzFTdKj!B6K{R_pIb|@a4$1vG(tG$E#r<91bPY{uL&I?)dw$Q# z048l3tBD_h+-}4s%*YgUg~RB!inKKKp1NkRk@$r zK^o>!D<)UhdS5oLq_ytx>`LU_C=9v(g*8~L=+}fBsjA;SB--J48&1ybaD5P;SJHYh zz273Ay+P@QSZkwLThmzW)A*gRl;4Ea>8u;q?9||?(a2Spn!0HtPNCjM9LhO|1oa|q zP2W+w-}(LVZ1aYw(Ukf)zYG&PJu0!czdzi5bJ({r5*$%U;n#6udv-TvQ@!N4>Jn?N zNj@n-)Wz(?YZ4Nw*y%mK$4S7s^sfxj!E2nmo8x+@4@#l3urAOs%ee<{ES(5tM(0EKE_Wb6GmKLx#?!TSd z@+1Jk3|hvfM=t(f`m{YBCKvgLtbBoXTQWu#Mt;5Xtl!LFc>U)XWWd`=Ml_1`0uZ$_UXsEIK{fU3_X{N`1XMb(4vExT;#pR7G0y7gvR7!iJFjU zdV+|EyX&m3+e_i}9gLqpQ&UR~1$EGb(o;<+)1!9}Uwq1M_9vFW z<@e7YcrHaAF&)L~=Wj9iU*C;ZqZbogNho`4c?E)u7a83z&_)$+`j zYu-bTj}`s5NxXPI8qyw(NiS!Xf7bU`A_w3~^24{}?1Zpf8+g))*m;D1_53XaotFSt 
za563^-3hBYH#;|{f)H7?f-`G(Z5sm*@lTTUK*H7CyX$H8i)az10_LoQWmF*Hs4Cgv zYJI%>m_L%6j?1hvahbJeTdD%MOsrKjwx2^YCtq@kZS;0$X5)J{8d z!fArda$T7>3brdfZdUZ&5G4>&Jm3od*#9Ki#vlna^TX$2JJF?W+MIA1^c4#mEK4xh zX6%C}D3pL8l_V{P$I0|?xSh5)e6>L(US;mHbcK&s(k2RZD7H}tL7b#mg-~?8!F8yf z)sI8<()jo|@eY7?jM8;mZi<41+({|ROg;b4U5t#JQf>M6$+ih7Y6v*BE2G`EEz6>Y zLD5IM8>K4S(WB*gOcZy`RBNL-<12h7INrOsxO4Bb3g8n>SfLq!I?Z+zX2j|NCPxke zJpSYHo$_W3mt7oL~>^{ z)i{MfJaradr-(hfobaOY&Anaw<5XYBGQ;n91yo=ZDV2^I2X7-bX3@IUE8KYx_c z#>2r1Kf-hPUSd#Ec8@4nRzBQ*NC`woa8`M_r1*skLNNy^?&0e<%9yqgLBT;i%PbH( z)3Ct~2JDT>ia9yXS3r8!!IRDh3Ymt1r!Q*xw9Vk3NVG6wwYAaz@kbc3c_&SPptFTj z@E<6|08&U=MkakExWT_cV89f(s590O9=_%=0Uqn`7fE#%wT$4_qxL@non$@yV5-PR zFD9qTMeTn?Or*Iws9R=j{}ll~q(UE?F_!i&zvIHkU|iWm~PL+PD;tFzP{u*_k26* z%=PfG`N*Y^r+5)!201q{oK}om{G9$? zr3i6n_It49FnApzni$rECjRak;XqqLmPKbAN`_d=?jjU*#C;0tUEeyuNeX_n0GQW_ zJerep3fm?I?+FsJHF;Zb0=4*Dr@g}YyEAV$DsZdTZgf9bq2Dp+$JZkJSoHSxa#s`Ll=efEGozv|j|xJ{C7O3DxzK>PNpnsuagU!+cR~jeGEo zipss~C_Td+oskvKnY5`Wr$m3dt^T#YYFF6KBP^v>$bCI|zjNqQm&J$;G!X(_RyM5v z{+c^ptCf#+b{I-_v$f&-x0n0U@v;PS#Ws$80@9H(r;)a;v`Idl0ttqzOfx`{`gA|} z*1qQx#Wf-{p16o<@aZf>YQ1G1A+0!$xp+`jtA<$4>pHa|9I9(xO54mFY-yiRdPhJU8&s}u|0){^KlQ)G? 
z$VT#&^VB1R_=$eKHI%0;;uoaz?Y66+4BVx5#bNTpZa(k}Bh#>zpC_#hC<>IqFVLWt zjt<*KLlFxtC}7maqp-ufiJ=wtPXiYbBm#5@kPa+@nhsxlVO4g`4SfeNg)1S`_M<*T z*e+vsx~S_sc~83=Wj#A5=S5dn*EBUwVn2e=6*7`zSu`JohK#=c{z&9&lc~N6oZOTV zd(=X+BQ0`32G5YOG%~Kg2IJ8$^c~TFbTJ{Pk#Y*I`Fdcb@n4zbX^2bRrt&9NTNpQd zBH-;9gwL)Hfubo^@kk;32}H(Z@-BpRn>Z7ILz?k*Gu5XjNTU{zl`=q8W;e9VrYPBc z@Tgy(pX$d?KikH5Dh!Eg7!nu%HGm#vs7dljuBy!HzaS)-hjz(im^vo+W+p zVk6#5OhyC%eEs^hEzRJuY{M<(W zU*nFhUHaeVj*9++I||PxHQOam(jQV{s78#gM1b{~0U{)jU}~ZF{Z~AS@~f#GlV1!V z@t97}%qW5HngL-g0mxef|pxFOp6^LNqi@5s4iKk%1rr1ZTC zmxw&_%D4KZHDaRslKhq1{YZgLaC#7>f#4bx0tvS5u;;wSR{1_MS^Y{h8Kl)9_-S93 zWtbH6oafV!OpICt{*#Cb*+N;rabx=ohi*{;X`=w`rO^)W-SdGLJ_#3Mz^!G4=UMod%H%(zQK{&v>4{!qQ47$ zT@=fu{TMa2QT}{|qYThA41R)ij%1I$T3s?KVkT6EMxIS9X$`^2_&)N5>VrN{XGYEc z0OaVE)G0kaUi)i8)lYbi2A>uA;f~-h-v}SpMG#r(#;412W9Q$LtbGgtg4`Id@zrc| zJQczW!ooPQa0==>(r+AT`aE#%zFIpJ8kpH0Y8``L0GL0|>oDpSbOey~wtZQZ=>odn z?4*UxH65ppU?x$c#V4Ob1QkI<*q7)42Scxh4r+?{G%$s+;pqZobIQ4zaLgk4lOjihrE1G zu_$y05F=+*h+spW2oe3KW@a*mo`;4eK|PB>DP!*(!f?_M+K2`s6G3(86*12e0LoOD z+j4W{{DN6`}I~zjtd>T z^i z*WRZHeUljMfjfyp*3sDs%0&}hm7Nwe6*9*PSy0`P>w|k<6# zEjyAl2rtPp;+Dyml-GoPM#%rqCOkA8T_(;zz;*ak5@5;H1{BTjZ6TeCyXX!dDyH|< zibj%hgmn%Lmt%MK6v_w4>)l(%&Git1(=X=(PD*T1Q*?NnKVE32*f z(>ZZ*%d5Q;>)5~TIT+QV$y}Lwp3AyKnz1g8lUJ6J+e6(X$X~Hk_QU4`BSO4tCYyBp z7*8Z}@`&fN2c8qu*FDK9v;A@zi-q*RdjWXs&Qmo{(7J}4xqbiUwgF1=K=0dgYEEN? 
zq7P=ps)I9Ix`3q{;C#K=iB894wu1-XsYh?1>^d{qym_Ua@%%v4w_|)Y@qxpQ0BeLpz`fo#OZM>TX70S)k@=v zG0fxW^|J2Ff?pQ;;OUV@G`n%1PS7fNF)J?#q`gb`Qa%fs8}&$_2RY3LX9olx*)s3_ zEu_yoCgv!YR!;5jo4Wewk-A>Lw`iK7%6LIT6Wtn$R)z6xq6#idmhI4q@fh{dMS81%Tvd)+AT{G@M&A=Jm=OXWe|DmqN)6$Dh=T$Yiu>)4?1%uB+q#Rr65ad}T@14HHx&SDHD&o?`d) zd_TpTk^u^h@F3_VXaru&|1jqUBdJo7mLM_S0U|y{iaCTnlAK3c%w<#n(tlip#z^{AO`x#)kB9!8J=dKgi&QJX=$Y4>-pvt^X2qLz_m*mD?vP(V5d4t5^?D z(vyApc{?a#mwPixZVorH!IyLyvW`D47p@;)AD=f+=tszd!7K!*lA{R?>nPOav5E@$ zb^yxtE6YB>xg2ZZ;ld#tCEiLPHIQSmWXr(Vpa|1G$$X$9pX@pDPh@$j7#%S;7Ww|X zalbTHuw*iXzcdYF4iGVAk!G{_<>cgu>i~@sdrumAreC6=9Hm6d8Qt^iEh9}B`4e`C z4(@NtaVS3S>2!J0t%Cv~JD-Y&5@sdnA~eEtoP2F7wm5o=qam>L<(VU}!@BT1ddQln@$~(RECo5lP)-U0}RUzeGzZe{j+$qFmDW{ia8(4-kECYFuH!st_y+ z4~B!2Q+pdY&;%TA6EvqGv)eWgt)U1EeP)O~w{fU^`_Q8!5$zV%c?BO<=^(|sD;uKNg1sJ^~_d1WO>i#gEmHwI2hk70GlvpaXv z-hEQlm3p!_7c>MVWRDuUNMhhM;JX1;+d5BlEQTMH0|-@Tsg!0=G+s%2y&MMB7lpCf@$B!o_7uK?MF6#W}*d;LilG-{n3XnIL7 z*%O>1bmQAy*3)ft))~OH=3fssl|CuBz}Y6qT@yQSn~a?3^0S! z8`noj5o|27x@1krdo(#Y4n+o-Ed!a&_WVuz34>Rl3p|Au8g()74Zz6=yoJi>Ix}-Y z3rhlA046;pw;`wduS~42$bnjKg2!V9M{{5V;KR#m&aBzsis&ksB}&eL-t$lb_*Z|G ziF$sxn(DGN{c4pL-Rw689SEZjFa2g(+L}}@UfxFEW4bGr-l>+|BE4U~>Y=aRq-6zF zP%w|drx&OJHQV7(c!H}EL?=Tc>1p|hcGNTTeb2M95S0`YHTW8UCG)RwtEatJm`|v|aiElolrKS&0_;CVVD<&P}kR zCTi!apkdu7&EkyJxA%`2%bdSGIzl2f)Y`r=soVhN1`|Qr=%Qb z5_Q!gJBK)hu)lPt)3}U?%g~~5R)zFMWOv#H6M7Q$?U&hBc>hW7Je)6IG8Y#YWqjr) zx-CyPgA8v%dj#XaB&ML4*N8+#FuGu|iT2%^waz`>k9%wwz5L}b71znYq7K(Oi$Nz} z0!|I&o>w6bpBPF16wDu{;0Van8scC8q2B4Xx@@!FtDTeNW-}b%qN`y;_ zbaZ;2$epXQs2mSowzP0@adtNA-baE+uJzy>r}(BgQW!(_Cflx)|C%Q>c8CJQ<%%nb zDBD|IT+n?#0=q#SNtwj4g~35o8a0ULSMOTn_S5HQktd<6fOzG;^a#u|{KwkFqoS=O zTlcNDga^I{#DWpH$C`mKSf!v>D8sqc2s4^QD7!AR>o2|eU%v=NlbT6vLU8-n)cr)| zOG-W@F;;^rPc=?|EhCAv_OwOA)nG*CUWIh*y7oC!(%CeQIJ@tb0dC3a9;x~MiNNp0 z>FAiY6|$INt^r#9ynW%~!f}xDK_W{aha4U2J`v9jNti$wTfo&ZIt=fdYoYoD2M8?Q zDu|NWi|ChUB?#%MukGnR(w7UeN6;(RenQ8OP4R+#$BhSLFMMv!$ijMu2yp%MVM^a& zjQv0lWUd9Ql=4bSN}=Ai;~|ad(Kb&?!JTC>?p1^Ce|@n4-{uoRYy;?)CMHA{(qcrq 
zcT5^%zLqO?KZVVL5KZVZNXZn2V-`qBu#0m3!UaNj5rrj@3)BOtskUe%PT>3e0EiCK zdM8+#g7F5m!UwITjmaA2NgiB8006^xRX2vkK>3GrUCpc32TOw8jfo83hfWPtOEI06 ze&xo!yc%hXN@g$CE&)nZUcbL4>0ClL)kTHot4?gDQQ|(t#t3V*)^zL-fvYfwlJ7Uy z{*1Fn{2NjVFml*V^-0k%o{}f-K~Ve#o^{x0eCW5~yIw0MCp_yrre@!18$jBgunLt8 z4T{8P4{t6Xs>Q;Ug%LYbDWW(KKP;)BiRA(Wi81b+Yzz>MS4IZDetj)~?qP_HO<{li z-H1L0!p;1yC-2-bOTR>Ux(_=Xwu^iB?hQeVBa;El^;j|NMJ0aL()$^c$~H!*9055z zYbf$xfaRQV5u|@$78_z1t*Cx1Xr@Kx=4)8~*?mkZ%gDE**L~j#Ey%v&rK!g(6&Nr> z$U(HDBU}=bGmN=0ctF4VQD3~ku!)1X0-U6w{s7(T$&5wf%0y0RLEKIxbvQVwAlNcO zUFOvDT}g=roSq2rf(>o$;5~>l9Y=CB^yV=sz+@uZBKpSR+~cf})^~(P$0A)(_ct{r znZu|Kl+fux?31iVj&#eb4K)tcq+motbu#SX66r6BorbT9=+_))JZ*Oo9liBvBs_xV zU@}_-XCkI^>&6w|LNQZ3KZqS+IK}Pb*Z1t+9S|+^Hm&p%t*E4~E8BAa$0ro`wIxnM zWmd!1YAnzuB}so28k$+9NZSey?JQW3Q;Urh`N6R-sbrj1qv-0q9GskNXU;4IZJA7u zjFP4mM%x}H16-ov1Xj#P@G@wjhbI+}3k-?SH2E}{{sdGAQmYvyStKR{vHu?>j;1Y> z$#fc@{QXpt{a02C*Rk9_7Js?F?ahRIPQ5a5N~p!!05z@qS=mj?(9FZ=rOQ!^@k*EbLg` z-K}d`FnK?|at)=V{{`500X@$;^qBaCgd`An<=~*5?dF)LbZ7MgbOOecfI)}5+*V!4 z@F&v5((83Psr~1Y1KbnF)gf!yw}D51faJdK3uYdWj$ATn3*|t|nA1u@V{UqeX#lXs z8oEss_s38)N7JaTJd5Sthq5!X~7oIb~yFgMygY8oJGRr`7C9O3u?07f6>4 zqHG4T^hlCsuGL@Y!bu3E1FjCsf%4+Mh43e^CO4r90HRDZovRF0hi=5BNq^-TO4~mI z?yhlvOwQ6?5$7Dz4ve4HO4l`vYK(lGVI2)TLcs3iS=3UyC1qvF*a9@*xR8+%xfZ&# zw6VIP#+^b#D~nsAM4;$U;rqnaocuT#XcT%G+&rLbBz}5z481M%V(_cv9DpdZD+TH4 z-T-OjDO`fKDN$1uyV%NB z5W$aH3w~Uxl0D&sp@Q?R4M+tj)_P{Xmv(5%V8~xnOa4zZ;Co;bQ2mK;f`D5O)Il0L z7~5BWopT&L!6vNTmhxAcnwC78$SsIn=~5OF2SMTgHYDh#S@EqC90_KfLAE1TvA7hs zt2A3Er!LRs`Jr@y`#@bVOP3HG<-ZL{TRhfNN}im6WOV7OXC0dO$qY3Uu&IL0iCRo5 zB8xY^8);LGE#cRzs9w z$hJqy=635m%`&T?=1A1s?M%jY&_n;TO2M$a7!a0u)77W>FaS<#(R2SXmq{v69PY(1 zO}Pi}qN9tvd-pEm7%?Ltr#7P73K{Zni%YzUAM-8DW&)kJ9viXeFtQEwpU>PG=f%a@ zV1-0p!ElL(BOdE0VGpsnN@UYOBs_eWa?c-aMcA%13dDR4e!jvaGcWHv4!rOQvr|{M|jHn}{4ba~dpFf@_L*~la9)U5-Y8or_RF3H9 zgaZL}ERFjk7V`S~h$QVuj0>p_xnX+`cdtXX!F&od1@hDY7)WmqQ5KH!&M-JQe!SkE 
z#iNn0GBtU_){nd$BP2>>7XY?rVT2$<5`piYVcueH#IEGGG(prtOdd@OgWd-(~$i1_MLaufsdmP>#-!r)BYuviC@Smr^pLj=g+BqDahwJAzMyPGN*$RfI;SmKM&3_*R?`3*yntfKtY#mLit9;&TZKCFK+| zIgrfDu$+)KB6580-A@z2cDN&k?QjN!25H(=#!xpxZUMO@aU;H5eJ#JipIBr;PQe#R zW+IVRWoG73({2RPSfp`wNGGJ@2n;wzYq7Kx5eG2tBO1qj#Ow4pX~Ojb1svX8@%#wJ zEQ*r(3IMz$O=9}v(~%R9OOoB)DSljPI{2gtZXgCcBDUvOr(J5l<7Qwlgy!lFVpeI+l}Cw*z${zbMr{hCH;pO zV?d@>!Vr-FuViylH@rBFWYAnk&Sf$~2(=httH>|`0@}5^L39kk3Qu-5nHfdIzmb+? zQlyi{(~WMblv9;3OY?#a!jnE_-aZ%hYm$l*MhQGS4z2Iw)zlY1e*9o;C&pMZECPD2 zxj6wNYykZXpwc@}KzuSsb)cWU3eockjN|hdf@5Y5%S=+L8s}jVk*;R{&`^u<*f=9z0T~y zTk6Yh@L5@O1;bxdmN@&Bj#YAzPE00QF^i8!(K06=qq^R*8JiTrjlU*i(dQdI{o>{o^b;Co+r` z)jzzKq!sQPjst4Y@i4n02&Rn%&Hd7XS!C|W$2~6vtv@!mVE!RId4dZIzkYr3VwHk< zq~*-_rh(=BXBl;qf>jcIm|ed#DFcNr{pgY0T~Wz(SppkO z(<1jn5*G{0`2zIatwEXr(HwoP)GOx;V{;Q(N-gG zOg#J#?o<1xc6zv^j5c0?)3N3VG#C>R5$-!B{K@D-9=m;qDbw`u^h)P^dbVy29=h#B z;m;LwQHs|r1c9M;%booLEf=N-G-t{WCBZY%Gk$Y1dIk&nm*+nko)N3+Q7STKl>%VHstmu@<5~O^!Ft zlLj=c`n#K7OH2NMct`2ED$bwci_)5)QXslWgpKw#z=Sw(GjV7n&B_8r5o1Qt?lgOCqIr#>0fs@aWZEJ#ze<4cr| zQovm@Fpd<|j`RHivjLk`aV2Z-&H1EY9}l4Ato;k8ArWRTKC2!v?Dv{x0XQPa z>BN~%g5~TJdU5a+FlS-t2&Oi5C4IyX#3Hv2!^%zI6bAJUW73vyaj~11=FxRlT-Q8_ zb&H5rB)S2}YHRO+3ypf>??Z_y@i_Dd8++g;$ggq3^aRafxG;jS*Mkw-+P4_gZ`JC3-yf{ap8To#!!~ zOP-;>;7g%{#z{MK-M$~kB4AkFN!$*$RU2y=d^B5dlDmfssD?G?hWiQJ46}59r18?r`Vv%s_;C= zaOIe=CqWtsKYy|jh2)5jIbLW9HbjVyBz;z}vqz%Gp7<_%IVU$fy?ONbd^YiTKqjOz zaCLS4*N^I_Y9Qt?#Fgm<*3q2h9CcE-*lMIbfht`aQ{7FaK!G)iDAr4LA>lYI>yr&` z;g&-eA%#Zj(c)GiU55KrZs&0Y@~`k3!I6kEo54H}dNI~Z5IA-t5g5=A#Adtr_U_dJ zOc9SRh0!*H-l?z8S`N#hS`Ny>Ah`qD5)ucKgK++Qo)i#EbLn>j=RRo|Irx0Oa~eg6 zfTLE#!v|<;U020U!G9nZYviVv^F~EFdW3myvGEup1}MOiBB|gkN$Lq+t$~=fw#&vd z7?L_kom`S3{yuAmdi^5`x7cCSPnxKz z-{D9etUv@=Dk@)i@!UA7)gfh9`W9E*5+3PJp#RpUD3;c=pkb3YwEikI zYxX3E3eMZ1w%aG7v6+!_!Ndau%tC0?3O__l1Lh-2r#^ z*BmOmh-N!gjnjKDe`?}HdgBGzXKcuFW=4hS(okG>XZ8YL@o{-~&pV^T<#ip<+rI^= zHdwgod?eh)vvhcH*s7rxFEA|&5-bl6w&wqkKj~*_k5&+_l^EhAVC9{43N0C*IVxN8 zRZMe^T6Z5=B=NwKl7{g^0-%*Z1j}=Ajpw3H{p3F@l 
zL-D|CXPECt^{;Sasn#oIhNK1BB>6|4KD*8pYqr=nUpkDZG>H;?8qBiGGp|AfkbVe1 zaoW=-$Wg_(MW;T0YSag25aiiS=TGJ&*a2jatC=!-2a`WR3O54uj)f z&&&H^=s4`ew@-)@6j;*IlHckvixJ~|L-Rjc0EQXz_6oW@ecTq7GYcS}i)r?FCZ)3? z7(VUo@0OAR(3KJWjD=)-{2h|-1hH^Mlo7A@H8(cEF9wNbU{bJ0e>hNz^N_rf^fe^I zXwJgS@cy_m+7~jO2urWr)eC3vd&SI=(uIO(zuyCDdIH%Np=t!%Ft9pBDu(+i$G*y1 zcIfPhJrjepAVeO<16VLZVkK=|Yogfy0tYGrO-|cr5;W}Dc~mRI=l1%ppa6s%Nc}kl zrcl)D5>((giSag~Q52BNxA{`XL#+zupWk$eP81gxr``e*!xZ}wvnM$!1x-873r+kC z-C^E3G)A^U%6dehNtcv6P`FBT#V01Llc?f3ch4zC98*BI9lXm zSmbf0p#JDPqLyDiP$6r8=Bf-GL*KA7nluE8ojrz1FwS(a^IahVx;gjRy{{p~{pXGj z1Gp|qOlqD&F2z&Q+nBk%B4HU{Ea2C(ya7|=9TGNuNJJFI3;<#L-{LqBm$vD0|3g_K zyWSbk66*#WQxs=Iwtg-yXD3;B#gv}Nt)_&wdYIW!ppd}K*7FZNLs^%mrhgckre#^B z{#z1IgBSy!g%{oGeJK-jI2tKZ1-wVCJcJ)_FfG-&o{xLari?AHaevah%?SBXLEsDZ znIr;$q&>+LW|m;yC;+aa!#}2pmVQlepHz4qmXN8Rn&NJ8gx4G~aX$l#%9|Z71?eNHrR#nXW=30=ghz z@(ugVyr2z$_0-X_M-@sbGJDpHprGDXsP*HXYA_pLnWql|EZ;mR855_h96dzsLia|X z?VVSDTj=1Zc_c6Jy+YUMeWVYz`!i!%SXz{d(*~s#Zty0UK(1D6w?KtwRVtYNA(e1V zUS2RAGlo5V5R9o`<_$p`KFhBoSOlJ=PeO6G32HPL=HF~#WiU2+{1=AiWE+#L86Kt> zQs@FYMxX7EXiEW^qcSoj?{;I8!$M1Q>_gfmuuSp8`M3?8a7m}>fwX~`6PH$3lobT= zZ8uUD%g5$3F?$Qc(h8x?L)-5wR44_D&Jl3OK&2acozk#@mi^Zj;+O36NiP5;SjxOI zK$uFB9I^h>`cBr2-iE*pa<2ByjU_d;NXSInob^Sx^bUXMp8kv%x(Q5tM__B0m;z{3 zA%5TA(Ua<9YACR{&~$tKm=9Fws!kFMg?*?(`bUTeKAWQ`yj5|*DR|zq_t+9eg&v%6BCrDFrQL@7x$FeX|mc#wqLm zMs+X?O6b4H-%fkmvJJ=hiqA8(E-pNxS9s)n?)uybo8}j<@i`75t!?^nMmXV?jukkR z-S4d9RzndOgOxy`7DRNS$|U;P7~8TrB5`*2w{qQa<6i#QP+kDRFb z)jxopE25tt0+G@Kz$RKfIfDEd22Wkbsh1Y!@+rY~kNy#AKl9qFD@Gn|4 zX@UyE>sdB4W1I9SoOCP#TXMWd9MQ+zo!W~Wg4Jh)silU1*7elId0ihXuJO?ZdXDiv88q}L)a`3Ex|saGpdgTA`= zYMtL^{^P+;eThp61J+V$%}1&LO*epgn}nAgD^L@heFnI6eSiT;DB|m&^#u0%ssf@VvF<< zI=10A3L$YADIKlXf^l#OU83gl_nN(0%nS1ds5AmQ9R=QiJmRDDAz=|^?Zn}4?HtpHQ!z84j}N2->;8n?4L4V9JRZoKJAR>gWDlZe5dFQDs#1VHCjquR zQ00ZljLTexUH)a+y#WN1w)^FLO@|>L$}$LYWML(x2c&fcx*zv@au1(QI!^w|`I-zx zKBksvQj8sTQxyLX6wz%IEcn}P7T@39wfRYj+tBQhe-gyqC=d}sj0@s$?SF6L;W-FB 
z+*=x8L25qKHs4{?wWh%eHo&;~+{n=e(!B?_r3P_#Pp}PUXW@;yXwe6<0Y*$3fPqh9 zS|jOmn9G8I07fl71?H%ihfw;iYUsdnMa=m%8s`GWk*4yz;YZzGuwU)POx6R6PmT_C z15Ipq3gz{HdvQi}1lh~MV4wsX59qD-=^;3~MP|Cr;`=M&LW>-k%nzi>@yEdYZ9&q|Tu7o^WOUlpsPJR>-~qE2;du zk*|Wvy*kg)o|G_9I$%5Vt2P$=mWBO;)3sqGlUcmpVJP$XMMOptedEBPOvmmZ842(n z;dcnVBHjmGIQlHJH&u$=^+WZ#?VvJ&AA6f+_wCBXJpj62Nz^AI(L6v(fwIB#KbJ#~ ze0LO@P0T+7MxdLW!pK}iW#_X(h;Xn6f#$)VUj z%yT*C@2Z`&LqyC7$s(Vu!yK(f#+h9U@>(EH@0A~z$>Z+XZu zESn^D>=+CzO4Eqj3^ti@b;b70ayB#wf?fCkb~}Fv!>CLFMg&eL#HD?hcl6xnqTrez z6|xl)&N7F+%2aR&QD;G#PjJKl&?@m%nU~YumcJwX;?o&0D%5dPH3A*~VJN*+(5ypn zimakf-QVRUp9UYAsx3`JP-5JTWscQ}9}NN}Bo47{nGC@ip&&&9N_&_QvTDApEK%eg z=#2=dpiH2bSt(-v2m_gDP!++Ma8RjW9mmMNRF2<0pJXMGk&*YshflE;tkOeBxNV@w zJJre^tM}C(KIq0$16Y*y1cvI`L=gq-AsBLyfM3~=I)ZMFgxi<|@%iy<@B6*#-uZ6C z?L)r7zjf<3H!q3@rtB|>glfs11EzomDo~vjt@04kfx>9D0jAtBvsqY$353r~{;F^m z`?ehGo7G8c>6b%rD=4B{YyYSlJbz+hV!aGaC__UjaP**1e9)|tJSze8qRte1^Skow zLPbDhkQ~Q{dhjO!`HjOdDBdtC=&oFl!~QspS0*pqT)z*teWf_s+5@L89uw9L&|0)! zKsowM!@`ljQ#FP#{)^J%jVr@$&|C}j=rrf2cEMUaBQX@>38YIIFmvkIX!k7ybdq8v zE`1o(l>bmqg6MfEZ0#;^P&R(}cln>F)>0Cy5<`9KZ-qSA3}X^#dNDvSDN{5yc!*14 z{ih5}G}1har%yLN2ht|4UTs=ygQ##eWyO@mY7dHQYj@YG!zT+_RnH$;mClp*ZYHoP zoW#kx6E0>n*e0lUel28mULbI?-~SQd(&zl?5}Ur(r^-T1ZlNG0HHWQY4_U&kp+n@f zWom${otK7S#PiLswXjZv_$VbiS=H$quN|)E{z2TW*4EPx;g?6~N}|avxCT)42=EVdqW*`B;@6>tH3=EaV?p$lO~-9Uq9C>BJQ5^i62@$@q+(4EWk+ z=xV!{TKMIZU2ZHstEr$+d1cX2+s2O{#IO+Dm(BFJ4N3e_GH8>O2^UQx+qfME0Y7$r z?5_OAR(ro4g7)34DqqTc+*o{c?2n_aS}l*0PEuIvw8c{Z=fwNGABYBSV8_@@yw7mGu$UytpG!B>x3amMYve%(9B?kM<$ z5_U3WFytNZr}+>>VT^!X$7(ZeEH`=Yxtj~DH>-IsR|#^`up4QLHHe>nfp5d}Gu)sR zE?@v#?8+UPek<_>np@YfDJTGPt?7OJKy>=zR6m-sd(S^n5nEc&>>NCOX-C&bOjtf^i2)LRQ>>`kxM`0tPxWcMayB~O* zTH1eez{1(nXH3ttkz4PdPmlhIna?+uC_ePh@eKw5l&CF$(tj<~hn(oDj}wPRaYj(( z@lNlshYtNi6gjs3qrTXp!m&k~K9<8GKy}UkE9t0f`>>C24yn9Jw@Digfs* zJ?pVb8Bzz5?&%5_7PDKH_x+0y)&z>{;=-VF4(D`Q8qQF_u>Y4{rWTeY)tZ_(ZP4AB zpi5UmUKjy`;1ZQ_cJb(-HGq?Pxkmwo!_w3%a@s-9lY){syAQ|4ZQf}DIS*wMp*%6a 
zHNI_~8_$*(9HF@z){w@UaBa8WgI97f5vcV42N;PVMDQpaAi(#t>U_I4_t%&EBrOo1 z7*xc31sUIInJ&n}lU{nr$S6tMmwn!Bf&dw`8g-*VqL$Ky_wb@)Y5+c7Cn25pIL%RW zUFBH9CkfpMLK?HRv}eb={_7KJYkt~Ns-1K3-77y2t8Vbl+0kE-E7 z1P*B~!z&m;`R`|Ry+q=gGO@;5b2(|BMB0o`L$bQ32UenIcd6A1mL0i|REt)b~@ zVE3p5u(9Q(mF<1I;~!*~W@ODZz&M#;yt_~;iZLhyejXZEh_aVtR(QVaM=|G8lO&f= zV{8Eic?m9<)Gg58J^=tC0Ok+ye){TU5F2Ei2Qn~( zeDs6BPe@gUNe&i*IypZB(cj~J{hT_sWEXhND$Em9gi`)+T3+NWvFHg})x#J@b-pV4yMVpJtN-x18@`iFI5; zD%p_gc88zlRfuU<1RAWDq;?Fxk{L=`tGcxO9W}Wcpbm%zD3ssNyGi_8-&uPLd zt0fkYHeCYii}}&_Pdxb&hY?7=i((v+)RLI%?*RGyLp6(!Dq&0>d%6Jbq0+4mweMpgHNq-cc@E-ym*_2AW;UKMj2PmxJqE){ z7xZ1E=h?I=+9g^@2j2u5XhsRs%a8z&H#usOjP1NCQTL!CHoIi!smYnqc^5P4gu&hi zP~Jm1)?gVh?Q+Ns%Q1V+7#a%o6hnm>%BS|?zG<5R7K#9!LO^FB9OpNxSDe_rN4m~+ z{iaQ?9UvpM;Rblb|ESyJ+W0sp7=~9D?d$>}MGf4N_w%^}GK9ZCbiF1KRCqYU0VD*j zw(UqWgmm;#Y~bPf95{x7TiJ9I(2Hx5!7vkFA4k3UP{q>XcCawmfE33(FbgQz+6+A9 zg7yZ43W|!>U9`8q|0Kxn^~cl|s=qiw8}Vu1!=n*MW%tvvzsr~CS%DD(M5#x?i&X&T zW_ny{H}HJQR0`Pep|*a~-C$vw#5XWFxC4ietxbRwb4=cR$^S<`V%g-_0~s2gp@w?M zD@Z3oqeRhc%Ee@hlvJ>vZ84Se^%RziNy;d#UOlYauDtCZ)*M z;-wz@4tr9*ku?f%6wWuMemp2xqIc%$qkfb+4$wv^bi`16k zDCF;n_A}EAT^TrGrmROy@dIANIlUe^__ZOYMeIBgs6pXh)GZETC*|<{w&d_}A)CQdyFqjm-Y2YQ^H-d@5TaaTxuw3XS*x@DCaAz@D;a-?vL6 zt5XHd6N%zTFN?u_Z%xyiD3(VC3&Fy;>8g6jrG>J(Hi+#&F(Po-9*_08h6HT>uWG(Z zk4QD+Baq+9DJl(VJ9ciev|rlZE9u1^dX3vj4S2Ti(;#H6Jex_52QR@Cc3c!W;6d@D z;Vm5~+a@CCR2YfXL-;7gfdIoc0ark+9(@T~roLI8#~E*y>H~;rAXV662xNVFy4z^C z0GgN{f(5Z}KTxsNMFA}zOTpX-YozR7@+)FT1nQ&(2BY~M!o#f~`UBi`1o4}Qm$LkM z<%-2;ieiI|O`Q@|Vq~9TM8Vtr=tdG9o6x%K`(DgL6%q9okhHO>%;E2`m4_LULa~Tw z@ud`YYI=b2DMDgH`YATmE~gAc(EJ=7{Sugl5s$cU>E~_oS2eL+@(&h1@XY)rJBJ?V zN=fX_YH)$c11I9tF|hb@U6{@@*kE)}Q5b!Eew8>08dQN8l07-ipE~u6ko_G>sw5is zN4eNAmWbI8$3o!fheo#r^CsXfl#9#FFdzIHidTzA~pK4H{ z;RI3l!q^l9@UXhT^3)CBhaORrmjCp5wT-n9yAy}=F38`}$MzQo53A?6mk(6sFns_d zAA%M!8q~I2Olro3!U_dokuV&+J{zzQ`!L3m>!SXgro3tCa6^np4mO%JgBGL0Zw+%PtPUH6zTnm)33j$@q>;Cd>qN5$1plE22BonZqQmi zPi(aYwUZ=ehFKAJ*!pPs)u5RECt8hv&6*Ve4lw{&MgUn>M_oc7B2avMXrAR@r-Eh! 
zxR_op&S|X`s&_?%sU_omspi~S*P~xl%&v#1Y%}^7Sd%_BaSjZfiCf@hnXI=1vomlS zmLZ*iB`ISVSXZIUq5Vq7Kxfy1=IGPIXy*8f{8BaLUPJuCVM9X$I+aQX;C!>xosztRg~>+_eH#RBA4Tw+Rbr~mwBjct z)qhN-+I|&Qu65=tI@NJeWSLRxn~%|5qibc=;vVy~SI+!rug=_0k?OwlOawG{{tP{v zWOXygGp*3=&8tn}n>Q5;8PxD~T@<*m-K|b4*~70rW!W{g*;nV}-jxtb-5LP>t<|}n#lGXaCF|V3JkevU!Y1z4 zWVRbm?oAj@z8sL7^3@Y&Y2FNykx}psMB8@vb7!Y9(o9I z=HTGq+MX$^=;-J+n{4bw7SD>rSKlh)x%MI?Ffh<#-*U#!6XXh5-&umho@!%dd;4tM zM*2PqCK;L=2J$c#rEG~=dTEzcbVi06dNq$u0|=Mtv;)+&6U>{kiV6}=^p^C=!x><9Q}{Q?5`6KZ3~cI-=Ix-zwdaJ!8lQn34YmF;~~Q(SNF zIcqDcf4nBJ!`$)rf70Bnvvb$3Xnc(5WvX!;@)V*d#PeXVH({A645&Ln$55OMDqaGF zQ*>Xs;S#n|}dsZ+r;F!s7Q#SUdQ znZZ*Qla!>Olb{Mg^M&jt-H`CiOm(hXhoMQiUE5PlokWf1c{rAJR+=~AX|IBOKcc3l zX1n-ChD-8`eVZBP^RTqWQc#5_1{QtxDf&FmvALyXOL{{b2%*T3ko_dX3H$7?S=VPn zq}T-!PT#QcaR=Oo35Y;+9ap*yB+M+GM*<31E+aw55YZkoioDXvfe*ODqMQryKCe}q z7#qKT_b!A|n_xz}W92Xhp!5;QY*lwQ zk1Xd>t_}h6zIX25UsazfRt*iCpbP?NPgtY$4K9fS*S6jUo|dIH7q*Ii#0jhM^vn&2 ztI?*dzh54Kli%9f`l|MdG$05VqIB)KyOo2%8Ek}@pOI4rMXd}%f@*QvDrsuQ3=a=) z&+{Z<5GV@I$n&6@;8YGtyzsol^@y0$3nv4ECsaP1-pW4O+0%2op$|=9_WP=;P)rzy z1HcsX=9#Z9mmv&v|Ndt#9iLw2V5)(YG^pVR`=;OK7b`IyV~Ul9IEbzl`9TP9Jn9R! z2CuWsm~HM~vswOPs(w=gh`H^@FC}H=a2x@}Uq=KmjEy*(=$M#;mSelc#Kd-3e0q6( zH0p;qRt;|#;zL#?a-&R;T(joI)-&vGXja$NvnVk!wA6smu}f!VWeoxWH%5a^%;mu$ zH1XsmSY*;;C3bM?eQ+;IvWX&#b7`NQ#pRj=;UhOZ)PV4%>e^aaUqA)+SmVY36d)bQ zPeBn}<%Puh$7yM4*XY<1IuG z+^c%SjY+TIJQ*xJxw*N&V_xPW69 zChp$4Whx_fxmURS(YsR-SF2+mmj?oCe?yj*khP6XH-t({et^A9hvA!B#knuF_PKa4 zwjdIeQp5?~JNGvp_=AB_QDi2~{hD><+1r7prY01qRrHzJ>%v#NqZ71mhu3PlbGVZ< z&#b%KnB0?qe+Z@S?(Rk|aA)F}Fij`MIz_yZHmog*pbxPuR%IRQR@zt_pYe(Lc+g&T z;4A)BUGGUa^5fUK122d^#dy-<&jWnu3IJ44kp%)~2)36+S$8jAo~f*))Ja}a?Ann! 
z3JM{B1C1bc1NnEA?(EyQiwEwtl?r7aqv=~lMo}|eT`(Q7-CX2-U0oE$Isck)s;;Ry zDmKZu48$QOF;N~OW#-ve$Srg>yc>J<1?AykMhI37BW0?@X|Ngxp&JAo@#>U2#2+L4bR^HjUQc+RS&)=WzodJpq z$dhP#4CX8!L;raj^ES~?uKS{P^5nYUijDa#2~VGH!^YVOos+VONepbbD~&sBvPl_1 zPAItA-lN-%#08U62`Q~wYQbQbrsp_nXsookEzUS?sOI@&hz}tBn`DZ|tE;OcCES*u z4gF)Iqx3l=&t~WD-PgKNT@nIDLsz8tUf^JuPtuVd0bBaxpdc# z-^V8Ka8_YU8$w1M^*g>{r?*D-#AnL^M)JHkwE({yT26*HAw-TCe2u@c60%_}A%D!0 zhB154W8Y33w$lmZjIDxZG}wc2TH`ms9`Tz|_+Rto)EuT+a^GjhF&X1cGGwIW4#gih Hefj?YJ%PEF literal 0 HcmV?d00001 diff --git a/docs/vo/conesearch/images/client_predict_search_t.png b/docs/vo/conesearch/images/client_predict_search_t.png new file mode 100644 index 0000000000000000000000000000000000000000..97fb8b6822eaa8ee41ac14800bcff0aff6cfd457 GIT binary patch literal 28522 zcmdqJd05VE+cx^cjax-GGK7SRM$tf`GV5xdH76-54H}guL%B_rt2w36pn0A&DydYW zL7Egwnw92p?Wgzs*81M>d$(7 zYVyn1qM@PRdCAoWU_%EFHEc1$!3E&<3^-JmaagW8BI(L&0$RJozIHb)-*H4E$Ff5$PK9d|kIL!t2V2fxxDe?){_|Nwtm4s! z2D#SHPM$oe_R7xG$=Nv`ud_|ctsq%9@7n=~DmoVfGjmWxM8y3~d@&oiWVpq|G*x32 z)XmJ|%cd(UrCY8Z8tN=I@Y^e|((%f^r>7?xZ!;H+3lB7%H&723w^u8;evMpLAzXZ$ z(4~Lz2Hn>0{ry?wgAbfowrm-BjR0PC5tr#-a=({T>2+9vnc{y@yKIFozgH`ETTqLV zUT^>N1NrPL>#w(p7AAG@d6|LclpxN+`LXIy5mWlR0KQmU_~rG{Z`xUA0d5Nu=S8i) z=o+|A1l(ugxhR!_p-a{;tVqE}ne(_60MOJVDFh(%(^*(6;w*KR+ghIOExc}00)tXlB?y?YVmX&L+kL=Ki2nI zPQOhuG}A7cXle$#wl&77%9kCeVV0aO+3l#JLU)=OGoG6}V*4Z5%P?5NdHjH%pP%r< zN8a9t)|MTJl?$O8&V77wj_v)E&``6<;f^*Drm}|*9#p5Mob%ak4y&zRF$&?KUpE&P6&3Ykm=+i3tcF@L_MHms?!HvGFxked8YAcRTg0^f zw4tG)cG^YH{kplkXp_duAszAwS3W-H)kq9pSiq2|W5VPkB=syUEm_~LV--4$O<)sz zFz@&KqrwX=tC)B4)5yXpbE9oLViSc85oFl*DM@G3mMyU{@=yB9w;0%o#UKQ*j>X&< zNXqH%=`nUUNYp%NKk!LDE-p?+TDo%l`wGf3Vy^ZsMJ4{TWy`*@1C7}hZQH%uvMru4 zm3>&je5@;VBsqUr1z`tUA36(z9E<>hayya^?3oanUZn4@c zH-W11hlny;oRS&^&6;xaDR$*6c~`PGCSbyQGk9h5ey zzkK?A_|c$esh19!@2ZmgPN&l^b~^Mo^3dwv+}oH@W4P68zkUHXEnYh-p2Qt8&wS^T z8=s$fz)1_>JOAm|*RrN!cX!&-(u0M28~2G=w&!ktdbFb@<1&^A2ivjhN>d$<5qmf| 
zdIkpg9336QkJz5YeyANkyMo?5G&Gbtq*467Ie#)V(k{exh@y_1Ldw{ujz~*AP2Y{U$lU?Obw=n^v*dL5kB%x=t2oalG!OQrjL&~&h40tjfpWs@SB`kvu4d?v{|!^ft&r% z)w&PQPTBXo+eE%3BPVAk_M<*pcD64zVsNA(B`vQga;QB|6-#v1!p+3gGsUzaW_Gg8 zO4Rrr%iRTxZ?(-mR%+Ks8YdGKrdAs&I`QSkNMBy>Q`Ot6);2ynwOTzvLw<{(z8cnE zK|uk_#*Htqj;~Dr`s$1=rWUKfX5UwTMqgiFyU;m{g!CV^Ps~538XkGKVe8hHSS5&u z`tiyyys*Aw&6?tQxVX-&Ubo}J+eh26U=j@67u4w@?u#z9!3R$X=wz!UX=h>IMlZ}w znfKL4BXg#9*2>bICkNw_baMI&r@k~uOtrtjHi`oVAo%-(hyVLD6dN_uS7HK8^9}d~4an)fhMPD?QjOKoT%! zylv4XXuLO7nqI|GL+$D$vvzM{xI@RjzfsxFF7wdU&ynO(c763`A0ltgj`ta{AHMeO zbO-JmddMR5M_oh;UKe*YBk)4sWwe}$SFU!@T+hx$)`=WCIun?;iX8y8#JWvZlXUY`Voc*rYC}VXjPLJ6VwGc* z)re@w>Xnn0{-9%37>#IdimZ(s%kcN#NN^}^?8<)*8uQ*_I0fIgKl-p=xYZ) zC8?f1eFtc3=OzJdHAzWHvxb;Q2p4Tk-?8(N3wG!uU8Zv%!B>=0Q#tv!o4gN;!3j@;?XmnGmto7V-U#=2 z-8>uCY%VUYIE*A&I|%aocr3h(u_9&xO_KgygZr>t=U-FpHBf!ufAY=Gl9J_BnDFco*GfT}!B8y&jPR1;n;l(iI z-<`LRr{_I*P&&*$nVFux zf(W4HJvucNX5h~dhyM_I+E`Ts#Z?#+_{q;h0E}Ab90|#ee5U`w7+($;@;@O zwf}L2im8_uq?bC@wUnKiY8js$NHHJ$oVHEW%wPY;P?nCQB@&|ejmbm;Qt$R*JgMdq zCfP%Bm~CZcW#{h4=Ngmsj+U2~8-I3RT9_I@xax0+^(ieY+w{*ryAXb_eLpK7DncWh z47f=^hMCOC4wk^Hnt3+SNXmG)Osr$3Ii@ltpc7zLE~xw&HE z9bby|lpwy`wOL6sefOb5lQBjvE-sk>k}lFK5>B5!t;x3R@P8d1mQ`L(8GI#=M>cHO zz_MdUB7%&~ZZ?WKzN~!2F7Nh(VN&^t2?XBRWPM|5xM}8hZ<6=TlgXE==-4!=;#fY` zCs+cPU&VvR>leOiI!Mv#pGw@|7C=?mtgOk9Ivx^$iIx^M|CA}VmLXw0c3P-mHARgr z_bgx8n5gB)m`5I%p8Inr{{BCxDZ9Go=jXdeMy%|a@C8{V%`TOSlYKgJSOiz%w-A`ZaU-cL{ z*4ihB6%nbk#7QM=<57BRd+1@SrwE?GeMPrdGn;)WaGKfva}~BV4~-NMuN+5`$^OP_ zthO5jI2FC}Vb{vP{;F(fQ00>GSF*NFJ^N@I7p*nb@b=Y1Pu8(ZCgL)!K_RH7CI(wn z>4R+zodzk$F6k{oi;9ETYN5xR(h;b32M8MIqf87)Nl7_*>eK|FQ1_=GyiJL*lBV^a zB$1+DxI?D;_3hP73Fif~uC~;NUTB{A?jtom(;LBe;D8be(Bi_=HNgkV46nMl6!bTz zkhB+v;=}o?=axpmnE(MDjgKEc(m7EsojG%+s;x~2h+~_u$-{-FG@}Zp-u#dBw1Fwq zb}}giWA_n`S=O(AhAhl`?3ixx{Mdum94jk9J&cv~cT0Op?68!SRP9pgI|`#@-a&yyw4uS%)J`AqlBm zzkc28H_9P)dKAhi9qgV^;mh6_y|X?$!}*T5y1Lo}nc&e6l}-Nqc_r)0$CDgqAAV3( zl;C1!_R*cWj>lQa=2`BE8iGT@`Th#bCX!TX1}^#haRzw*((>ki$(zZIyUhr%Z^t^{5R1MLS3?L 
z+wTJ(ij#k8rAIw=`oX$(n}{?b?QDhA;^SD71sE6@9x%3LnFrmvecN^-48NmConv2Ju&o(R*9C$ zvl&Lf>i#)BKA;9t;IUyo78j;41?k)j@r!|xu>n|xi>E=$dmeQ`?CPD@j|0q`b-c_90L@qbjl(7j2*7)0)u=h2j;#rlBFj#cn@N(@EK-muGXP({(DG zgjd7Pqay_`da8qZ#YlqTdzRYeC6r$xnZY=kL|;q{K&FoeUJUWp%Dx&>G(RTGcEsj1 z2Iw(ol768!sEYi)nU3~$|IpCTK1p_VcA9C1Vl$~d0Movvy4F3Fh(}R#O6zgd=g*&? z{6M7RzRJy?`wUo7yg`MouU)h_Yc9)JT@xZ~ywwY^P(t+T=K~D~0UVl8J0PU-KiVcL z_oFcKv)>R1|mQ$5`j`GB8^HoE)z<`C7fXq@=TPu$RtTZ<{#6<&Zk&c+ylxeDJKh&!J^yyRWVz)xn zNH0kVf+D-d*Z4!^2_PpwB&HbrKo@wZ^_6`BEbAaXq%KTX$k}hHm=9-q{7i2J^=Pw6q=^bRM=H%oA<24L+Gg!nK0@Wi(LjiJAr?oo}v~Y)IXpCG#%>!xtu9Ay?&28E0!EJ%#Js0UDp&Fe0fj`yd z@XJd#SvRvVH1A)+wd;Z9#*th4J-6s9k*!;`Xh5#bjBATDh=@82YTk|3L#f!08^u(d zK69pz1ou)zoq+j&L5+J{jZDqW{a(?auQmG1*R1K)=n=OC^;a%O1{T0K7N*mKxrf?A z3XQDFKc^Y7Wdld^RudlDEiwCS7tpu;3cpAW zNT=Y+)t5&;KRL*Biid}%MT(%6ux*~@(Ee$wCmq>djE5#!U=iK3#!=|I`7<$R)|6EQ{rGwC}HU#k3PEvpb`4tca)TMm}tCjXzY z@GYs99pZ?`KcC(hR{r?>40-B97Og+qtcvZoKJoR9#CrwsYpGyU_qXR|fP1`f!VJ$7 z*#!=z46)E2@r`6D!n~4-4@Cn)4ELc!pCm1@`cUHr(Ea?fu8&?MfB>AS^6wwD_G8^N zMyVCuqCjcBzP?Ot$ORnu|Aabvu9mY;VgvGIS5%C4BYY+E9}RAD$`VKY|=H7 zb>o0oZXh*)^WsL(ig2A|_2k;J)r&v~nhIDL5`2+8dH3!;tCM5dgo@JmTIteZa26Qc zmw;MF#1$17faQ;$IAvJHMVK5!TmU*Q8scmN(wcgTzQo-$kaJC_eeJ+tptck#eqr*! zH1w6rO!~27$HIoU{J^TYe7zX#DSrHR}3`*T(`H#Zw5BHhb1G=>OY9uqZk2#$#Z-8Ar&SDjD|493+6 zkk@1s6tcy&15wAv<8gRJMb!yr5z?0$1I>vd^eid0=NA@+DhRr=Dw>JeCUPYba=}X! 
z17@w6agr#qBpUHnmWH*lE>@N0K(?>=T`W~UoB z5&%s@#qHxc36(y{WsUscTWgY0Owh|$U7hT!%HWyRo}FBYl3z|eUhSi8okZ;S3ZKiN z=BUd_eWjLuiG~Xeyk5=T2qM&SOdaD<$zGG%eXCjjhCDcq58&+WMhd5Xv^p7F|GF|@ zzn*wm_J87%xiG@VK(Da_k%tOZ`!JgnicXT+6=~b{_02WJPRs@$6t|g9HF+NzFtlT) zT%?w1q|@cMyep`3XeZ}ESlG>ugU|zpnkZSpd<8R%L?42hS`Y;uA{wD!k2(x~0IYlZ zwZKPeF_+M1gj-s%dYu{o6=^7(jpXkb?h@oq7I zSp$F~>S}8K7$82t?lWi4w)$9M*Dch6{?q`l=ouPH0&#+|`m@y9^K7#&mNSqFRFj^K zmE`H=MdS>CdlfuerbXK!{3IT*CTGG4u`yUcH&=;q%l?|wAwc=f(bo>{wYh|10z~_d zCssuvhI1G!InehgGdjNp2kARkWwu;3L5&d8+YTJSQ-wbV)=Et#c?-)o9CKeRXo*(c zr%GSgRR7%5X?}Wi`%cyR>gpZ5AwNV@;^-jfow`bHZg;C`Y*Z6Q7;(Ww)nF7tFa(9B z@7qApnu1};w)#@gf*ZvkI6FbpX%LYY=H-0{DS%(^Rc0Px6pesw4Yzq2z7IagQ8o$s zJp(L4Os$DfZ|4!8j!O{R=wISJbyl>*()vUi>>~yBfQ&?`x9Ns=3aNSEbeg6>i)s=W80OoqFM>>l`XMP8y->f4Pp2j#eGm zXe*4s-T+EK6!~l6JNuILXB0;>I+D%cs8aq@{8-og?tf7}cq|-@fxVrlzupA~%xC&O z2j&Sxm+?#n(x0yZhDaWB@CwU20b!>vt6AUn^T_aUGRh}4*`WP@LT$0?=dWdVH*MdZ zu*JaTU}ySHDL35+e(fknIn~0&oTT)qi&a(5cGa89D=NrE)>T9sxZ5h#XEAoH)60<( zeyk`F{i1mO`2_u>oa!3-Q*oQqY`A*yU~7-H?6)xKVz06TOdy+HA~rey2)W#tq;vRg z8fuGcpC`d@aU(jC{TY1F=npF`Ba{E0Q}5)-TVRMAbFUfQ9nHz`=Pex1vS^zO^r?F1 z*y*~DXXtfIh{Qk-Hcd_^ZHVmQlP62p{?0~N2a#azZ5tC*4`aZ8cmXgH-G6y13vsS}yDw{yNh964fs>5p!5#oZl2oBy(urCN7# zP*FCxi1Zv6IgNTLpY0v{+YW%Us)h>3X&gS~AcoP>2UV#f ztp<@RR;GesumUF#J zb5;wp&F5LoJFt4RzF3O!fs}oP>NWdn%ePc3@X9$ZPsdZfqjs_~Eil}wrGFmE*~X9< zr|O7vV4tEAf16|{f+X8j3?A+tv&s9$G9QVUFZ6h6h9I>Urm6yZMe{Mtx5mfDRD5$vPC=JZn#UQ3B@}N^Ttp(==g*n~21UBiy%RXe@HK(b)s>W#yepPD-HSLyRZ2m_Rt|vF{ou`M*DImCf;?wcwj4fe-~aIxyW8}$ zruEm>E_&Cn)v%!`VfhC`pz&?#-Tk}u!;_!2>wo#PxXwo`aUVSRvDM@F&PZZ~V)@5? 
zaVAzKKdm_J;9m)-3pd?{LN%#|Ba%i>xT5AcymUC4{Rk-}K;f;x`v*H#Pj zS(~zK-jWJ+B!MIHpYOfQRN~jZJ|0Cy#RMbD$-7j=gH(a^aZoARVSi08MVzCKzvtqV zKJ!4vzF#x^uhgBM(G=s~e}4dBn(Min|2#lADltvA6%@m<27lD;+m31We^#FD-k7j) z*{2t&EY|b%zbOxMF?ap`w!=wL$Ce62oYI@_QN70rzmob{QHu8UYyC)Z?54nEy(ERd z#F#C4@QVxUakY7+S0j>DWv5zS1*9+6^&$Mgg~#O&qQ4YI?~CyN_ARQviF|`S@D9GA z8Kt_TlwUg-RTQ+4LkpusTQoC8AL6NUrKp)^IAtxb``_>aB?8j4$A>I4@$A!(YoYsn zn;y;7*(5a`X*U?$Wa?4sNyf^0#leI1g;O&54m9&-8OJZCczWwUV{vH@*O_gH_gk1m zE3lc)ImD^+C8#d*WX=&q36=Xk!d<2#Uv{8bR$00Jr7~!drZ|t%dw;HoAltBW@p>P9 zyeG(1n4d}{z6L7S>+W3^HUUu!1pG_(+bH4<^cb+~yJ)|h>ih3R9;KgQM>vK$%?F?~?hPQCAG zdigRdR?95R(mGu@W@fa)8A_76vfqM5iMHt6?|C;?qR_2sL!9kpEX7>58nOt2vWy?t zEiI^2(N#_^H(ts8FL5`P7_VF{`@KT;$zfUb1nuxu#~o71MCS54I`)-sNm5w0YhUnx zLk6;3DPGvCmv<2_;q+&`84`V@0=J3yK7Wy8{n1%CZ7EFjsSs9do)N2_-XUH#-q zBaa7*(46P5#aZa^Nb1KMboQU7##YFdk{f*}DpY%ZXT06ECZ6(KL)I0`-=Jy>DVF{G zYly1-U#*;Ck5;TsRG>;)$YXl+(EXyC1Q7 zlCk%20f9T2Ih#^M*LT<3c0?t8RPiO1{wEB~yfg7DEo<1H&b%g#-2EIG?wMpqtAvdd zH+Qf{Z44^YKlTgoLIjFbnq-)wyjk~Og!o;Ej}YRaBg;c*Xas9TgeRy+${8lLbi=J7 z-V@{FYM_T=HwQ0ymMu?1gReBV%eqWu`l3IkME6dKE$sQh5NX#EC1-O@BHRq{e3u6~dXGx#3 zrcD>L(B@~S?sC$+46i^A(Jr{21~DBVi>O&ptxUm0h`Y__GL`wlJ_sY#OYl^lv43nI zhff$*;gjpy?P~DeW4qzEaOy2x8F%Gl<`$~^@cvhju(ev#IEM0gU!2v#iZJ}+kq!wx zESQv-n-?u{c_o$jXFap(zVg>l+QSjO@2PLm5}ZI~{c02gUou(N4*8C&gMuIgA&w%N zd9+o~4`4SasrkcHs7-4jJN`eCWY;ZZ$Gr;vl1s;i>L?|pKk0Fc$Q9qFHPkN01OH88 zTdAf(C%+sa8JYWnifZ(ljZu-i`sSSy8mQx6Naj`Y2s_4;WxfwGyZvz3I$YDf6{PbyX3WzQC0PjyR>#A|I6!*k}GtbmTp;-V?@t*Atbo1ctsA**mE3Us-O#)R{VU{pQ|*B#@4PVJ)bH&v1ZUz>Jz4HopBM6ym z%^D&{FD$?tkP!RSS%$pWD~F+YkWyN~>nNqIdIj7QKX*^`aY#(~vuxR?o??kKYOTLt zPjhklJ9EcGamBI`=-nzd`rj;r=bEj>1= z@jWJyq_C2DU)wXh!66B5yFRd?^eDJ46-b2xDAI}3|*BQrKkP^;Q< z1c#RSZa=Jz3D>=gSw($n{^sBHIcg`p*`<=P2jWp+&iFD>UGD<-2LPX`#?}x2wTh}_ z+n>V;*2frXfG?@#psw(Wh^P@a6Z9KS8mWogW_q?H85HY-+E)VOJPxm-cGxz=0X^Vj z_WknVy!ONVuG7!Dhb+j9v0-hJ`uTUAlsO{zY}ajO5vak@F)=1vjWSwb8xn?>{k-C_ z(L3$!?ff!~6nEu1Hu_F?-PUY*=Uz6`5-QnXnMeB%8_zRzd}92~M#Z1^_C5UodOpi_ 
z&S7pt8d@EQcVKLEG);_)irl4>r{X+o8mGq7_r$9H=>=x0(sjRE?0B+@zLTS&0H%eu?LN6eYJ<9egBDbwkMT?y*aQ5Ri9^PNQ1*N zqfU~K!|zGY-1Ic@X>N{MPKo(7s+rbqGkx=SUqVXM{Y@m(^D%yO>~{TqR|u+D&_7LGbcGWC=5e5XT-*3@StGi!-8b% zQXj%63VUKI*h(ZOMH(H(_L!B7n}33&@_{#jA8{AE;w*;aj+YlPbZt9!J#}I8Tf}B% z2-}3+0QudHO^N?3-$XM$;3USu{>C`JLsnlBr7*hCNYsHKN$5y;v5XqKX$luMADaX( zoYqaIu#Hyt;9CF}JR%~ESTr6k4#Q#NHNy3=5i)`Wz8d;MdDl2$s?BpTJSeXU(|rTv zJ@`WgQ;XplCxg@t&N%9qbze$x6qjt&`*1QymFp1xh;D{e0_XIw+|+ij$2}H4%czxp zrZU1N(zjR3W|_;PvLi(c1LdoSC;~K;^t-pt94=kr%)>TF^v0!5ZP_o4sS(iJH8HSn zVZZ>5DNS*3rGKsWG=`iuOT57$_DHVWJC5!xee9xf8G{Vm28NHwicpyekX>iwwT{i~ z1o13UIi*-+V8m09&-#GU??cY8lXGCGhQl0Adoe=Jkx`0Mjb+)iDH=@p-88JfZIUjz zph5QlTTA>ZS#jUnJFvKT$--S1myArk*2iT?+75s*iS#zqk$)Z%gX4AM3P?7hwm;>J&jHJ>~@K#alga2{MZScobqmh85746L6Ss*RJF6+l7i< zyO{iAIdwi8v1Iyt{cwlMgUtXARQHykr-pIx{l*Bh=T(W+%H{0ax6d3sHAI6iOq5nZ zT4&j?0aU{rzoP9j55AcZ_h_%R5TK(3bl^=(B+En)(+*wf8LTld5BJPfe95tj*l$oo zii#tH2|78+B;&(k#noxM?Dz?~Kj+%5yX=Q}vn=?^D`Z3FJ9()R&%AMZDj+;DrXe7m zzjEbDjN_Ucu$GK<8%4klNJl%{Kx$oc^Z6LleoPrL#65L!p}2oejwtFZpZr}{Ki-FW zukJg|mh#wW(^o149(XRIwuyBH?L}IHdwZlR?{<_OXwiJpuL=Pe2 zYljC$h^g%Br?S2_rn1V(G7d;Y|9u%la0668b(o_Yi(GF!+IsLL^zm&H&Y7gkjVM}| zBe-!3rN47^4NcL&B(rF>_5=uIE3rfS>aI9}oCZMHJ?BJLEWijv3pO$9R+$z9wZqR zm8fQYKZ04+N3$*bkv<+F6Wn|F5HC0<)RE)9F++I2XInK5nYU=-|s*S{Auyn zMKJB~OaD!I`uO;etiCb;&jb!X4&DKJ6aq*BU_Y-;c49dKE6h{`PMHH~YO|r{qmHk$ ze!@>3_bxAw?np+4Xg`a>?9vlc8LMIeQLFt)+Ss~Uc;GHGR3;u*kKE7z`@cIq(r8Wbkf`S113fEr_GT?$AT2@!@%tKP`x&-da9L)9(>zDzL)eA&CefDIcEjy(!` z#)SC^8v;GW4nA70efy3MvSZDyjH4nih&!t}&lvZ$T#lysX)Xt@yMbVG(a2~5jy}?{ zsO1g1up}u#R}1m-)xdr*s}uuckp6~4c0ZNiWYKHOv1&%Z0JGEoYdO`v^Bn`VODb=i zsT%E9e|AX1`MQ3xsHybV(U&zLM4)jgTIvjbP!gRtZuJ}Gc4P688&Z2L;T!E8x>uN@>Pi!6&=3#w~rF~`5^ofbv$_tbCcSaJLW;K4C8Tpeg| zB@GY-PQPvJ78NBQ|ER^ymS&iNP`bj949hz^4ZKF(hdP`z)s0^)j^5x9yLA7SSoc3i zqIQ0Gv)dx|QRo#d-XFfIyG%?>LuZQ=j8`XBX1<6kjM;np*$-bMryc7>YFpl=32}+? 
z^)5T-H8FRaLn_DPS?ZL2YH`HCH<_n}pMHEfcCbBhBGLG<;Z|0o%@59~%-(W&P+n?k zM9Ec~W%2U!%fU%_=-k7N8#fBSwdLSEO>nRCgzl=9REal(one}GQpj6$$&s2D0^6k- zF)G*-_91ve2^7Q@BKLdNa&F!#lz(a$|GOgX)J1Xa)=LBUc!&T*4?uIO|hy?`uN9l^*z;-wU|?hyi0~O zdHE(Jril-yn2#J~pe`Xg68jCjX$@HVEL*n*r^WO{$W)Jhr#JBy&Lxj`IC@tx;=3$A zMfp)eRp;`47J$%8x&z*a9eIJATSKd&x*}lg;@-Xc1OOnhvUB0?b(R9v?*dfnp7sUt ze95Gl)xUZD8r<5cm9u|8Fx-k#&(zeE)e7l@^fYAxjGzauu4VnQU9u=bm9JmV)pwgQ z_Vn~@Ip&a%37zWm!`LWkj!-p1$7KxfGL#E$+SC=?-8BpEf+!=rrDo z!6&sTW;zhQlq_rt=16=(8ojz185)qpFr+BLAeKzCKD( zl2**xy-`ux!1W9kPVUp?TBN4V*g-V<+BpOU*!$N>|3!sBoP=IDQI9Jm9R6~>>_?hy zZngLDW>IV)503@Jhy%EKEOm_|VP)K=X0V7iU?VS#ba1L;p4lN9Xoz`Tlk- zSb7UnGflt0@dcx%b(4?3g-vf6C9IWs-3T*Fq3l(qfJ zW*-UvOD8rx`V|Z0EBd8UFKIc8FuC zFBHNxes;ays-DyzV^x&)(!t{LzU9;|_pkm3s8qiyKle}R#%yXSUjzYHFCyAf!$c?? z>_R)~>|llijG4vRR#WFkzr9DwI==XZ$B??Pnsmja85NsP4Il17m>h&g(XE5KRT^Ep zc8zp_lIpqX$1#kUji+iU)g_6Z9HM2L45GP@GzY-v)!oa0tg*D1E4TKO< zU&GrVesnK}e5W}+zLRUO3|9J$Nomy>Y4DxuFnW%I^hrSDgf^-ak}r;xuE*V1R|m;1 zTUH;ZT91Yb_4Mwmg<~>`WGAD0kf=Kol^i}|nocZF4-_!E zVUh*+oVouKzDH%??@g?%aWG_naiW~aT5LJMbc)oc@F>IwZnl7VuF{gnw8n#}XcPfAT*6c@3NNc`hO1xU!^RHz${`3l?2z={+ zBKMGSPoPdT*Y4dn%OSvZZ@`QByNQrqY|@*wSwK4qu^()K26Af}!gJh7PX^=GeaIqh zy}u_Wq7l7)SL59nGSO$GmGp_f`V=H7jYQ37XyhDtAXxMPtsc+8^YI~OK700z7Y>B) zD%guU}7`u(iR^Sp4d#naNKZ!maxz|qNy;SEW*;v*; z*4HrmQ1xGjVv ziePp{zu|E-L&FIkeRbzRYgPiN@cx%Cj~6-g)pO0v%wXfmB1ibZUCte((clSB7ECpx*dD|tZU3Hfx&h_ZH-#I(1q#5>bPJ)_Dv;KOL z1Q%XjnYv@YQc`#QzzOBzS0OjS_EUd0F4j3%g*6gWC=}HR@G;x)w_4(`nS1P zdhs#|q0m~w*j8}=VmjqK)v+;DEDgn!L{e_<>Ylf>6l}>G2#hmdda7Yjin;Romdx1X zqD=Ap27X1w%dv1WINdCkT3*>aIUKOIUHfSx>C(`MagdcXhVr zC_={PIWj}1`wf_t-Z?uqHRb5QkF|6cn|Dk=N(ydp+W8d8n8?U;#QT5w9J%&Ge3xB# zd|^r=fNrA&YRSeZKmze^9a?s~b%0^&G5@3qgXj!o96i{6^-5DS_1H4;$XM}kvN5b% zhf#-s8wDhZu7Ah}MBR(jii<={9u7^TQwOVK zKO$KF>(zNr$c)AKv`D|95~E>~Ey;sOJ>-o|IQ8=re--f;Ypp8g-J?+OZi(rssZ z5<_?QyP`jkpGDjF(&WI@lH#)b9(dbGhbU>`g9cna`W3^~fP@x&GFqm7yVa$gidbB* z4DP(%wTu|x0*|&aL6!V~36it;_U0aF%LdRU-3Un}*Ot82EL&cZNsc%nhZg`MYha6q 
zu|bs)TZUmOo1eFRpHt{C6iT{?QBCzh}UAWBGo4q(>jwQXogL^pCtsburRCek~oJOPJqSc0dGF?%JKl zG>Ogv`0UUOfr+9V%Ye~Zcy%Iq z$#*T;If97>07}SFmpiTTq+Hf_Rvu4=-{urxMV6MPBKZojQ*|Xny-6=@noOL1}UWdHLs+AGfFeoj8o)?2QK*6Vi4y<)B-T<;>GK;UXKF(@CWiRb zFmm|BWQ{&b;7|zSqJVyZ zePjOf!!y#1a(}ZxJjyWRe4msQtC&3)XWt-hevb#jOs7ct0#*_L*#L%@G-15=hHGKh zJ=Ic=mHt%9b zSl7-8>~9X59DM9HC$P6|yBJP|QzJfb;_Sj9KcveYMihsuDAGLUuolLi@v29q88vKa zotu`{NDyt%>%}>0qQDGLpb}hW#>m-Rq$e#WKU{bfZX}dQzBJrn(OGQYJcGi9_#qG| z|FiKk5;4LY8`8$Hw;HX`-BRm`ShsARqB#WWlsM_vSnzA0gUkTrsz zxPsCnUEANjf8PYd+J7wfc_4vcIqgsp%P_TsK)cz@AtPwAFT9vopM#)E02Cm2mn?j= zq>?6hkoKVxH?k8o%-@D?CN*1S9TwQVFbO^=Uho~*59|%tFF=~7$ZBd>wM6x8-MQBznlJ3&d(%C+!qe*l>UIg3Oj;>FS z*hY>q*!=u3;{5I2i&xgqLhf%_(p}A5#8vh(2G=E4d(v(dz6A-Y zqz|VYX#i+)#=$y*<04d1WfHJTwkXg?TCXfJedQE4f5}pnWjGD-|Lt|u?rvxTkSp(o z_VdbZ;T2~b4x$2DZboxPQnP`(9!_Gwwfua16yMqsTMX)2{D}Wwq>5Zd2F73RmIjxF zaB4$dem>v+{R%{ZLx>l598QGx@Dpo@9t9G#5-dGAW)X=Cfdi*4#6n?eOFVIqiHEB^ zd%8U@#_ z=sh>aMAxJw2fk3cKJZ>^;g2k;d-B=bjf+ZDPHiP5ax||eYC!y=EO|3 zw_4;eHzk~ud+;7^@A?gW4f=eJy9V5nn?(h1LW4SXxTq4kJL72g#AMQ2plf4PhwoLWSIni*SAntfZuHi}eUA3Uy!LPNkN`Ptmdw(|Y7uU>0N`+d{Z__?H z<=f<97Y@0Ui|%(igEtnaad4`a+zEMgA+f7;GvGCvWk=~-s*IJ5>SYPonI2ny$88SO zthbLEB&$KlaY8Vo^d~^fCUTckn~%ek5Rf}>Vu|LJm$8vi`>3TqjB(1qcfKE8S_r!M_=em^p2a=+E<0Pi)C%i5o9e@?UrqOKTl@E&%Xr(N>`Pf0*jN+Quj6Vh*Uegz!)K}}(jWL@ zXb+FF86ls~-10&i?TXmH8=8D+;NKE<*0-iFrKZ9|-?c|rPf>9KpZy1iIO;uc_&t{R z|G#ASpfLNt^x+hQ66qgSS4TY55jx`reR4*gF%R{5=Pzukiv~sy@m{{Y1Dg8 zmO&TJ*jx2RQAYc#S)pUfYzj$B>C?>ruJ2`4V%N0-r&6>H8@VFT@3+E2$qmaw_jzvk ze)t~Bl*4A{UJg_1;8a-YivH2hEKR)egu$DPmceq|H%mve(Vj%~>GS8`fS_(vS@6bU zYX~Y6{acPAIXtW%`D%dLDfS*Ec!jh#5QS6M-@o`y0aNgd6*5*5Y@aK~2u@zV9hF3N;~@Fn86KDC2WaNFYSOh!lQp8?6(=!-TLcm-t0ixBhE??fQIgp)Fo2Dk|#Qy}LPq!rjTl!0<1B zDV&nxT-;vffJ_mBKj_*I!NKE@J(im|Wt=~X1mZv-)d(=xzG*WKZ)6$QJSXVmn z?lRqq4KySdg_KJx@&MnrIsqrW+$=O z{aFOvAFD@Iw zmwe!E15_{?8~_;W;f$9)`z39ZrT2j!>3Zn*sRxe*yB3`rOa{12($f%9cO?)cb7rWG zP5ewOy*H+2)U$5yjCEePu38eIcNgERFVd;pcU&}f?Uk|oa&Ixya=WvXP0ZtB_)9@w 
zsq(qY1CoMsL8;l*{WF=4#>l@(#O6)K4fb9ZYJ;HR`nw-#GcsZ-OZJg&CTE(vT@lIj zI+M*$C#9xdDIMrm06zh(x}^9AQ!}$_bPMb2$lNVx{?8^~?Zs6Gx7f_kjZP>oE*&U& zIC{X%z)m054?6zk&wP^qmD>a?>I)a3*CRP2VEb9mxYjM%&mOfl=prA^9_gKB=4EQ> z{clCT*ICJYFN3gTtdSl{XJ?lZX?LNG2$}@z0$s+P{@?%_!nL10+!QKMaS>((v_nX5 zh$#Q@VS&~GpTdL2MpGX8in`lV_~2M^G7M~By7(w5#g(cmXHw4!q;ZP6HQLHo%5S@Q z2QvRIwS078UT&`W;Hzb5mFA~%wA=Q1pNTe{VCz!mK+{@4axByB%Cc#GW7XJ~b8&1L zzJu?O*h-Lc2x*>?>`dq!uf#}rTW2v>Psxddsv>`bk_7o z3-!Lb){0H7XaDoA23cVuk>L@n+9Co;e(X2r+lYSNj(Pq*;yR?plpMl^=5g28iPp_ZS&vda6L_wAKfSi;4DDAfMS1uis1Pm+lB6Pf}?W`ZX zZJiiYkboq3JNWLzRj*K|AGlr@9G8N4byENn5fSbd=iVo;0jJt3Qs${`+FRx&H^wgLyj)y7 zTECV?>%`4Z4nbo571`*Ylk-PsXs;&f%>J~q@^b0G$uDkc!p)Z?)l{^V%&VYd_b0fe zNl8Y6b#b2r27qvJ;>-t5^VM*GPkk&3wG4sBan=3Jl;+=P&$O2ZNe>chAjz zJ5g$viu5JvBjPs?Gt-zu!ZXBB%aJmF%f_c1EIl+r1Y|Ct*!|C;|8kSDkX5-Ek?Pa*T3W;(d z>?i~HTMH9|plkU79l>KC#1YA4hc*Z*cvO{>4PR=$9u@KGW`WmOxY+OT$@hjeKSB>k z#F7MJ=^e+ zZ=PJ*edNdp%gd!2?&Ialmuxc z<2c7+X;{@A9pY zVKG+d6G~48G{<+?-Z^vEiah6JbfoU?V7~{Ss`G6faisz%OB z4sNE-QIc(KY?(RoiPy8d#yOP}u3Hl@8UI@WrdPN5-i|IG^Ry&ls47HuU-pK4C3JhY z&MxcDh0;p#$bI{|&XyD+girr-?B(WQullG9ulkzoz7V%dGM}hEcpK!;|9ROmJ|QuY z>^B)j!Yvq?_89$r&`Q;_0reN*H=rkj$+qL7*BW$B**PMSnUL3;I#kqV^#4vJm+ji z5moUvD%oHeJCdS$Pe6S=M`uidpNMNc;dmlaE}GSs)-88C+V8TM%V|ZoFbKvoJCoOs zG-|k&IgMsKKO_qc%!Flt>?pY7=y(rt5!9v}aU___MvkcF=;h^=+|W2_xJgcVx`Zv> ze^gXdaHe385Sk(};z*YtmX$}mpDjwC-g$|hafy?RLa_~vDc9`jHE1p`EXO7Yt7l36 zcDQL?5m%3+QV5p=!HnKZmlg~!=sl+8o7h<9!3CAmGo(yL`6_!k>24W;`aSg$CEH)C zrEH&!?dO4pEz}W722yz%3HQ`+Icr7w?i_vcEvs90HPOIUovg)$rfeIGT*xl;t~s>y z!UrUebrN$Ctt_?;3@R8FR1mIToPBCG%|tHyIYQ&R*%Y3tBsnXD5iWr4e>fl1<2K;| z_Xn7JEXHkw|5HD3|YowxQ)yt z4|0U{Eo=>7DK9165k~aUq!z<3+3m_n(JW5our_5T z7u&;1sv)JG5B6(f|2VG)GuBp}1)2}>NPNfAP$MffvJ>a#gjGHx0ywPUI*%%y%<|Z+ z^ru>p7#K;Za+$Txj-rC1PGYBcJV<76&ujU;c1{l+MAdj8CBa$44i8a2hUtilf4}V9 z^JMq}{4wJrG8V?Dnb`y3myPC`9`@2kR`el1@W{tTzy)$;C3HXX$MP+CECTGGNUdiq* zr+wg%N#g$5hx0!=lTM7$#Hj-)2lBa%z4L&It?|AA!g`2P4f3v^hd?tPEuLBoDf)O08^_wTt z+8Hq`>;E@=Gz6SAr=CL?eR=rpCg3iwi}vapOU0moy+Dp3na&O%? 
z6oW=~GU&0UkU7om+IEiWIffypF0*KGInPkj*|n03V8#;jVYF8Vjg8j~r>-f?e_cB| zruCqItuZ&>9$U8~&CKIlpK+8C*|LU6)*JWb^XgPB|li=qslbuxA&uqt{$LsAYqf z+CiKIb6nfdS^qu9|IYKJC3f9)!)g>;kxDh$v?68){-qTRF;4kGEpNz@v-2BNOykbP1k}a z78=owGu|F@^^-hUWI{iMmKyWbYlWgMqrabzF=L`nZ=vqcwE-37vgdAtHEmL~sJVDi z*+-8BH;ZMBQy>8Ou1}t63d3kH#=fPis&ag_1bbwuOni;v8`aK=L}((bD{)6=Fd{qA3$!8q6e literal 0 HcmV?d00001 diff --git a/docs/vo/conesearch/images/validator_html_1.png b/docs/vo/conesearch/images/validator_html_1.png new file mode 100644 index 0000000000000000000000000000000000000000..9f92d06fb9a8209a2e2892185274150b3e81d356 GIT binary patch literal 28152 zcmc$FWmFyOmMux}U;z@`-3ji5u+fdX6WrZBfk1F~4ess^!QI{6-Tf8k+}q>6zTM+> z|9Z(tc2#}#xvV+YniU`;C4vZ#4G#eUfhZ;ll7oPFhYSJnCJgp1@FXU}%^LWD)&+}z zAYNYo{%+2V0-nHGi>lf|K)gqL{r3hUF&Pth2xBiMDFn0h_9HSUi#mZ-2?PWogcwLb z!Fm30$;%yms+s-Nie(Z@(*9RaL*c9;)uoR7ZjfeY!~5Itk4+U3)Q!5}j2hBWH%VP} zu_|S>sz?w4I8RyfG@Vq16|NK>t~nl(#<1g^VJ2a~$H?sOs_-m0P)rDpP z>l4{)$%IupSZ_jDj^)4JprZBD!RGe0plQjMkW ztem)hI)a0P8#7lgPygdGj^#02USsOjp+%TGp?a;Mx*EAgJ!OrzZfweKye1-LU5TKU zxx2uV(~q>pg1MK80I{K7rO6_fr_Vpkj3DNe2e)~Itcn_N|8pZ37u1<%$1FzM!FcvD z?3c7K?J`ToAl%D6>IWd%kxBudh^HcURVsFz=5KBWr&$C-@x*;PF+x(~ReOo|ks z$G8L}NE!+rliilSR5%zkH#Z}6U~Xe!uUIC#^=J_5mKp@~9$Z0F5E3_h5$Y0RwVJ-X zzGf1W=sph)jH8*qsU2ytsOi~=khHq-+!}f?&;n{nmu6&ORkkdOM&(Eu9{k0;uxurt zn`cMRcf}6=Mn0~5){-Cp(N>r_rb21Ss&0r%u`O)b#R!i0RWR_-MNN%3scm6heOv52P&bFbo= z);hkMF^$lTre#sLh=4tmf#C$}A=Hx7@)S0Vyt5SH#w95bwxk#N+VJPQ_P`S13bwfO z6OP%4l$fIvvI6I9;+JxFuPJW_yaQ7+lZ_=F_5fxER=XLy_S_#X37m`}3r97_70{N% z0~6YK5Z`q|V@pyw%Ik8fJxe`KTNnH`LWrlIkOWYmsgwy!KIt!BHdFB~Ed^!~K}r1K z1U7AVzwjmes@awVx{87qqzNGNgp>{`<8v z%(U{S&Oek-r!Dgl2Pdj#J2gjKnwRg7uCAE*x!>ZO8B9%oa%HM)~9R;MIq1iYwpW(_JA9 zpz|oL9$iegsG2Pqu+lU6g@lE1+XB;^y||PnM;)UZnPH+FHL<6VuK=1nv-#y(k+{J@ zT(E-Op{2{!`J?)M9J%dUCcnl0vw{c+D(oYQ?9ep!ft%8XuHH|(L9a610$FPxV?*Pj z#-c|Pt-x;Bg6eje2DHQDm7fn+~%JU1ftzi}xxMr(6Ohm*DiBGmDgfHgH{JWhx%PFo`P9g{bOwwjnyn-h)%FlGW3#BE z@huCklf3f&&ecntEg`0ajCKI~N_fY{NWo3)QWM%a)hJ3;FY$9CByi}psH)f3Gbu(< 
zAI&rJAM#KVxq4_{OA zvBKA>V*xd&axD+Rcsf8no%!i-Kw!4Fkda61?H#w6^rIt2=q6rA#?5+Eb+k0rTrC;; z>0tU9__00qFb1!F^cUroEEbh36D!(?7fhGokh&AzTC}XKoS}k}Zda}z{KaHp!FE7T zle;MqBvJ9=FGwEVkk8?372Njn?gQr`l37v=f5*ey9zLSP<~)UhZKou#r@Gwf!P_AO z$vLY5{WtQxka90S3{4d>QOB#P72~jHn|HKJq|Tb1q*+}~55k|qm-x|9wLLtJVmt5L zU<{7>(l$uY3HAHiN>z$R1#s|4Q8KTvhOoQ1t(WjKbl6h0oI0ba150! zl2~laF6$xZu|i3sBP4zo)AjdvdhjE4 zWQ)-YpItsy61-GI>Z~7>6O{%K3xp2&E*e=U#kBY zE;r9srZB#&>UUJ(1b__|?2t*Hj}uz!9GyR&Ryx?^j@a7FrsoJuY>%?~m2)cAAF-8Ds`% zT=3Mz4G5?_;SpjK;4M{-112P9P$D+6I$Zkq+-Au;D9NcXV?W$OieGACvM_%IftO30 zLNk$8hWZA(I8P7+K|CYBz-{D}`{>mUwK4SJGY1w63v9o^gb~?rn@{~_3uZxOac}VW zrHnR)g-sB;2>8Ukp1|xVzEW*ASjYxNk9XodvcIY@UhoAkdo)H8PM9$^F+{H-nW9~b z$3JBiyO@T|9BfHnWVh1AjOR*8D^j9=ELB2@xX7xsFSc@7l@a~Riml@Fsk)gO3h(om zwXCGcvCE63LmO8ySK~c!A38Q##?ps2ce?t&GsB>ltoze589dIRH{m8ZZV}ePqJuin zEumK+%Q4S6UsJ>OVAJTR#M3oHxBd3pTEo6IG@gyIp#b%H@+`kkV*3?3XdEc<+AnBRC6ax;;mqBp3xSx z)R(Tg_DL*JjZ{q|GEj~rET#$-qeX!;{nf-@$k{zz-vcn9$QxLIvvy7FOUqh1h0de+#Y3T?=JFZlIG1l+VrH^gk;37lKJAF8q+~W||9Brs z>I;ZfkyU5}9V;0628%taQ=dPYE~#O1d5mv{Q8|%X>q*$5z4U#KyH%jIlniyu98E=b z(jU|1ygqjhyY2Dzv(~>2 z+aMa}3_^{{pS12pxglG8;0?DBU~a>|n4X(j_TG)l$w558|*l=ej`X%KUCA>(9c)Pfdja=jj0BC=* z5@eakBveVL=bs)qN}*6h)9+aXVv8lU;lQ2#ROl_AM^X3`mZOiFusqS+&63IT^^Kr% zb$WkQC(HD63qevgcIlAZnXQnTnp(6pLx(K%_W@=#c-VOV-S5SU*$Rjr=Q5j1njRe> zV$*6iX0fvyYN60r|6agEdCnel9ETr=$pe6c%5=zHax*FU4WX+hOk<)W>&GJcFY*v= zTV$U@=^h1bC5X|y*yU)U4i}xi``4}Ee1G3fUfICOwS8FRm~+EC9`baWCNu_WMrRxqPru2MFOr-nmesTc@sc> zke3v1ARxXBmV5!&F9ZZAToklx3p|1FN2k;2X9h05a9{?!d;P6~1pY@U1rhH%x56R5 zzJnqMkkNGxq`z-ah1Y>+0Iz&aB2;^&+<)i!cYX>10>8fMAN%_M-x>ZUuKz{C|D&Y; zMHPf<-W;R|(FI~53`1py#zl&_^b^dVAXah6KL6nwco%zLeDwoNT}WM7SwXTBpAyr2 zS3UFt+)vYQXJ_jpf1Y~>Aj;{b>CjLlqtXeFVM!1GO?g#|r%XfMhiqpX#u{woO^L=m z;?Ue+yi*h8RnJdOHjWfw`kWABQO{DDOjSGw;pvnK-PcgYJ%HKbpG{;&py=UWt9*Zqmm`@RD;$GtIIKn`va5{aaDn2G$repV82n~ATkj`ud zDP`)IA7B6naY%EKn9H>`YjIFO(tiekI4(wAXUz;7nM1#yGP}$Q;aH@#6eS&fN^#N= z$G$%mByM^nFrjViql6c+Oa%SWbos88)L-xC+06{V6-xS0p4BxfJJ>r7BXyY!8Bb-J 
zMx$NWjEHW1BGJxT2#@x2jot0_tn@TcuM=oI>QBT`aJnIV0h}yQ^s>}3dqERV+9^`G zUFH!ER8;7J>*MRg(p4BONzIIGmzyV$?0YbB$qPX3^Bt6q5d!JuG1mr5d_nZ)b1N zN^@X-q97rMFndDeHWy?trfgm(ehk>PB?CH_5>jRod3KRpIRFMqii(QHhqu`zOBD_b z_m?cFVaStW(9So8)5}bq6e3(8nlmLa5(#9>sp6N-Ue9MNf)^dmFeGXgJ!k1@TzF!L z&t01plL~2(RVF7U&o0a)+ z^BKM#BXtLyO7nTw!%a+7w5)ngAEwaKP18}S#@_OQCMF?5wYHi59N#6i)VZG6L##oq z_?TaK#*V9a`22hJW<9D|5Nn;P@Y?af?eNb!Ge`(VB?PXBdRx70^QdGIyRSKu!{hyO zj*brZGuO$x5yb|WIgO3ni=Ig<7O~mN5_0}Kdht=Lv}7Y*Jp74<3Q|gJEUdx1$#bb_ zyzN_;QwI^r#!LZ?^kZpKHWH8}MFr79-R(Wi6$MQFHAjd05@KR-31QEU9_|y@g2zH^ zpF{oCEQ+6Qunvu3l@YwiVXpCD>l-bsETUGdQ7X7DE=lZjghbI+5>T8%ARxN?j~INk z_<4+Y5jqgCtL+^f`kzlGmJ1Li%8q>(^aojD zUxawSE$nUEdbu3ohI{BRo^Cuks2PLJjBKFK8H1eqSu|l6(fzB0=G8LIQJ#+CM57#v>CLvmii95-NOs;*u%_Tt5>K^CS&P&}x{*-2DyeeRjy^f-G4 z3#Ts~h+Pm-6r|lX@DWFA4#i*{vD7>dm_FOY<-h*G!tK{iBYRZ1oa8Sr%j?|8T?h{) z{$Z76JAXo6WT27Wn;*BCcH@0Eg@jo7BFj6#&EY!xnDaPKn|2>XiR{;aPs= znD`twi3+I+S+t%6#%^Fiqr^}e+4h!jA#^woU-Pq6%u%Q5q4R7KJ*>!Zs&ej1Y0}5h zs?Y5%??&^Sf77-`VLsd-@il|rlI=PVezTB<{P~J{8U`9kSu;Zdmxsx$MSA&s`(dzx z=>4x&|J`V1_@Y6Vun`#CWZ*sn*i&S>}H!7^S@14yct*L!ZgNLkv$Ql zNCx!n#ic+;ct^n5goT0iaIy>Rb5YT{`}snjhaY~t8Ho5^wC=Xw9$9{5q(#}A9IVe& zRf=il=e~#0L~V+Z=$nshEA?AIJm)KBejI(TeQq8go(-?1b>olaVWN9_>6RNgegppXrc4R-&&NhU=)N9!3T zvaFMn@Zo}+E90d&fTxEixlO9NNHIMvpg@MQsC>OwXG)Gi6}n6L9uQ&K2o@8=>UZGn5B&M4N2s5j5hDKX8G4s< z4DHSZKC;T!;Hd~tO}Xb3>mLrjF(=A0CfQawMTq6fb5O8nI%wmf$b zoZN?*5B5SHBHj8O1FuEDU|JMV>HKa5Dod&Gpk}6?&&k5gOnoiNciX`5h}PniPS3Ko zaW^4Dv^;_^ECjcQL6Vk9gY_*$oxZMXgUu;gq<7rw?4nf1xwS{fLvqBeM_md-m_o z8u}H{1Zr7Z7=+OA(``FTtvI$Abr{Ym@fhI_GNr^vGXd*7Dd3!7AnakQ=qyHK7svX& z7I03sl_QkSG;Z&)W}=K?PXt$xWse}cudhI?O8Di{zbmzPtiNr{ln9daQJ|hg zoghXF(WwxhL!JX1U7J=INVlh!E$dTn!WN4b@UK&R_Q2x?mA!5wm;GN6Rh1=IGZ~t- zUG!Wh)~!?E1oJ;Yv>D9Rk_PJbI`Q2Kd8){c1mMs8j=aRM2F@-c<=ma3o-JFvStr7m zv&U5}SAZMTQB((O^!aGB={EF`@|u)QO=I)8xc|J5^|m_5q)e5f8qinSM|B@jHc-sI zj2>9EjO?ukrVol>tLUTZc;@@YFngF0Pd&`;t9moq8@hT8`n&gIs8l;+$x7;l16!yg zLG*Ej528*Tp?>Q|L01=-a)U$*abMGWRZ?YD6k``YPmE0#p3e_WH+XlHEmlLc`w}&b`?%D_>#HiqG2RW14ge 
z-k@|rU{J#8GZsVdT7^;XQH#{c586u=XmHdmm~6N@(505X18>FY4^rmf0smv03$+7DSF&r>Q@%Q{K#gezD>CIyU~> zY6>xR0evx!`LY4Ql{d6{oihydq_iYryKY=N78VPVrS+_FM$aH44~TftO2%MiCgvq2 z?uTg+i6Xye))9SO-4dB(pC>nh+~XEx!c^fgeD#a_v8$p2#B||j(2;B)Ed9y&%TB)3 z5q8+unvCY+B5r(a2F%du)G+_*`u!rSF}YxA5TwEIJLl5_H%|f43=c(T&#= z>{$h@0|25wzR@Gwa-f5JBjxCL}MrwAY#JB|33D%Z5{D2mUN5kUYf|7jvZVnSlGN*bCz+AC1vyAiL zbdd@HaOdTrRhUjKk_#Egw!gENy)8$He(!P*7}IROK=LS}X`zZ@&!j#4DBB7q=XF_k z0l%H1+=_znrom(;R-yhVo%rZz^b3#e{*ag-(mxwCGRV9JqpPXG+~9*COjNOS8Ne?* zc#{wW4(e$BbOd1UEru8)p6K*e%~Ay|^G%SPgq*R}lwond>FfuSrUkocc@4FqlUeF~ILdGTq|Lmm@l)Tpt2mJU=pky?-D03OMh4mfLJJzJ7h%V*?y~jYy7A z;@RS&qOaGsmOLXsuqMQcUAVsfB5R^z@(+2XIg^>HsZFZAaI0fo#k%wb zkQt4fG|y6Ys!5iakYi3p$E6Lm4#X~^Tv4-$Dg@T5XtprIoUU1t!dPGNFcD612=efE zz>JjuMveZx<+6o_evxNp1AtIngz7iy^lEe*zcH3ScU?6lu`{rWI3RkoD5PXaX-DD@ zZ3DQ`j~n5-<9eUHpl~IxhM)!~7#awI4$bQfni?F7FFe%v-9u04PWbS9j1tfSL_Bf_ z+^4jVqW0Z{KIKAUD-V}qO#z`Z67qr;Q@Ffgar(Rb6$+z zVIjD7L{rI|)=Z-p(VzNaV`K&*mRe$F9c<88brpV$`55!=lix~{u6)&;`|pB%}_b>%TdRVagtrt3GgwxL+@BaeOgqEo<&ED?NQ3@>ZX1apQyFh!bw_**I_FvL-B76oIkC_37Ys_k$LTKU((8dW5ex)k0l00$?#M%8GkxSp z%tSkd5@C~{V78s;tjz+wWwJH(NQrW+}Mh>dF#EM*AYWiz33jU zwF^U3b!}wH-UqxBe=X7iTfwaqrzK86{Q!!-Fb@4Zq;jqi;MubLoBiivoS~neKi9R> zZdJpr8h2_7*+DGQm3@pUHiZyRe_yf#xTg&F;ri1r%LqToOW#5JAND4uZVNWr7Do9+ z>W2yBxbVYfXk-N9Y7+hz(KySJfD+Lkl@y{*p@V(F|)8RAh}N8*pNFc;gD+W z=s7+&F(H3u>)WxG=Ukm}=*zUH#mSbSxRg;azsP@?lbA(JY*fm8%-g+KyUEdegTFBp zhTWun7rXnZFo~_28?Q;6ck+`{+FG%?^Fi#Whk>tEyjhx+nQL}&&1^w@XClE^?#}MI z6f^{c_1*Z;nAnS2tSW-5;PQS`+rzxD|JU?Yz*9r)%wC$D6>NCKXvF#>D`5!~eG}9a zX$0m7t)#8C$2;EV9FuR%b~LyaFIK{FG8&i3B+JLifBvZcL{sx);3z>F%g-HCW$6)p zf+YPyhm-B0w)=1eJH@fZQ@&?gmnL@`LExRmU&De)061I@v^5BN{KT^(`O!K!f*t*) zJ+0e#NFM|m4I9W4t4X320jK`*kdcuAn(PF(B;d@q#f_^breRc4pAUx*f78a>*xUP@28B5lRZj?1nO>kZfz9Eis&pi+0DMJI zh3~1^FZtiJ(>Mo+HZ+18ltcXwe;tCdQZhptef191O}$2l>Y8(ksuZ5SLkv}YRX;n( zDPhYszgv~HfS(y#AElS8M956-GvFyM86s=)+U3Y*`%d&EMKUvsBEiv?7z2P7!zj8-Z8X-yq$(oRW#F%bTL64^UQRTlw;@n1QMi;GVe&{ElK 
zR)eC$!)5AnWLzt*Q7R*gTns}D61wN+pH#7FVS)9*Fkeq@9M3Vhp#qDs5A$ zIOz<|px(qOAK~4H82; zhOR7np;^II?Ll@AEua#bSjqv|-#72@xQFn&Mh$vDodUCh%$26b#s&AZ1To!s(r)*( zdH_4F)e^LTstJO$M-}g4{Xi(Fi%pRVv&*uH6HK#8LvTu@i*GhKQE!SwQ z=>dPayWU4W)m@HbP*jl>aY$@Bfb47_m`FO6dXU#0GSsPQjX}1tB9lBR6Oi;K>ki)Czxm zooOL6|22s4zpnfLiWK}8jr#ZQ{T*6>`0o5tKpx{&(nJt zU^_)_P=-=LKm^+jM@bFHy-ymf=qobF?*bH7M4v}47Oylm7DCP}@HuUMRzI5Xd5F3G z`3X5iSCGhV`nI-U_Fr+5zJ?H3u{F0IT5C76AJM!phDLp494gUlmBGc}`16@!smqx< z5L4NRzo98zL-KhlYrYTJ2QbjluOsNp_5(z>JDUGMNj=*UXd+sTOitNH9_oS~J|^XR z?P+|o4rK!bLNPqra?f%utD%82pW^tm`_s02j5C2X{W%yQkUkiWp2Tt)G%q$`<}Mvb zpYeAxRZv-pmDi&1KL7|=1J;E|tJV|mg98x#ZgC{oBhLW%hv4=q=YdXSJmBNoSm3c; z?=?#xCEtm_l||P~YxFl>dVi3$g*CuDU964KR2rR61vvadeLo?6vRfIyS8jrbl2_{Q zkIiM+dzqUS?q*>aa8;c)lenlkInj;@Z^Pi-qI3RJDCdHPdV4KM6br<$DOK@LZIjt` z-dc^iS-2TAdq>}uDQBODyfZX(&Q1=|7#vKm#AVCzl@euXd9XGRd;}r| z4hsig$xD{xXkfho^-;?Z2}Nj=5(;FjPJq7-CNb(gBymNJ%8{d6`@Z+8hXMUSk!!K3 zd5z29Sfm36)`UO#Ps!0`0lSk`s2?@WU6yV^A0ZRnU{)N;_yxMiO;3LJ51C&yCcE1D zlMP2@H4YtE-^S)ev&{6?I$iz}&*v&F4(ZoA4S5Ie0Rk1Xh5A=0F&48)-D3e``?$w3 znVygjr;(A6Urtc-3(3imZO>||k``y>ui2y#l6o}pyf8}&=>5$;3e`W*N3$Y>Z>!zS zqGx4g%$*9UW+Ql}fK^$cxIm43i$|;*)z|d1i0%naB4|rfzJg4WyRYh} zir=pHNmPx0(TVn;7e+#CI6Ac^HXK|FKqcE|r(Y3F1Iq>-fSpq)l2g*`uP-kZnl#p| zIjFUYDc#Z~0Q)Fu2*9ucwj1Q;eDZw%-nj!@pCL@Ul}Nchj!=~_Rv&j0o^1+XvqJSgH_WcCcobokdOf4E(BYv@@ELjIoG$vs`Xgq672aN% z_ywpuo8ZDTLiyYruE!=eJGP`l6OewH)=riTYUNLv_Ixvnlo<2?cMIuT5H+AYGfL(`o70Tm8QbK7eFN0HmNqK5#ssS$YXNPnMDh9 z=;-|fgQK_CUDl@fQ>~KEFzn8)8TYk)@t->A2f=w8gnNb&mc!q}I;-Lnm&u>)^XSmT z63=+CZ6Ybx**^gsD9t1w|Hy~!R|k=Y0|zc34^WL&`$|4NG(LB9UB@1l%+X)eq-6zc zl77UPCom+#2e=)`2Io@T4}fPaK$KgA`x6fD^bTD>hDyCGscpjXnYkO-e2K);4a~xD zfpT}w2_TXTF#pKJC@C9D!2JR*!p8n1=^;skciiQrzj(F?|A@RMNzHtxA9;D*WM38SJS6NwEfI9=* zk%i^Zcyw?SxOL`9WsEx%7td;|_OX-#0nU|7FGY~j~vSuE1;ep>picHoJEiT<{D_v;OHeUsW?{~%+OkweZ zg3&R6TrKi#PO=C9)bYjbKa1fX6{r4TeKKii>;402B>TTjL&f4e`%SiYxF78+!^_JH z_*^I>qQwoiy88##_oCtGIyks>@y4kFl2oG5>ia4Q_b@(ME|K`!8^!TtohV 
z-t0bX+)e7zdd5z#%(t|y9#fsrcMs5hTdPvJSB%;$j@4D#&|*~V?q&R;crU`K5U52g zl_?7PY;V4)?Y~S8oU98Rp*W3Jh0VRwhByOVf-7=)uwvJ&UqyeurhY#|@&JGKlUaL9 za>@4EKlxqtKuQj_$c9AdBMV?teLs8a0X_>{T=-c6ybgej4Ib3M7B5(xS6x8{nLm;F zpU%G%7@Y@ymH}JiTT>hDNqZ-+0nSA`P+~XKC0z)QggH>wmns%M!DOA2TY*`?J_2Y# z7dDBp9LNILa|C>5L~J%9!@pmf&P)K~u%#FNoksX~X2|kilJWn@2_2Sz^4eY?_P+*~ zgu?vS<=0tsD)X<(f4A(*KkGpL*E&9s|07rWPsv?VpfZ2WzqAhYn+=iGWX8V-G{>0D zh>Pf4YA_{%Hm>Zx-~|J{$(44P3$*d+I~a8y)x0{?Cd?a>I5IP;TS z>4|c^l}~=w{yD}t$lep`jZ-Bu@F`7jmHo#-^xFzDfB^*i?o5&f%a~fsv z6d>?mA?9|yQXJnC&X=Q*8igT1P&xnep2cdpY|9Z#wcPvpf?woRMwqW??1Z;2LgeDF z@)!p{PdLLTVdIz_Yj8P-dp_#jcQNeNEaYYT78Hu;Yp=ZUEqT3AP95B|<~E+-XV!-> zcQ~l{;JC0zV?toQ9aQ%|)rsQgfyZDQ+hQ!=V{c8*mp6Iwyq~Uh2lw~y1~CZhYlGv) z$Fn({x|1Tfi#T*3=+So7y($0X47ln}oNZ){0oFCr>!J2JEet<jZM3*@vUghny$^@P!UfkdbBg_rl8t=l$M{~#%`>xHCRQ=Qjc1W9y)82gGu zUmuGebo1#~1#Vu4{#t6PfR+n9rlh?^PM7TdfxRJi;7YCR`zPAp1_SrXczhBs&nKNW zSxOT@4A%@N`x%20iA=tcQG47g=_2?H99z0P z{Nk>IVEi*$3t9eC@)?#&f0h)?CM=OAb932g^ zY7!78Ia&hqKN5Wy%S{`1I~k)$8y)c!E>w#6Do|-8ua2XrJb@;CYkS+nzw6YRxu`*$ z(fSl}eSAe%yC7DYYvq-c>K1C*5Dy84R*NCDe4I<9*?IT4){9 zei0`lv%h7mt^)PbjRK7;(Y%an9o?T&P!YYlx=ec!Q`QhDR6>k&`HAhTj$iNLc#`9G zl1&EQt>4+bG;zI@y}vh0WTgQmST)j(w+Cu$fBEr?^?X^+VzVt-UA@mZ%jIIXdS;j4 z<@d^?9|H9Kt)3k9gLGHxGwWqL)b|KLEzrprm`3;M%^xD{O=jW41qv;%W>HddiXh?4 z+d<~_a=jaU;I7=#V5|cTp@d*1b`Yf z(S^$ERGPN2S8Suin(hqG1{eg*hNJ!d0h-WlijQ^EIVh;&ajJaubHB!=R_d)<2m{B-~PBA@w#vE zyo?Yf&B^^m@A%W_W4*vzWP{a_&qp4k!bpQI?QhQ-S3W|u-&8lMKTg+nP8Du&)-i^z ze&rEX!}I_%GHFV(7?QIMB?$I}juE_wD$yJ*h$zwU-r!oygZQ7QuC89Eltu{w^E{q6 z2^fx-14ZS&7x$OB5#v78`PQ`UK_X(;Cx`nrWRv^LHzvbrxi`z1+Ru?C%}QFSz1#+> zi98AY{gj*uz#a)e({DObr0$HgD5p`&J z!))_nXYc7|@2qF9pwJyWY1*)XWNI0Dz52FI^iLBRW@wp7u(Wx@wVGOWV2ydYG=KIx znBDCpDvDn%kC836-jF}oEHYFp@A+SDULpxEtqNsi*4y~0k6V(W#aFr>th_Rp8uyI& zqfR1r_8wjCuWJR-$k0T$({_#TKpWQEVB3Dc*=jYGSNZ(8JB4q6W?@?C$a7@kvT4>4Y7=Wll{AKy)ce_U&A0Gwul-4T3f7>X zFZWq?_Bn2jLx*?qE-WQ4j&(eQjj)U6ZeD|Kc=0qfCiL4&gT>udUxAmcwtl{o=5iTl z2dka$@b@Gq5rz*gRqCbpXD!W?H>l;eG^jMj=$!M 
z7H-+b9yd0gDS0NtM?RVptX=YZkpk>xJ9}NB2@tqwPEB?3<9iI}dr6i`{w?I)cS)d~NQ!4#(w4+sT-`nP6o6;t8xJ zAa@Jj{egCV0bn5hNTo=*o{QZNXz%`kI(N-)NQ7)>|12t;Nfh>r%`z*DZ@+bcKAEO8 zYj0u?IEr><_YX4hxbw0|w-?DJemC*#?`O2|6%=Nk+X5w*w77;3c1%%zl_VjE79q&) z7Ixhu1I{=uhd_K+Q9cFip?2&lfj`H( z2MfhsS$K9Ju1^=AUn4tNzjhF|*7|RlrBA3>QIiFDDIud7ABk@$LOLz%DWt+D#L&n` zom#;&ni#raB4LpOuLuCR2aX!^ho>F>y;B`7r%*Y$-EN%BC7_dIE6>8pT4FB;UWhw` zF{Q0Jg1>;xB$^pe9|{GfpefnhY=2FQ$N80gkua?TpYy_+HBe@v7iR0ygv&D-DCYiv zLBIzz`yhjz;QxGv%;g>r%)W7m z&fHdTA`-P+3}zd9NDeQ%akkQ@%Ify16M5b85;WZCmR!vS6MhxS{!A3+qny#^&*Cug zM?}>*5j5X4E5Y*9gM{cn977HMQATx+mb7c@jDz==q89Tk9!_UM<$5A*+#AXm`c+<= z`4;0j{gE;%*c?EVENb!Jle zBzrQ4S_xdn={+r|<4VC0+-#RC@|-q2 zOCF=2@kcZDkL)_X0!nxOG!6j*cgAl=?-@p$TsG3rNR*kakID{p5vH-fY5b?Sqw%U& zxh>%Kw7xs#8PObJZCS!>>aXj?Os=;m9b#N*tDqgI@VkQsK%4nC zZg+bZ2=WHo`*=+}#ELWT1Cf+WbYj9wtoJ7Y?{^+navx4^o*o}oCYGGSMm9dyC;l+& zF)M!4?R&*gva{6WlFzw056Cr&Ym{C@yc+oqg--9}90IujU1k%*@0xB;QBNCtT!%r{ zVFT2JZoH6%{n`&>8XFotvLe07OHB)Py()lk{~&exJkYksrpLU)d`B#of4||Pggb%M zQ2nW9f8O-Qcs4aEyEG%*qu0;f<(qwzZEzVrpVDe<{F!tVIrSq4X#(B`AyG>vT|)U?^? 
zr{}a6qT*O|_=YPbQ}e7xcfdZa6v*cq-`UFLQ~n2|sYhWA$$S(EySLru5C>=8tcb8c zWd3#p7Zbv68=NB*ct5@cU#|SRo0rs`FhU|`(m4pXwy*0!49ixdb`zel|qvjrFC-aNdt?U9=upAus;A+V!Kh{;3{isTv2(fKc>o-%s~GZ)s08Ld{onE?4Wew z<#`35J5M7oKbQCZ;#xM@zXQNaTif0>%T^MdUP&0tawOj*CO#y2N&?aR5&T8Csr883@AYu_~00ZqFPVCJmOK=F)R?yrfpko zNb<|Obs~PYh^w%tJ*$}+i&EfIfF$h&2@rJi4|*9bDXlxd^KkCZIaqFPZVTM=B4v_# z9)%^U^(75eiZx{oRZ~S8JJR9IrE)wc>Qhev14SA$kFL3w)sN-dMOa(-(l0ff7t}Mw z#;0Buv(9ZmYJG05^IZ)P{lkyzNwZk>!w-vR&2(QX@H4KU^Qt1!GX+FvBfqBtl1Cao zBDNcxD)SU5_X)oQ&Ve7Wq=SXeQr+D7rBi7Q_5EiwNIajaDFbyKYi^%TemUEj{zc@@ zo%&G&PMRiOOs7mHI@2CWFY!qzw939b9yT5bnP`OkvS%ZQk<`n82RC=uPskOp z-9LGxF5f#Cx|img!2J9c$M8TFt5jpAHHTc3A#Uw}ywkovZDfFNxTd+fcxt-gsMum& zy*gU7*iB8)vM{-5P%VP>+n9?iv!;Aq4x`5Q(6nJ!R*(lc3LOyCBdhTQ*j7|bO=}|z zA%p-T{%_(>N3Ot^f*xG_dw@T5+>bMC8<6X2s3liZ(=2May1M%3+3IceHY+-Tw(0i9 zC!dd5uOveKZ?&{V?A?L{YehTABfaV*<8o@CE=RLoG-k>d%E>eR?(=SoC1MZ4cpZXR zo?+t+#A-z+S_beXYmVr85QF6F4(0^F_?!fQxM4}x1*=GXUT;^Wfwa=QWexDaUPu9y z?brSN?u7K-B-QnWF&BC-iK`!Y_T&EYkW&rs-y}tE=Jgamme!Cxp#-*z1Lxm36Ey z%Kn+hg{MWQ-`c7(XhQ`w%&*^^!T)7V4*o5`oU^l^T66RB0Uy-MAL&#H1B2*fiT9sI zI|Cw2u|4h{?srGLaz4^PBlHfZN|v^UBBA4>tOioS$q0b^ZBFmtIqoXZy zn;+o7z@R4YT3`mlITxP}SDDn_sJ-Qow-{+TwUmsX7@zBNc`mYtqJ;ps73GkeTuxhiFu)}hn8)oT7}AzI!~H{C_XT8r(q_-L zgtiO&XZNgHC>nH|mL5La;j63Y2mw>pJ^SeB&7d{G{`)jyKEHLzil)^_f71_k#PaB(EDYu~pPeOuH~ov!Mv z0%TamsNA@rfHJI5yYv&JKpjUv?Ij$bdczd~(by^NTTMM=e%4DIa~_WeG%56M%CpN+ zoLqa_157~zR=w&e6hh!R{t?G(E^r!yW~(Ldhip300cT3RwhLJDaF)s>|MUMoNaQdt z>&hWI_!31;UVf}~=iz#+#n*2QO8@3?Aukd3Go^4O?OKUTxrO&>r{TMd zFec_4EIbsBWDUHKKU0q^kFPf({kx2`a?5M;hGQ;M%bZXowYpKKq2>B0a_JnJQrsh_ zG%~UeA3W~1zWZu>?y2Leno?I+A*e(qOTfX&4wZ!}ZYFzO(J&=FEppR@P(Z5gLmDCfbiGqOGP*};Cd);HxX%<8Q1H_3NjvX+yAuH@XP=B`|~Crh9U zm-6O-nRji3+~kR6C)T@Px?*+*5}q&>-vf~gcgvU2#<#@YWJd?@@p8r(S%Ah~xfH6*+I@XXz6^XwMQ{3B9DB+A`P zH11miEmTZXNs1*_-GmoDdydFcR1Poau5Ne@q{{!=25-Hw+PAQuM$yiS5CAnW0>>Xm ziCj^^Heb|B(WgG&SZZ*RqF2J392ky%5AN_$NxgIoq8=i3j@q@Ao%GAwwe1AH56P}| zJ{`7!9oE3yv8C5YXU~d@ZwJm^%A$IArNO=ePhZ`!fXEqwSGJH5j%bLyhmJ-FSy 
z@F}e;&TOm-AvohMq(GJLh-5kDk>r~;6SXvX2LB^lHBqDwN$z#>Q7Tbf6xJixzfK#8 zvj28mCTit?Z1`jvmUoVMPZQlJvY&mvkXK>O!Vwh)xEwaN3N6FgmPTvlaj{eLny&0RUZZT{1kJ zH8v?L;-)I!?Lhg3W=%rMdSWP)YDC3f!TW6JY!@)Kq6Dt|%>RP~qG|gPvdrx7$O9+| zUEML1lCLko!Bx7lHQc=NbEFBZD{>tP4Fgiy@6Bm@{^l1Nm;*Kyyy$PF-n@DPgG#9Q zlYVfYEmhSe6{^iKm8YNhf`<0gq>87X86}{7pz{_;HksC)p3v6s%zAB`4CGi(m;oo{;e1u?JET=#Em&&bUDs=Pg6Yfquc?k1Wo}^G<$;dCmWQ;jIF4LhbfG}u3@0hs)sK5n2)wpDH!{Vwx_~KV z&aUKLv(;MON|Ox)o<$nItYI^aBstxlDVV4R${XRd7UZ&4T3P{WYDGZ8o?pW1;(iz9 zucP7Gh;eYvyplSWvTQYAQl)4r6o*Q3iM0gMqSI>eBLP zAepKH3B0IR=_;Lkgs2LEl^eNRXQbn-8ibZ_|FP;b=^%+0J&z$FAr3LR$oaWHxxCz( z>fD&VALIPT=K@A0C1VYuCMPLrGb8NY2y?H_$m9AoKZ`Jx4V-v}OJc|rh ztFuTlEDwp}c2)jPh&8~Swr%sooD0o!z`W_=F~Hz=+E^~WV~2+1rH71$vz4?7EZ~?K zL)0BtFcleZ&JgqA5oschv$nU9Zu<>WjT9s#jr1%$1aLS_{L}e<>TiagkDolLSwsgV zy?QJy?dx%F%zXWk`VhmS`C(kV(XQUjx&LzAcBvi3Yuh8`>=?=@edGah!4pBwi=Lb; zr;LlAU9uY!B`y6}tE6M!-~S9FCoGlRZyAFB<>j@3iv3;C^bH0tXpq-}Gmq`I>?9%K zrD@VDFRhc=1O(EYX5NN{NBs8C#W`N+cVYZPd2ezd_ZoE#gm1?B)!G@_DG++z zX$|_S0!rE^5H$loSnnzXt`VY(-ju-SGo*R~`L-Plc5fJ{oQONqNilSds_y8C=;@GI zP*Q6zA!Oo`Z(N`E9(z9C4vi2Kkbb~i$PCs`5za;{wy?E5oog6RvrL22$&2nk<9mEq zh|{e?CUY>{4|W<`45bCGSM)agxAYQ zf{8$1Q4rw*d&Wph3gc`dcV2TK4$H4lBY#K>G&8UZ!Sxx`=aM1R2$Mp143Wr=b1ybB zNqK2$-TDbl&>*=!`>PU86**6zE0&iW zE$R27w4LkQB^WbDIpD*MtY&oE(z7n=YM~R#Q%yTgDFT7HfMi}tv3y}jrT?`6F%BTy zeMz5QupJ~e;f&JJ$2BOn*@Ror*KRVh>y4!c8v1-nAq833w58vz=wB>`=KnfbIO-zM z(84_evkxX~8#HtI3t0?5#GU!D`;Erxm|EC}cu@R{c{&i5FX)tww+txTAj{E%8S9$e z*P>U`TIP z8AT63W_`Z_T3hyDqDWHdX-DMZ;X6KZ2fJ`ims!O1o!M?L7o9kzlG4Sxe+cut!uF1+ zAVcGlDssU949Wl*G&LzjoU{FNPJ8Rtmd{;%JLNgU8@EBkuu>}D;3VQO+FYg91P+?d zrx!ARz9UmXs&VU$o;OvvP;_rU5 zZ#;Z$JSTW_k{1>O?00MFY)tn()^WFrSb`Gp<7xd0=pDbOY`7QStAGBS@xzhHoWBGv z}+a|r)Ips>QXhi)Oh`nu>Me{t>fVv6Z*YI zr2TW8w^o-&;5k2$T$Ro*Tp#BIZD!H42=*I7XYD(45|=VUzkUt)Vb?N}JTho`SUQbQ0YHc5tJ9hz1#-bZx4d=3!(kCE zWyU(yIG~Q`U9mBd{XdP@r5_$llynYf=~Q7csVq%EuemKU_P-h4V%Z4*@e3JR%UuNZ zmyC0Jca@i;Dvr~pU4xw}et}$#0o}p5jAIjCv-tJ0D0PM`0*QN}YP)tki9d*@&&>fb 
z;(>ZmhBprv{9|GyWPC)6Yte=E^D)Nf8)}wu-#aUn-u%i_5a~FGRXO0g4_xidfGqY4 z+$Q4e5sd8HKh6ABSXHID_jYgQeX>P%Z-0De^r>IkHyX4fs{4&wkF`iq*MggE!*d!v zs{&E4%KH}IYm+y{`Izv8>%CYyy46@FUu*w6XQ9^yxJjVF-HXH?(Q~(Mn%QvDSm6Z=6n-VUcS=HwO>x}kuT)D0=4_*e=~f2*otTR zCi)B6jo48u-Ye?+yYrwSGYR`94zwfl^wT_9=k=aReI_JoJ_ZA?C-FUPiNWrz4wMbe zNm7zZX8C&G^o>EI=+x_Gw$sSPV*SQlBp1mXUGTsv9ycs3v=h@Am>aK_J1+gmw3r=w zIRsrc=Eg_nqXib)MsoCvv*+B%47~f#%YK5i*w*?b!Po}gPX*>Tp!{AgiPu?F9w4M! zMj3&yerai*@?sI<;9{AkAwk#!TLl(89sx}3e6>bR^*r2eohJ{A>#!a_2E^TY{bNh8 zU-k6z{T&`Zzn}|do^nRfQ&38Z!>E$)_Ap$1s) z(jMDqjE0}NS@7v=d8Y7ecuQpjOh=x$(9^%k2r!6=#1TDslRk^{+eQZRtWs~|lvn_l zN#$Vx{b{W%EGkcNv)rMF(C5zc9_fF%3TQQ4*%-252wdTFX|Y7#ZNf<%R8rzy7^gyg z?~nYL^=e-8n|*@2GwA*$OPnVmi1GQ{9_PKNxgB02HW0zFUEC^5Wz-qX*{+g$U}i?S z;EMrT?mU0ycH_ZJcxSt3h%IIvL$wcS8;#81c4oXgA_BDKq+w@UxST=@Ozf6q-2Ya) zt+UlkQkSIK=B;wH|1`e$%Jh`|uKR*#;`y=p0drrcE(%^m^J}<2+HVX(i3`T#E&=0i z6CUQ}xZs;4{Gw1RqX0`b`e!Z4pjfEwY^jfDDVeo!Z-2YteMB4~xoFs#8}LLTuxw^l z^r1Ro1*|p;xK*S-UenM%JYU#vaC$^7O1`;iVI$=&!pf$m6SmsxxS7)s>U*Z^bMPR6 zt8>yYtAPU0Y~P>lv!>w!H(&mk%5TFLht(rrvrtt(ipa37JN9CYO!VaIJ^#<cp)X)GKp?YpHWgFf$xVLiEY0aQZL zDSy-%vmDNZJ~rAWC3?d>Q7o;1^-D{TymVSsCk$%W+)0I}mlNa>jISSjp-8U(ey~jm zHfOqfd)|v!m`4P-Aw}(BI3D*=By=TutKJ4Iae8jM-%Dych~5&eMv- zkZG~8WL_tk5K*f*oUSa{UJ{a{dQUVj#^Ss(VJE4^yDQG+)MS=)q0#i$P1nVumfzhk z<5-+DH52o&uV=FAMRoD(hl^`k&e=`wVd2{rtsPNEaV+{X>E9N;Xfad^#6n05eLbu+iwauI~2s_Txuc9_8( zS>rKwn6(Y61|oTO8T6WfxT@j?H%vB)HoD-Ak1f{;VL%I9oO?Emd3ey8B$C+rM(Q6@ z&h1-w9?nfmD~#}9hz9myPBK_aP~27|r=z9KJpRnIMDUg4uvz-=k0jPZDN*w8Y6jsz zAMM9#DWLs52hgUU5df`%;h!et-wt4!11*W;?{*&O-|hcA;JBJ+Y=31)8Pk7S4MJOY8+(JeIf?@8&TNk&v8^I`9aiGYjR@on)Ry(Mg4 zVdx&n`ZVZ{VsoN&?cfMg4`$=jw(hBpcb%`Xa8_$s3LXHT@Sf(n<{Bk(q!B&l?=Ayp zan@_CGHB|#&0M6ZNJ(YPmO2KFu0|RXFZjmyZoZl~oh!5Lyxwm#)X}i8uU;^#>38fJ zUbbYGP$A2Rg5s}BJXJF=yY8FFE4bBJs2D(c!<ibdAuK8<4LWSUy` z{<<#m?(_b*-YI@@5Jd4L{WyO#&Ysn$tn|2}cH8Or=-5l4owL|Ro-Bue{~=UbN{U|o zd)0tmWo`a-!AiEtsIdB6pkS+wq!YyHbkFMiiMy_}?jOMgurwEu>93vx=$?!4D3uD= zlsU*5s<&Fqy%(BvJlm#~hoM5)AjfrAD; 
zXeG0TUFHSt{8F!2PeY=}LhOFGqU(siRmX4Y`KggqnVu!0utzm&gkG&X4!`()?=4bp!e7 zB~Kkcgoka^tHU)wLzEu2mjh0SS&H-x3AMQ7ZzQhP8cvX-pl-Y~V%{Tyz$eT(odnK z8dH_Rnlt{y58_G?YaBd8cl^?+^u&H}4pJr^R-102dBRRl4XgaBJW=Bv<{v4>8U=C1 zFA<&<{N81?Ln*Am-0H^nW0!qPuTVI)6V;C@TkE^GE;=achr8Wnn!5e z4-f^@(C1=wdK*_kvsbVxA9K%TrYJOu-i`~u#NlEc#6DBf?8s?WkwD*fA-cww#8{ZC* zdTakOOsU%WW=f)BW|4<`5?X7QGH_*(m>2!abt1u-BSS-@zSd(NboT=xl2U5-8<5~)ej>sP^k4Db8JQ~_FP_^N%rE<6}tWxy7ZO1XJ4GdwlM ztW3yH8ilgs@~X{0jFnkeTMuHv)0;2L*I*shQ@2tmI1lDmL;F3vM4Wj@9$A}HAr{V& z?*qJb!07Kg0`~bX-Kr71{9A=>GZ9x^(~i+-%Gc)6k^S@J)hoK-{90ie`MnNxmRlpY`;5O7|clFm%hBE(k|@qWa?KrYw665Ns7(P~J=xuDy~?y9SyF@~?i_|&hOFqKA48(2(doPqT! zPNDe#Y(L2H;sThV z-_oxtgKv`7O1jm@_Fe(55m=Zb5u@O>hY_Q1M0^3GXltgS&7=_}7<_$lQlF$()Y25z zS03bp^ix@nXk~|ICYAFsh;g9G2ba1N$UDhFd{+H({Rxrb!pAa&r!i3pSU-&32Jfe6 zi^Qd|B6e&r#d7W3q;AE3(36M}Ez>_az4r1mRU3+M872UvRL)7!Zq15~Um;!2&_mr)HoWCB7D7G%vTHG8Kkl|1PKE_|KO!le()22fr_gwTjk;(F(7Yw3CU6CB!el@TBK2rvG zs*AP61@IlnTy@tMvYTZni=dqEwZuRu8xD|b@RX``OdXhwIi3P9_f(J7GOEna&~mL| z^+{pYI6-D%yDe%qskZi9cOsi9i`TffV!8LDdZvbU_}+87>%4&wJtUkpqeP7KAA>9o z|CPUUF=JNYDb7gG=#lHJv8-{$+~AZtVuwhy;ecN|qd1R?100u#+PJ__M)~jSWrAvi zz^c_`Td8d+!i59j2Yr6`?Nm{#^G8*Mb9q!nltRwP)hB)~0sGa6c$jL2#>!z|#UMRB zqf6gwl6}5}efAiAeCy&@Xd5-sK%%-@;kcx`Cg%?M&RCL^zGZ@R&P$P|QzqM>GUiqY z%n;*^ccHL1)F-!$zAa#8^W)xjcaBg(JwrTe{0GIz5$V?Bp3J>K?JiAqY(BBWw%*>1yK>;gyCw1q`E`DDm zi@FlTtO@peUj~38O#{vU#`ll41 zV7Q&yS}CDn+Mog9wcmhyH4w0e{oISa=hUG3~Bx=a`~?v{|y}d?*sl7HIcF#HEaKv zNEVal)an}C}{`$XM)Bk@Y$KrzZnfKaq zlYA1uV>@{apId~Ug=Ttj?O@2v&p-Lr`rvbmrSITuX|(?mJ&RG92_pUeB`k<8PX$>s zi4ypA#zi26EA;FoBj)mBqx!F<+|7<%1SZCLqb$nLfI>fehKGO1Fm$&?w<4kVd6?Q3 z#S?M1Y}@Y({tbXJnt|!YueEq=KRYZ|ZNYTRyC<;ER?Le7tujZrXRmoo;YYrO7mb?i zbJRN9AGqa4&Vh(%Z=?N>nO~Am8cdRmOZlhWY-Y=hXK|si{U7+`ATbO3o|m38n6`6) zHGYJ6jgALZy(0;_=g{+{^R|H)--AUAG6~zAu1v+t+!i8i5$!l6rvNaWJjaB9GKi6( z`VJu$)mAI;U*DIAF2t?I<_UPEBtw&I* zoKDU=*lrf)02A!7qI-k^Co3oGKkwfVT7h6(&4{ac>=>dOGNKK(KAVtIj(PO;&cmzY zISFZ0uCp`%&BnCh0yc9W^dx|qvNBt9-+`zR%e3rTJxH(!T^=~W*L*PUg(`DuRC?yN 
z-B;Q??+R39XSQ5gHl{n)a!$=9B&+B=LRehSDkGhRr6nz;kYV)7z|RrS;9JhiD`xxr z!1Y_M{zqK1wVH239tO02TA`OG1csX>;=57xm_<-tX1xz${Yv9J0fT*2-xaePOdll! z{53x*ix9k*n$R{-i|i)}Xb0nKsm?QLd4fGYDKMFpMMVF~3vGbr8ev zf+%BV_W+rqYx?8q5lwrmcz<^NWL}q+6vA&mCN&swM$ZhYRIeJY$Yd*IMYDU~STVQj z*2&ayUgB{O6;$GiL{Ty+^e5B`S9t4phj#;>x{&pxm_KZe3{W-g z&?RlaVJvM*ytEfjjtdHF!02)88QE=o{{5KQ>TW|>cT2>#@u2?T4!IN!X?Dapj(oeu zh0M4AU5`vtU^v@(`NfdH>MYa5n@ifOpzFCp&K4$vE8Yt)I&a{mR8PJzfH|-X3Fi?J z?Ky!)jB`u}jha$<+v+GZboN#Gc-qtNRk)7_pX@{10-pvV8`OELv&>2~YJ}eKl864h zPc}hh)o6HJt7OVVi9`-ug>ahMDlxMWVZFv&eX(Z>9|@9&%s6G#$qc>!j1fz}KCFt- zd4nFOkaIXdseIfIqAm#j9``eBp6Olf9x%n))a8GxPX!4$U`b?Vf1Kl9Iy5i$!D3`P%mV=TzesAkcb1Ce= zp{Au~0o=xkSRtVpVK5@$A-5En1L-E)D7F9*`mbJMV;f6LxP3lVbkO|F4C^_yyKS*( zDyT@;dPcZ$LdiN-#Z&vX$N*jM3&KMpm)%~ya8?+rvGgsv9v;=+%fmRIi&C7*Jufi_ z4nL}8|M6TcL{*ypwA*_vB^xw7Z+kC9#EoQCx0NeihL^R;LB<;$>yGEP%^1ADXDAgI z#Kv+VRaJL%2lG#=WfQ95Y)zO&o8Rqgi$>O^*DnndYS=#_iZD2B1<$rXGICh=jS z>B&*`avz1edbMcScu46)5Qca7=yJ_zsi+6WwadyQA+Nuek+M*+qlT7_cW z9RTDq)g9>XRnmoxsGH+aiR1kA03Ui%E$+!2%t3a1sOPS$#IZm-3m*RJ%D!?ImX7Z4 zpRg`)w$H&&3~0ojjta|*XhN7%BfzQJ2AhvQRD9;EHJOf?y4;WhRON<`b7a%c7rJM3 zZg2BcAAVd9)vH#KBoG7Zp0HxUWL1tIJvi0qDaR|rrdHl^=CwMbb$jrL!C~f_YmHOq z%$?O+JI_$57$YFMn+8?Mnc+o}I+{O?-PG&gd1=?qpxn+d@Pjt(%xs61sA|Sw>4fj0 z7zf^bvp9v&+h~Jczc#J_?nW><*w#X%3K3M`jqM#Vw+#r=VpdL$bSg6}v4Ad5R=Cma zujL+!N5wg#VS+?nFYhga^Z)<= literal 0 HcmV?d00001 diff --git a/docs/vo/conesearch/images/validator_html_2.png b/docs/vo/conesearch/images/validator_html_2.png new file mode 100644 index 0000000000000000000000000000000000000000..a10a56b508ec1f7636cd831cb914e920cff17ab5 GIT binary patch literal 56370 zcmdqIWpGq$6fM|s7YGsJAwo!qySqRf;x5G9-JKZ3U5UH96L)tbN;(~P>p5`meXriD znyRU(dh=rroiugQ-RJB5ZCPvWeZm#xr7%zlQ9&RO#s_KfPaqHi3J3(FfQ$&7Ij)jU z0-g{JWu(MG&_DmO+Y94>Gbnb_8jc_k8qS{|7*J|D0dNw@>4TgE(i$u-79U7D<1PgR zA_sjC7g2UwK3em&BEHHHdM>2jXAew|amn4O`%JCpuE=eSQCuTabefocDq3O~_t_}! 
z<6;*|nggADWv+{+KZ#I4rs(5nQHVbe+OkRdI|} z<#seUluTyTmruyHGoGv`JRnr#f@HC{o<1G>Pd*c0_Uli`j6Z+=Y!AX777_pNpgzzV zg?;nykvrW_PVwU3Ln++o+qbZPkD{L}($fFENBI{471e+5p|h7UkvE!QH%O(CiTv;7 zj|H9*B!ru`>DBH)P2FT{N~%hH%w}OI#!fZ;rg8 znZ-Vquk`rm0*+qZ@MmXl1uu*)*6as;__zY}GKZW8vsZFAbc$b4*14`Juh&}r>6f}C z%FECl_q6pasJIF8rKP7A)6jT*{dh)oT-H+C zgcp2Kjo!sU?lc6V(-!8@PlZJktTqwE6Hw`DpW|_DPk3m;TZ?6WL9U$FLh`C@1=`-W zqM*|R%F$MjcTcEyBox&P-rP6{B}WcR4;L0N!mTpb^q0xe7Cwl;_Bs{O?R`G@&oI1~ z5xhGILvCRsy8*zu%46}KR&lcPi*4u`edV)X4n& zG73M801{T03jvLxq9FRsawaz3le;MKr9HYHy~l*_>}Tr~ zKmmp6Oye83iS{yM-@k!}w);qzQiTN|PiSIMCBZu`%VL^oVA3X_is}XJt)1vDIHO-; zjq;VJ7UY2QE5MqQhvvA?7SI4<`d645It};-X(5w^6!8fOxL$4R3!V`cxSCcx1qr(x z;G4`O&zeN0Yv=baB=6a?P@AubsP!`OW0@`(P7Kd&a==zOSIo5?d>wn1U8hq9mq*7U z6Z8TEL?rZ9PjazRe@!Fvy#hSRj`a{HlI?z5=CctG#vod2Q+nmrzM=!o^ zRm1tM%hMLseN42$3129S8+%wcr5LD++fi#Y$EBU9DV@6#C5QhmI@cRVWDx_s(%GPC zIyh`Zvz~yGn=lC@sD}bx0iDv#i=Z2)7ulN^@#)u9I4(g;^j2Qz{zfL#3-UDJdtaLJr3?&}s)g?iy4Mc5R`JpsDkJS2nFDMU-Y`C0n;NJyRb>Z>N1Iu-~u z^H_@t?l}3dro`V9&5T4>hfwbPxzuo`hgpp{j3qb%`sLi{Q5q8*4~D zTtF6{c=1*~m7Oe5PV9eP=d#pz?;=3_tWUrq5OQt0Vvvk@zi}A4`Ev8(p17Q8_x|G_ zTPdT<)dt8iCgE>?XuHCDfPII=Qb_M_=Y)fRN-|+{rsZTlh|zc{9Pn z3nPwPE#Jgp#E?RE)*}{axk++{;7rCsM1_9pgcXv|sAF&axhu z{7#WPLvz?TZv=%NH8#$%W5_A68$9OO17V}-JDr-ipE4l-J_5>SBQ)5f7rb@m_I0>( z)WOQ52F2U$wZp=tU!xFGEFO4RgJ9op4C1UZD?YdC&V|7&)b$xEDPdAP`JHGc8=6WZO+=GJU52NSRwQ? 
z_G^+AcXsCJ*{ULePq%y;`}S|dCMYs;r*%2+U2rqAeMw7Muc-v@G8VgK$dUK z^723wATxaDpba8%eo`qQ;r?;B8ACR2+smzDsE378D;W}R$mGQr4nm8PQ+rnuNx{!s zdfNzsLH!6Wn&g!wV}{etr2!2)vqM@E?KggG<4F8**Y+>lQi2og%u!=kaq(#8HW zmO!Fq`N{ZOm}k>vq)79`v2C3hUAzOC5^siFSz1w*9@!gLbS*LD4(V^5Y(*68pDdOZ zdH6lmXT4;gTix=?a-ZWTag0ByE+=-fvdwcTRxBD`rjdTe&!te8~jZ8{d;fJUVEG$rgD0i-RMo4m{dMWahc%OUDR7;GrQB@RI zg4>ji!izzY{;m^9AF@OQUZ&XHI3n&3xV2bT6wI#hrU-oq>-7h0pfK}6_cX}>3yEN3yA{DSnUqf`pC$)=#%I(wow;Fl zsKPSJ3MPgo-~YPFi~e0*2g<3Et%3`z*j1g;G%km2T(Oqus~lU~(CPf4rDK@{Q}YJD zt=xXekGJ@Zop0CIVJ3W~_IK^4eY?3jza^B!vw1<*pXm8Xc8BnNbQ(?QP-o#$*HwYa z+`$(1^z;B3reeh3+F-v)4!M^g5%VP!oj4{98<7AWbx&;=fuV?y(6NKiyv3_?#;{yu zjl`)lFDV0&(8!AKXoC122mi2pq&8y4qsuJ@sgp zC^X7B)X!O&aDiMfumQ*+TpZiYt{^fI=Gj6>^!vBGbNR80zQLrQE}2)PK|(Um1NbOX zM>S5l*WH@nv<_oG-#j6nfAjxZt%<}8aj_&)=iT_%WYSn@RiC`b?I+l07 zKL3l)%swEYnhJv7dgPDMRLiaBgeAiC`jUpY=3V)0uO0NAW`&YqS#n<8yo-GFDSUD@ z@nd#Q+^(F^?fUAgQ%ψjyT47Oh(ZP8OP*kkI@1fuaS&!K$7LV%k=T__x6Ajxkm za{gtnsrW?fUSfqjPJSlM2WV&!AS%@RA2GVGj8r7ef%GL{k(Rw29ykq8Ty zO=dmpAHUCNBBNq}Oo}|jkmKTLJx*lYV2x9b%Z7v?*NQ?(7qH!3m&g_l!-w`E^!*6; z-SY=0$g$`H)5=!!NMPog01sSnt#z82`|gmE!CtJJQsueIZrN@4)kou+*X3<3RnJ%7 zE8n;*3v+GkjIj)DQJZ=L4!BJ;oUbCD0U?-QQWQIROjTjWi? 
z5W+-SnMjVCi$s8=jyGK4wk=0P*(3U_+G>ez<~I4Nz~iW zr#NU!sRqHTc9rqj1G2QG;)Ah~lX(wQr@C(?C!$~j=pO_OJYX)q?5$B+-TCHUi4(F+lV^R1fEJFl{d?NplVwxHv`R-lQbwCqeJ%tr1l_l)Ps zLFX2)Sp*O@61Oz={S`C4V4}Qm{dw55xG!lsQiOL#2HR*hn{-jzvrwr&yo47KW+$v4k?8yq z1#|F4;P3u1pyy~OUyZmGi>5lOH;n3UoStpnwNsYG9trDlpq@PaurQZn7BvX_KhC)w!!;5JLly2+xA|k6kX{T4TS?2nj?WgWV{_vSrgv`Q^#_Zq)qV`1%OK2n#dSl8M}RkKOdV%yff?W_&+;tpH<9dF}I zhPn5Am+h;B#y$1rlR%*;UuAbXr-8gyxso=$4z_r=1;*!0ba=PpVKORqMj}Op(s`>c zzd3|?%HeDJh<6osUR94Ezs*;*y>;(SwflP@GkKl@}}jcaNk z3Vqy|;9@f~>sb2I4sJ*GC6uIk>2DioD>AKQMKU^C*xZl>UXFNIE8ckY9ZT})bpr&l zzsRN5xNLZC%((8^MnFa;^@SDoBqZ3}9`a)xFL(06e0^g7iWJPfP-YYz6I1BinQ8eH zHTqB8)Cx9FUBXG$mbvqOO?;E8C7oKZ1k*!OiJ5&}=t;h@73{*>X0~s&tc_sc0h^QO zQ$)6oagi2EA!`$QzkZ{CF#oWdIhKJv8VTBB$7@51-DhN)Whus>w%N3I^9_ZVx1DZ zv85<0LGdX;*aervN=oRmiMjdgIZpsc zH_7EUj?cG+b^D_ARiGk8_U>6?`u59^9ty7ssDS6EcsN@!VIW1@KXk5PUVP=C5E+SM zxRz~$v&G8(9ZplGUJZLZXH)mw#KGgU!n)bD31j7J!q?)vhGAUJ@o;wgR`rzTT+U-* z(C`mbOCy~SFc5i~_lRuc3dmIRJo1y=eD=!&N(R=`_n?n=oy?HUxY1Vi7Svc30s_bHY^jw2?z&gs>{)pc zc_C*a7sv^IeqZQ3Uwj8-aHSi4l8_CehC>^k&71ofO||iQK?sj)#>L-Ce4wgNAgLKC zx#0Y;Cgn16%@;u%W?nv45h|`Q*}Xllei6Q@f7>qNC2)52EB!Lne=vL?TiQUH5i|pf z4uj1of&FC07thdJq)FYYLt!+nT)tjCll;1HQhV};bZpS!11#V(VghUi8HL1l%~ZEs zG)BxFQMy3@N_X?(=JB*2u2OX@zp*)eb1p&9y&X!SEd5Q5UIRxmQBF>-55}`1u}4m8 zYRne80y%v?eH{e7B7VU>b#RH!C9LWZ&?vtq_y|%>kpbt78VwY^&MZ`MgIyC zT#*I@^VmYAxB#OGLWMd1_*&h>`<<;W%U_kL)UMJApM88&G&xLmbG(FaYRVIRtsD!k zDuhLb9dRfhjL%@UHf4K?0WR3J$Lp!#J|#6~rj)EsgKVgYKM$4wytcv$h#chE4|AE2 z#rjnQ_xi}LCx5W+lKEC0|4a`!oBI1cj-Gs1GN;Wlhywgq6jl0-E%73NTsXqQuF|Ej z{#Cs3Ye2q>u)v1j3i&Bp!mo1c@yikrR!ryJf+J$dVqwcryV|(n4cy-h(t>En%PNo( z!wK%32&R$|m8oEZlFcP(-w*NX6Cg5Mi)u4=GozRC)$p}<5B3C|vcd+7HZxbw2cW4g zj!4`dpLM&bhp$IVD?C9+Oh@fGxb3Z!vS7*#MkP-V3j|(RIQ-pv`ZO~20r4`k7 zG$K-_h7DebaDWOP=+QD886eLfjyk;PS=N+xOE4B!~2hF+lO*6@G)2-0s`H!e^W2;;dCsAzfsUh4#lnt{IS$ks#kQ{?^*s^ zY=ArU9bIFA}&BQT~5)nI1+^zV=;R|PFHM|(;)J1~xVZRsc2?O-kWsq{8|mA{ z{^DS@#!ZIhgJ_J}!I>>07jG2OvmLqZUH&t20JiT<%n5HtbYDxw;X}#HbbRGHqqxWQ 
zqsc`2z*4R2L@wmyXVk~-rCQVNiiCNy~HN{cC?6Lv2QUZAP_uhZ*)W^$R1eI!jwS_OP>< zen=;{aZW7uhXl_;*9zR?*$9dG2)u|NIkNePOC6-EYqKlh#J9Gv`l3(%Cu1K7^Z+## ziX1__zB+~j0VgKcr!u`ex?zAi4l9B^c_W#XXK8h3FH?i5%QDqon8x+)AJa^|c}FMB zeLJqd?kpx1k`3l{yrtPYbE|^-UT=fLD_=lOiCZef7M%aEq>0DNlsfA%dYT@+AV<=| zy=THYh*@u|*vYS+4aWB$HE)jB)2>}dwi(V7h0u@g z#xm=ifwLqb2JaW2WhkNm9SZ~!4fabcEO#*TE||k)MpS^Gxn5v8yuNdBJ_*RBjjpKm zvGmrQthu>qN7%>xH}UbXZyfh$L9Z?@8H;g6+wRv7>x>^fZg$tmB7jVV*)jn;*nBdy zIplT`H+D8$pP-6A?C^7~bnWb(Y<|Myx$Yh&q^V4ZCLmQ^q1&FCkU0UpziS)LUOetkSzAUB2^)a^_JWE|*8(Ogv<9yoj4k)K zPNg-BJ$F2zHFV+tu0-3IsG_P`0NK`sG&C?NRJKONaPA3AXj_m~Fwj;0o2Ld-+#9Jz zk1ST3%3sLEaE-N$jSV+hWgA$+ks|Z^0p7vlZt`*u7bh7pcR!Edeg>Pg!kfTZ>#(L} z6;Ud%X~mW6e&a|AD7vyU@cQ*_5nj06o82N-wrkOYDT8e@t^(uVKQ1^Q9WACblsQ;a zd)?gHNZ+B8y#Dymdf#E8zw|5JawqV6U8f~k$=EWDD*wJPVy}db*ATchmCW2_F6{E zHK(Qqvf?xqV1#DXmQBqRfr`wL-I@jCiptW;-pQ=#K&$}rFHsX*4p#6n?1HfkWl#7J zm&x&U+kARUt470V?kvYwA2q2pgkpnm=~aJ>z0n2;PQVG0tvWS^PqL3qmg`wSLXI6R z#CAecVV?72eynSj;f)@uc^RM@cM+D>SSsM#VH_;a7%xoSGt7Gcv=|^ysG!L+ZKRTU zHC%1d6#w7VG5xo*H2hQ2oFZ%t>N$4!S?>4(Yv}`(UQY>PIocde|_VQ`w27l38;1 z_S_?#GG;NKz~#{a33dZ(wdw=q><12r?E>9GZpKKP;ojJT*T|=vcP`NUZ<6K-L5j3N z(fhocYX=g>#?);@?FWlX{*!uQ-&8*k5RvtQJ3UE2Rh_;|7G*4fWV+sE?V->|IGpv7)RwK5)zbcsAl2mhe_rU2nu@5 zm_ovVA>);~R5&qHpn8#|+FjExmvAvj~r7QwXAwTc`dM~;9#iGoHuX72f1L9HYL)U`$s|-& zD0hnXukaxO)p{xdlr)T2UvrZ)8ejuU{zWhBjldU8VSv-2*ZD&Z0+8yl3KTr-+FOb7 z-_&}R<4nm6RU|#=E+wrkn-eCdr~d>(_+Wi5s+KfGV5Gg%h zvdp{RY_*9`SFx{OjiT!_{P_8U*hNEc^caPYKoV#wc$axa$x zlM)yAMV>VF;;nI3$(0JX=QYp~_Q1CWI{nvMnr>Wfwb6XfL)?HbjnYRgY!f%n+Q?&F z%V%b_Q5f&&W{_ro)rn+SZuTOYemgvPbchZ*Ot~~J#wiAt`$=J2{0l6ftHw6Yv9Gt# z&s|lSuJ7CPY$Dk3b>R|*bFZFK^m_@g+fd~f*N*#pe?c4n|1~-;* z1MN)j$Zk9Cc6t{R#dD7#bVZ4AaXg52Jyt%Fq$5HlYtr0vwoCW5-l4fy$=y4BLNl+r z7eE3(+6|v?WK0?%?R+&7TTb{{YrkL^T^|(}c87yCYxUo~M=Q%FH`t>h3n1UIwoY22 zW_H}@OLTueJ3C8X;Sk6aocib~#fON9G~L1jw2uK*_mBJ1e)OQH@vSFoue>q*H4=VV z@qOVNXSZ|!k2PL;zu`Db8j=N>@W>1UKtE@qOToMzy1%Qnf~bGE(069S-sER-y}|^{ z4D=mk(LzFm(DDG7wwao+p~K{wA7%6#(36_V?Ypm$zRj_6Im%9?QA9OgcFaedDf9zk 
z^HIf56!Y#b%c1);WK0tm|ArA>%1x3cp{_#Cf02e^a%$>V1%NW|UrxBK?mG0rEDCvE z9}l(O4H%IXTBDIaU;SLTz8`fSkc`fU2rb+Z9bblX8b;P<_K=5$CX95`TITM+B2I=6G)my|Fwd<$6fx%A6#c~^m4^u zH0Lc_{OL+P>|bZTdm9~#mq#ZnSsD8Q*!V$5Eq3~U;MB>FV*teN(dGSYci6;QTHz8} zvLdtHu)XQqK6CQ|o|i^av#iG5SC>OAXQpd9ZGF@Kt>t)`+vHoVzFu7PZgyKe!5TOA z0Hd|uXvkx5|0RL>a_^&Uhwuh~Kil@YQ#BeDFeb+m$J-uIL?0KkV#esB`qGU!Q$%{a zglro5d|2IBeW@LiJG9+Tlp$Etff8>w^j0sA4%a#MeLQ{Qgrs#xpXI^NQL;uI=f&<- zH2e2JH|`r=l^lj-3t%Sv=ftP?&44wQT||SiJMUvky)6DqoUj8TJ)`7uV^188$1}j< z=oi0axqS}|I%~oWD($HB7@zk;0gT^AL}neSR#9FTh{Kpff4sIC^#?0GACqm2S>(H( zKgFFzXONr$I9Kz5dR)Z@L8#=UJlgf>Xz(?>eQLkq#PNu{IE-MS~ zm689zus`r?;N}LBRcrj~3i)LwdZsL&N); z{5zdR^4mXcy_PZlTONN>9@-^m^bk zgF?R{cHK|6CTwo3*;m4JIepWiaIXpZh@`yTfBL*HV75H^z|E!iU z5vClv28^_TKyh(#qc{MdZpjz#2>;^jKPEB5UMDz%>K+vdpez9HncXVE&CZmx%K;M? zhSloJ8{t_1Q08e}9k%=ejBn)_SO&J2?f%B|)RLPy+HI-G14f0E-AjP^J#i)RxqCW5 zTtMlSJ}F6C_T~hdm{|*PxrC&TD*jj7j- zrK*Fr2{0rlm$p{NJ^f?A*!WwQ8kAEDy7w=(t~NTO5AJB0 zF#z(->|}=AadJNm8+=CM-WzBmy5HN1%B}fJc@lk_Z zFOmyTX4qfc(FGjqtTjEfy6zLtFu72Nv_Ol*ZX=kPGlaZDu}J!jWw)syK84CqRzY@nn4;h#tuQ9M>W9aYGL0Xd!g zZfNZ`A>|Axr^KF~>ZWL|UfKj97>xdNXv$ zD1W?z-@)nUvo`=IL}ICcxyK_hGR}!>eW1pK(KBE8A96Ef- zzX1#^xp8^;JAiM1HWa{+0QLr{W*X=_PPf3&m&U=cJbiNc_PGfANY#T!uJ<^XXz`UF?YM7=P>%eI) zE;_Z4e|CxiB_ScoEXXd}! 
zxr``8Y{=d!HPcM=RQ%6y6uZFp^tlS4BeDLp%fX6?PFgiE|J%2g4CthMu{M=Y18Ph9 zRh;Fi6|jZ5X>!069Zx5Bz0hatYQfJ@zO2!iY93`1+zWnx1l8JBq+ybwd9UJjc2*&- z@DT}R+d5c7vCy)L?r_Gwxf@kV55cd4YO1cwv#O!O_luJdYsjQC2{6sG0`>G9w@!aK zjF9dHrJ`FcUljEtKHQt0Jt0x_NwUVhjrf=+M;*^DhbXs+q zgE9Y>i8?AL$We{ZX|xabSELRoi1hXuj^JtXK4iIl+1l4JGluGe*~*j&w0--IL}Mjc zO_pE#!ERomgT?4i?YE)HrOhqmgUU7vtO>YaMUhzZ*{wiZG5&-cU#H_+K!^wxm_q($ z#eoEJ_1TxVOy93z_0%xb7JWEL$=0wy<0to?ehE6bS4qkU0hG0B!#>oq+XY+#9+N1Q z17NlVke)AEd;tI0d=P0`o8Rvrc)nmE7Ibja9XQ_rXE|#ezMl}Tp?3%%IY@6fKWTuv zYOPn}*&_Z$WmB<$RK)xuRT!}Ud}Hq}XxJZsx@MjY7&tlMbvEzs>I7Cye9ee?I1ML0 z{h3g7DmC-SMagXCcNh8`EJ^{0sDY*#Z|_n{YF=Kg9L|`ut=+767kM{vS|@HR7sq`E zv5C|Wk9;1Wl$8@3a3pad1ax523?vdhzSBEBp{$!AN)RBC%Xj!r%imB6dB6h`1ZaZM z??+siajHz{ko+2_PrL!pjnS z(^sGHdG!u@R3~)GQFq8JSwd?+Wgj4C$0FnsjNA;oBW)WD5W5l2R|8pGRDeDgQ zXD~Uj4*YW>EUY4~BB^EaQWS^H@CAAsyPy1CdDQ>8KF}_j{WrvBq^pVhsY^6y*-Ega z%3uQH;8%8VO>njz`{}H!M>TjtuBb(7yw!?@XDmsyG5_xMqCiNF+JK$9rCt@UGv(_0 z-%YQNiQdjKITJsx5xm)sV>UwEseei6`@-*p!dPj#Q+Wg6k zn>rUH^78NJd(LzVA75l1eqG&?3PnA@^;E+#sPIl3v05I@*md$?K#jXDl_q2=&Dm-T7SFfyQRMeEI|HwQ!kYA06>YJf)BIyPeXrdkY#;5U}olO!+?oum~| zvmP9hb)Vz5n|mDLE-*qeyA4i|5*Q18@j3?R?r8ZNFAZ=q!YOzcB0V#Z)^(_=DYn07 z7h-F~rxT}jZ6=;0E1WpUQR~mn2&ZuA^D7k%DqM~#Ab}ao1VdK=H8(d+ZQ@ukk{IjF zqXawvbb$HISzDBBu$uKZRcawFbTX{mhkRY}4yC`hXP}&Js6`1dx#n85*Zinxe)PyJ zYdy1@m(i;*Mx*f34z|os#uFc^KPo8pqLj01)@ehtHRO_cYEA*4jhXF?v4r8i(bOwO z8210pC3fI*&+PHM8jnYyKDxhpm0ir+LWI@(IWPMz7%^GR?<@~zl15J2^85tfZ8mUu zzaDp--KXY1wHgRd2neKmq}${f3)-;gM6gB@&) z2!u_a$i-pIJj?Mw%uRe@Q3T_Q2pZIDL*gCpXygHEYuxwf)5 znGa>qGk#!^_jG9g{*!RCUr`C=r3gHZ0Rvb=anhRwOCEf7HJsPQN!A}jCV2qIv5!x? 
zeInPz?w=~o~-DK3yH7?*Dnn=+E|8pEn;9*S+ zfwF{o`t7D=X#G#m*~or;qLV$E}n0xO?oA^DFv#m2va2jE!IR1Aiq@sC_i6 zBrk>p;!AM0;ok*I z95j7xF{K)CuT{MA-nlVQw!O;U-TeYyD6x3$-(Qk@5^-mz8B?i7dPl0f=@5=gin_aM zV#N%5WAUz(IxEg;^sNxv36^mn5fGdBIXHBOU(sLQH=3lMkiO20Le0FlVd{^TRJ^H+ zf!hCRn1eQZHUt$cSq?UK0$tk!T00+*5wW7P^Ox>N*JqQTm)5Yl^dNnZMqezrP|3;X zi$rS2F*HId;$tj>oiS#^edj?-YW7ZLOIPm?PGqk=<_^zO=$w}f%GVBUZa;&m-@4JF zvzZ8Myv;j9Fx8QZ91-4$|JvZQQ8sd~i~NSQL^N#{ru)!lx`yrBHG{K>rePBLW!na9 zAEk(pslJ^8cFyNGW+sib7OylqNCmh2IW>fztvsebx@7U;(%=ZK;fu5PZ;xNVI%F>+ zIW?FvE-L zUYNEMa1Tw0MkifJEN)%NM_Lkiy+ght!l+1i2^-^iu_s~@QK>z`29Ak)r2j#jvG<|F zY1A3xUIH$V3GoGCMqR7g5Lnx6O$Y4`(_HCLZ2XaNvm+DSVeip4s!I2LqD(*hePN@M zy@Sw%%7CN5{#Wbv-OG-TVe*HH^Ak{N&1Opzw-CqbqptoOw<4GfpM ziz$s8=Voo0^NYR!Ic-)K+JI9eI!#^7YzPEV7<+P!AO0y9rt451O@YphJUV4V$0P{-8%V{2UDbX=$NQ+s!#BJ zg1zo&Jd5*6h9u((ju`{T+Lk>Va;2FZ5&+7Ph_PF&8#BhXFe|zu}j- z1`{MjNx(;t0!7n_zGqW==3y8RJ*UzW$Q)TO48aT=62v^fbQpRZmcDOkbUFoI6{}G;i8FMWVg!fCcGA!N$Y2 zPoQ^CM!7FIL)sEtr-viS4}q`snXaT!^NO>oFYq)RIdX(&T;*r|xI~nAj$2LO@K>o2 z507g=mmn*-ZU`Q3JQ3aXTs{Md3(lqQTE6ivRCeQ3;kDiE5H^md*L%czhYhc9r-mJ3 z(=~~KevB%L4T6d}HU?mJ;7ZPpx20Lp8d^3@o2GV=h{hWqnMD)jG}Q$l~9g#2^$8uz}fCq-x@Voui6Rm`)g+OWWse$0}7%t?dME&gc-^*?jA_Ul8BN zl%CH2=zPDcIUmb<(BXh_YE{=#8|lpq15Nw_0`AG1uD}4@>k%Us_-6`n71ULHo4Iz3 zW#VCgg9QSE6s6Fa2yNWUPQfdT#ESzNS$IGZ$E2kv&*C%5vSlv!V{4?>u*h&zg|<20 zV=D_X?i*gjv?Dx?Oix^^8`GrVxX1T9ZYk-yCtGR*?mU$KATz?>YWLs-_xkiLvE4btNi@Uj!WP?jS3#SUP^aQ{wZ5*h}9y2HV-2x&~ zHy7H@UJd)&z0ig2qB2%3QeJO)i5^0D(BW&6wJ5QJTcfy(>X`~^z10UFa|Fk>*H=bI zitx#Z0P=8oBJp4gI-{h6RZwOatmif8M@MF)ux#FZsjoXshRni9bl^#WY@DHd`h%ku zqNUuiPhQ#3Q9jF*-&i#~he{?bkNqOz@Y~ur95I@b zdx5|b(WD%IU36|!5paZ;ox#O&eow#)Mw(=a7gYM3bGLr~k)b6ubaI6g7R07wx&|PT zq)yzrzz4$)cb*N8wq*AoWl1?@aoGH?M9A~0w{LTh`H6PZjXsR4Fzv@>HAmAU3xjPf zYBhK9KgWE&ac-q{>d=`uLk8x(%v4({j2?VldmPoM2^&n1pu^9**E{UR}}1ChA+!oE%&rt zZe5L#NyEu`iwikm=4RC;HRH>&We;Ylywd0$T{<C)?OS)uMmEVoMf*G@>F#_SW?un4ZS* ztTsP#D~rwT_$zWs%zzgBr4_C0SHf0BJYey0)R*I6P5d|*Cm|MiHj~_oi72!NTb4WB 
zqOLr_S-}(%cqg^-6EUeNV|JMi*+|O+A4BA4hJ>Oe6NEHLtdPm4pCI0K8{s(`<3xR3 z)}~4~7-%4>CIFknm|}7cr9XDcUi>!;(A##Q(;+5$Qj7?0O{cJ z$pN16gDjdLUfhkq2^Y+apUb-VX$JNm&%2iFbtV%9?&dBsODW$=Vv)hyP3U1(ydhJd z0OV)4FWmc_z=#q72nnLTKOO5~h@Lao?2J$db6tw|`kj2ks7bYYv|{)zkE6=BOEq6qrI`5pv8Qz!e{QAx7S75sym zw|J#|bI(x-|9o^%LPO1sdEhq>k{)JHtA@xLo2onN%gKjPM}?j%Mk(M^eZ%yVeI=j0tJW zwB@E$6EIR zBd|5_G(TR2<%-+hs}JqAk@jGf)vp6T;M7`|7cK&58^Ux!-jy$aHmx4tvl+9%2GuK2 zJZiwd4MbAh9b$XEDxks!g~eF0*s8PvZsK)KF#Bfb-^ZB3llXh6JO2WiOl_-Jby|~m zSCi9)DgcNK_DB9D4CRINn9o#YuU^n^23rZjuJ99hutlo&rZNi`VQ)c2RF-8zItyOb z3+c9{+-fsUDQR$zDHINj+&;&n%u*~YFQ zP4R)rG=s}df}pm7s*0q(cs-VzD^cPJtvuo8UXGK8=0($fua-U@fkMvW-n5sXE8F4# z@~}S;`LYfZt{}WiRbjlsshDYdSb&vEqi4MkE6*ft0<(Zhb8`=@nT$;P%CS6J??}L= z==;&x!fY7gE~e{!kEO8Mh=I7E$`s3@@}ku|<~0M&#sx~0rqQGO@w@l8TPr{GkKBIV z)3`7Fnw1*3bC3+>8bCi9Mtn`#$>fu`8$9f49)%3Wqn{MW`n!ciq{d1ra5f7$!=4qzpeFSs(tWA}9m~NasDDb$A{k z9>b9jeKiI4S!dWC+73c8I={^+zj5+<*)L%<&Ny(fyRp~3S!zV!X$xy4_2yJaIc2_n zy0Q7C1-FHFT!*yWSAn?%@lL;xm)4O;^F$c>O+Z7?POn9rqlI=VRNWl%YB3 z;FS8dd3x)Ta9X#H|D|Rt95N}oNR$}@0!qZYPQznxFt=`nO@Hv}ggn@-!*uzdJ!_Mf zylwglnB%&AYo#pPsA?$nrzzngzJ;@@UL!35PzUT>v31f?{VY|hrfa)Xd3a}_6YCEH zk}5|~=tRfo(G;S`FA4!e#~4)ayImJ0JE~P8b-ETMBCX1s>x4z=*C%r&zpm0|e{Uej znWKeJTi`neCwjeS2%o$r$=Q|sV!1O-*xsM?(_ya^K|h@s`IW@oQ0o@R`*}DbPA3_%w@9a$hTu z#WGvIF#*j#nfYVs)(NXaHG}Gl+xae-YY2f=#MoSf;|pGJ+s60rtMysU24>(zXE{1L z@*JHniw*e@H$>xC{k2QMi5X7^UL!F>Y5d!Y?zQK#{fjq7C@sS#dTjU3py*vXAI8d)J{6Y@8@pzjZ zrihit4pPj{?x?}pZ4G|uu}&29Opgz@PD_`_o_8+6MW<^|$;=uuWKUY_nX5zydX?C; zDI|dYfJ&6kh$P8`@X{qmPGVOOD}<@e$krXd#09^%LAh_IDf;_`nxbbNJKwoOiQU?X z@Jcs~Y_#L7%{5KDL1iP!l@{+-Ib%?7h3JEd)FvMWB^(UwYLn4qkI0okpKyrgy0sOAi=`o7A!af zcVFDyA-KCc1b3Ik-95MlcXxXx*UzrM)LnPo^;GSPW#=$Ev(wXk`t$8Rv+L~ncKHOQ zOW}}j4Z?_=8ff26R-IwCFDpzjFi0$i05u0`MJG?u$aY_bftLzNViqz@W3LR*2T?AT z88F6Z6@flZr5l)Ysbo-~M<=X(i);p8L8B#8bIQKRQ#Oxbbk-@ydY)b~MABn^Ja936 zUF#Wd%M*KIms8lJHf%B_l;|vJACx864=H`ys`OxOU}SdhNMjGz-OI%&V%FjWg0LpKF>ZXdX18? 
zrWTYz)Qqsj@t(R1zsfs{Sye`c}L5HUuUh<9A@#j=P$--S2%$k^dYjT&)JUi zwIt#yoav%ClB-JTR9=?rP%b>!{^sulCUOQ&(Dx1}gSj>?wm2AUJU$3u!4$aB)io>X zuRe`_jP*lIrBPrI>X>VQ2Tn)0jX+>I0OOKD@a-%9x7}ZIrGII!RTEFM9Qsztbgapc z!SpoFF}8dXeewp+VKRNd!2GIG@bN9S$YPKbvOD_^X718WG6~@;el)j-r#ZFHE4q#m zM=TPsP&bNLAB6Z9hdd-!rq`Osyqn63x>2w!pk1b6^DaC@hPyHV^PR49T64wN<=8|x z$5HolnD&$p*GLByHWi@yZ$F#Z5nLnfjFDma?A6X+y>|sc){4+-J{7z z2v0`Nd3b{)@MGIw8x*0rPefAv6dsnwDeW&k8GO@8@yj04<}P&1l49niL043yG5TAP zg|JW!hy>WX*!;5*9@ZlmsMEDd?Z!+$8PT7TOlG4K2kC|IB8|tYK z=d^r|Nj++gh`gR@0;&Q#YE3>RKhV6p?ZZ}exb6bO7NP!h4E$hj)!C>nG@Ig6K`$#9 z_!!e`NfpA12>^lT*n^OP$?wYq7w)@4zOqgQOhX6>17{XdDI+!;QWPIrV*S> zAw7(|Y^AI_8^v$e_);i)D;@TA089K6n}&mYt77P6sSx8; zATLk)u7*DM*oXGL?-NfQ9~Z7~LI~`z7(Zap3)y;ZGneUaeZN{f3h9jwZmI*vD2O8~F|3PJev7e*wf=BM!R{k^$kp;f|V%LA|U!%)zEY&~7L7 zS$lLu6qNRJW}pV5&)q_8ck=<&k}KWaf^VVZS*`?JHadi-i3&2JzACUKqZ9qsCoGt) z>i|4?@bYo2@vEpH$&nqKVPQym;o~3Q{nu-oBn$yh);=@%RTL}-nnQNFKBwe!nmo>G zE^gtMAN}2V+qUfg9{l=81M?l)duX80^gpj*%k52xPKlXQlGnNx(MJxFPk-B}kCrU@ z?eC-CUf|oj2b!P%^I11nK&tQGKX}DBModZkw-K-Shi&`+`)*R3gMxxSLKPGxpSSn= zgptwx>x}+7+6LGGR$HqlZQ1tCBd*+2U<-Ta$luQHT-$wke zU-kdf5&zHKP|`?sHglm|Jvc9H+>gR~7w=6~b<8DF&93E68(+lr`rxA?h-jw!H*V{@KxlZaJntYp+$@P20fnK)HtouGj%h??Wo>tZo59KUO5Q9K|G>&h z-fSHkpNXe+n|(t1(4D;!rQglQ-~#xzug_^j&_cy**hs(3--sCF^qtN!(7G-L*xX4C z9lw`p@)l4KdLDIU-$%1L)f%3qOH%Lp(gy1!Mlx$dyI!z7ko9Din(+c7BTp5spb$QZ zg73{|GB4~290oP$JoaY8&S2E{{waHS{UJJPWu#yK(7(dgq5f9hY!e$21v^jow(26* z>UIT|O`85-SK5t}WszruL21*lE-krIR{Vw$-9B!j_~nN^PF(z(r#03M({nATUbdq2 z^}M5@60VI1!_cmlc*7%)RYyWAtbjvCda?wjaGKQ#?sUMa6FD!_Zn`wop%v)9YOXL9 zm_UmSnOJQ>+6mNlaMDC?e<}$qzDWns;ay(B)CnOjnabslh%~u+`_I}>vofd?%3C|j z|8I2X~-95PAr62=Tb+{R47PiCo9uJJZMI1Bgx3|Rh?Acep?mu^(){}VVH1)@_ zqpOBPEs)8yGTt#(F|D^j9gDCDw;@P*bX%R1`cIrd*Ha=?J$&ssL&FJ1^lq`oNBdwM z8kQSxf7y3YaCL1Q-}sTY`e-^dOtnKoW2!dmH2+rTfT^eqoR~U2;XZonqLB}+F)Lf0 zmQ+Via~{g`Jlyv?xtJTbEEN3tK@{b*1k0xF(Jxq$NMNmLSK1)8=Fbhuh%RA*%XJW9 zeVzkWfBucE>OS&X9c>NT@l&rw=yH_(Xqzp84*r(G-qbcJDXH$J8YD)a*ayPK3`q&I 
z8J16<&fE5vtZN<06u#TwGR5||@wB_tTBA}WCp;{3LpUt>RbiO%W80AgEztr>Ar9Hy z5IwDjyT$9dIDE!Y;qX4jzqO4iHlDnmAb^|IfqF;(H`QuMyW^K43sdjpozS#3@`z8^ zw-Ny-?*1+1KSv)19ox%zjpy#NVOCu$@085JRJSi|4qRgsPnC~DzWO#O7gKHW1vekJ zRsCpi2%WwO%zpAu-rse2y@qMEtq1Q=|E+i~v6Xh>-UbV{%vI=_yQT7OD4w0v5G5k^ zW*4Dp9k?=Q1h8dVv(`s#3T!7B#zHrYv=7oKM}62Pj)qDO>$WCZuYw8RS2sUmB^K7{ zDfxbCVor=+-KP z#`O4n!*M?b16MYzuchz?s!SuL+!_b~0>x8>~57`e=y2(h4wSHQfSAXd%Q?pxhT6y)rzJC+&H3D!$s^mn! z+dZM-d1hq^S=xi8@CZebqb^$#uoo~Js3-47^?FAcN!;8v_)+`b>)rX|;J;rN3=N9N zCjG3HTnC%9*(n;U-&PmLhINE+_=w>{$cxQp0=6GWql_6Uo zx(6TrBM!e7B808@NQ?Q5)PB4@_kbv^Asi4Xu8^BZ&cFrJFdO_Oe~L{|4n#KFf7}aL z!N@<1Rd_BI7XVEj5zBHImzMGr|8Olh}q(0HRD&Lcrs)UCQ{5x|78_QO2vth6EK-Jc;hch+*w+<5aQ!Tmc^>syy3yAI)-}l zB{0&{4kmZ(pMY$OrFQFYAFxthwM}Mr0ChiRTSJJkZXw1ab1zSJNTI@}>b6kBe1yH$ zM%LLh>Yn{o*2_MEl7l!DpOV{9=v(X#<3>&P*R#AC-u4{AQT7i)-ku+?y$SBT3`d63 zVji=;`~Jp%8CP!gvT`c*5cDYvVVyDL0gu~9Va6|R@nig2jN5w=0XnIW0zCPK#hSRN zzUnR&Lp{yfp*8OfVQ!ytl9X)fb9T*1YHP@Ui?|WxQ%Sr%EqtPdAO5w719Lu2XnGrXLjTwN7cygPsrgt?Nf0BvyWmT&C6lS zP>8|Zs^3Q}MZWf3s#S8R-88_bps?Kpy;eKm9TQ^H@;|EFp^exj6MYyE%$T@}n@B4O z8{}Bh*Qar%OX7%Fep~Ge13f{n@hI=t_4#YLzettbirL&(NBWAw)EuXQsCvFbEY*Y9TXxZ1`OFelV zwtejUtsMR4JBER*yEE$6{tNBqoC=QG+Y|V+r@g`9P6pUnfpBTgYRzr@0~KKk(ibRo z8Wen8njZI$SXCjLl+U>S4f&Bi^Vdmq>|-^LB{gz5c(v5%%)kP2H_+W7W8>mfIkluC zg9TtbSc@QQ$wPOIOS!YGUZ!gU(RAy(@Gxk|^(i*zSTMK02a-A*;P7wAV>0Rk}08=Yl5a){z#Z@ui;Au6=mgTwfprJJp)sC0O7L- z?!bvIuTPZ>e;2V&Kx0GZUi!=LQy$C*KCFywP4ox8ofrH9cWCT;2oJCZAkc^FcSAF^ zPRwi{p5osmxb7CBZ79NIO+NAX@fq#)ow%z z9~{;RZgRy@AHw)Tu&MINGOupjQL{v7*I9m-Yhj&%r>v_{o~OLxGfjO}zm^YzlFIq< zk6pEUcL`XnuWzQ6L)yhwEl+M)yOM*WZ7#KdsOr&zCoJmkjG z4@1qU^(pl>D(l8C<1MHHP7m#?kC^dn(oybIjPsd5CJGebeO&1)2e>|7`an8O}cTgvDKbU!7g{@|N?IkRS|S zsX8p{DG~k!4u0(Zv|X7EiXaQrV945)M~4fegp+y$)1?JVy1LIc72_83>g0azq-^WA z<)xY2Y%sFfOndJm7L;X@b>QqJqbEAjRo@&3I+Hms@tGCT2+YM1CAxd>pV*SLP+&YS zrYW8``n1!-lJuVDA^K{MfQ!A}?=8f^HkPX>^TciYAfYzn?WoUblbleoSFSjQorBf% z@t2GjR;?`u^cWbdw+(}9MB1llfjHb6`F3{pyKEPg!NDWFJ1ngoQv>IwfLi)f&I(F-3uOUbUOwqI4dwN?*mc$a6ZA~%qXeq*K^PY 
zu*h-%?uUkuUwih3z_iZS$KSdf11-ax06+Z-SxMLr_Eg_cUip%!J6X9EjV<|QcuiM5uWvpT ztj>-g)`Cy!iSYSer4tN|^1$9KO$E%Jkhm|4|0F1&xDXt60WJTsO5#RXzBaf}H#%_h zeYkbQEYnb3q&YXGy;W{x&`2I%iUgN*%Bw?Jb6WKI+-kquQ^^2=Plj75#u-V2&Y%0G z>(U12j3}6*N{f+0gB&J2XfLi+YEkBHij8(OTwO#VrPF(!UXuLkdV(i@k-OeN{32@T#bs)c`GicR5StuL{U?8B zzia}iW8L=Sx79VKbwXg+6uN2*?i2^B+$hm?_x6Gg-29wXA#kW6HALSxp_mHPI8>i> zofMS1ll}O;7MM_s2vStp*9I%#u;c3@T-Tvyo}pwO)F@L3jaXIv#BrwR+Inw_B62{T za&Z_m)ndz|Yn&{Z<01z?~moPW3am<`+-# zsEGhm6(caK8qa@9bK$4ftqFwu>|6L1b*i&r>!}X&UZhY6X&R`Ctv>-d%g?KpuVd#l%vf16y=AaQ~sXQh8Fq|10cYBp@gMb zH%gC=qK%(IVOYmv&25U#;2gILfL z)IC*K*_7%obebgZ(kCPgWVf=MGXA7E+w22<|0Y;uZZu=%8#WRfP9s`YfqXe)ku4C# zj2;9diT`&HE}JS{%FUa~GSv>m%!;?WZS52B`e`gXEx)tuPKxn2HaJY`DC3vcp$1Y;l) zLmKp&+ibApS=XTKQLI(P77`u2}UCiDC=kZKcgHIHyznedaTBuhZO#3h_JUN$}Ui>!ztBpCyb zW|gWph)%=~@Y+=FB+lHE^9dNvy*tGRd@m|l#R8IIy-%)B(5Q=FGYca5Rj5*{%_-UZ zTP0e0zx;u{!(>+o$vSHNGV;o=4YGdlY2%wyaxh+Qy&k=+S&6cLj!0@&K5#`|C~r5Z z_eUU{jLw7R8v!wj>-X8>OY@M=ltOL`O`4O0jVN@b%jd}17IgGuG4seSW#n$P4@W!G zMFc#>=StG_3^bavzpkBQQOCMz%dz+6osV0TXcc@-NuSLtqIUEw=y&3M>FAVW-Cv!W;c!d9lHp=sffK@i?- zu!95|8uN#;4-U=u?@&_etS&Dpl9HTE0u`!Ux4R zUzqLScyG!`ks2+U_J-3t!~>%N?yvx}>cYKUwpt^W{8!!=#Z6oXMG(Pc2t;7GjcPHz?Q`ULS-VxNL1cfnTs`ylZ@xG60Csh8y znxl@(8@TuMP^CN=R}ajdZw^#D#;1?~#;$xcQ&Al6vxS5MZt6z)V_*76v%xTzm-n)j z1_+7}RvjgCZspb=A0^7hp8@JRG(9pB@Qrux7NmRa~QucJ>9w@g}jnWvH;3u*P9?i8Nz4o4qMZ$Ab9=tS_geJZ+?@^Qp(%e`~& zNK><~g8R1yqg$64Bh|Uv)A;7MRDHzp%IPpyaux!Vv!n4vo##ije;zmx9l%I`1fuTL z-znw0!#v^{`5O4Q|GacP1f;(H{evHgJCQMwe;Wa$ zm+a2y=;-`VGQXx%;Jy%=kr}`H_n883x(Nac!^HQ|renJQvtQuQ0{>hc4jG{z{m^L$ zoU+nYY5tvI|B&haIZikI|K`j8oXOvgFwhy5{muWwZd9z_Z*LhFtd30)&N>plR+r3( zN9pYqY=o!Jl}b6-6yvG4#j(v50VGIC3lY5ZgZ_PYr&k4YomnegQT2QR-XS>zB-k(PiQT~g`7faD$D zm7CU7cjt&@C~A;R1*9imm$Yv#41e0*yP!DVx9f(m-PvVc(DV00N(Zs*u~o+AKgFly z2b;ae*Ho%K9m)lV;%s6=Z2oPdONurcvq`%tz+dulux&UyU7pa`8wD=@DRl(ZOkup~ zjHoxN>9yA1R-M~5X!lbRu%g9r`r=NE+<@+1{$Z5;8`c~<0TldjlFcnTfv0D%&qET? z@o&U}A~sxE8P-^%coMcI9}U;`culA@;V#`t23O`j9$FevF{=&OYZw+;{#%`4D=(Y! 
zDmy5cAC*`P$4C+;^4!+59{ELxq9n|m@bVMhX$EOt>-notaRjjyfm1u(Nx>f=6S%|^ zP$Q6%DtISs8T^n0U=ZEZ#{!&$HZ~tcEoPWmSf_T4;7UBbdIk536L}$r8=KgKTxp2j zElT^3JK1CPy$KCB*98_vX7lVT4S0nxx0@23+vS&u2~Y1FBMegK9qdt3CZ2om^z1OG z2DsF_MDJy~Nor8Eht01WmALvY!p1TPw$<{`X}bWfiwllPkzT~n9^55>MO%w?XEVNB zKE-8y5r4`&!M1v83O9Wz?Q|~xM$;6b1CS*n-d-69C&V5lL+*h@<}xzz&30S+cS54E zxDzjeTHNbHx$#G-O?6hUA~FBgYBQ;!+^yFlcVM{I99aV`q-m}`rn)%wS^K>EykJV` z6iI$pPFAkGYM-uFW$*k~d1ie@5*rtEE|KduZL<@em{e8<6fZ?$P=U32j*MMZ2Dlv3 z%!lqt_eQ`SuUnTPP$sy10Eu22qt@-4g<9(IQ?4^(;gG)3I~iiBmvpb@h#9{jT(k54 zdZuV|OE2g#G=yGGnX)pe*NORis%?S1d4*OPJ#Cf`%nxpig zroms9O5iUEBuc5zN5Csz(6^g`qSII35cSvbHkDP-c{M~xS@cnDwvsWkhzT^@*`kP_ zAjRXV735ow{36iWynca)X2TYbeLy3zWLNcO$g*ttae6;@z zR;ho(d?W@x4~mzv1Rr)wtj<58=3OvP%~CePLO}_8M#hLUu@7D2&C`2EJM6u+DJl9; zo#HP7b)y=!ZHF7ss&@1+ygMJ-EGKK{*$?k2Q8QJCj$ej>XyOR#0QqqpemL@Y$U7wa zSiy9J4+FK+!gtn%*yQRgwU78WRRp9s^neda`LRu*had;SFKCP%x@|yx$X=QgJ3ioG zc1OpAid*!0ZrguT{m^R>Sjjh1cNjNZB4LhX`(UM8?e}aOLMQ<^?7+o85{W~~^4Q-c zk}RixNF=e_=b28O;b?w=(kUL=yzsa5(0u|~?!ho6u3Qt9hL@*{wu-*w&V`G+FX_76pb0-`v$?kITMJ>cM9nbO|Wa2gj(2k271q_w%9_n71(nCn(88`T2m>*>$!3P$X!Yq}5mR)L4qPQq zMw!W2Fn1q-;vR>0I}r6)y?&5XKFeJ!Mp@B6DLNc|R9QACH>;BoS^ay)piu}?{ai#FJI+iEFHCz4zHXvh#pO5@@{iT?h zuf*CK4~G-+2y0a;F+5oNy`LM)KJ1UC;Ww8@UH<~pjm$KkG-)gGxd)Bf=;ST&l%C#v zbzD+`Dfk{lnV)Qq+(7>INblj*gx7{Zn1NYew8X|a0Um(G*BL>naSw94_3aAUeHZw3 zLX|g4e*$y^U1DnnT*bzndj(xpmPzr=bx{{kU1O!IOOhBaV2LkwkH_%Nt~3!5{)IsF z0j&I!=tca{+P++pJk>N7TrUlmpB&9$X7g~6;2gpq#-UG zRrcXqyt{k4l-o_^m4Gu3$@?MqlG2KAeCNn;J0(r>!b~I*$LmSbu#;ODTg4JRHkn&_ z4wBPUaaXX!VhSFm$3}+NpwTrjmhaWKXQXzlx(J)xxJs1rsB&SDi(rUb%=-aal(+ir zX=8~XY@g8hTrw}1eBZxKO4mkKM@~vQ-xR<;OYd>P6Q%rIvLG1jKzva-*{gp~ildTL z$vNMuSyxK<5>kM`@Q&7vK~@BO9z7A226!t)}bM4()*)Iewv}BIWlUs6Av$gc;h>8{|h#nUT;5sIB z`w_ua#lyZ@U25gqHTkcqXIZ7C5{Pc)QDFnK^KyL;+9a)&~XAm~Pb0WpPfSD-UhGAA?S12|P)UvUR@B zgc96xe&!-U!z;?I+~LXJFVbQNf7qUR%ru)pm-0=C$as+{i?<6zoI@J(G+VZePaZ+N z%Q&J!MtZGo zaEiuoi_krMP$tLG#9^0ur!3S1Hm!?3Xd3l@lK&$I6#2I+!0)uQpU$@L-f*b8n+VtG 
z3Fn>N?KdTIx8x}L91&t*;8l~p{5Msg?wRV(T%FgIE*Xw2dLGCaMxn>wHSnOWPDqt_ z=%bHb;fok2?bHLxe62I^Ggc$O~RDIDA`;wxi~k86MP=al+% zRj02HQZWEIP)u4B^yokIfI|FQ&);k=5`9_laL2m#F79y%gzt%+rVI{r3Ov(t_89tF zmF(w^5U&r#+hnRu#3h*JG;!R+R;MRCkd1hOEZX7%6_jH$q+77~4@PfHKPQ%h zw!CA~!C1WH&*BI(9`L21z8Vt~)1s%w3%|%OF!>HBD)e+gwvjK7H##Z~2U5K-i#Jh` z(e>c_qourJ+^0M^|D`}ju5nJtes_opK{?)zOwFU%MV!L2_xr&|InI~~TZ(g3PJ&Cd z_WoJjdaYGp3cc{HrxP`*X*M?TRKgO+>eAhP!8$r9)Eor2ps(8mdNYUhNu|Ct@%eXO z1q!)^`u7^@J7)(769FN0mwH@C{;Sj2z>B}h3CvS}-G;AOs!(r!Q0uCcryq?N5H z`nd10?FVUqfoNK!W=M6KF@r1^v>~71Z3}nzwy0Ecy3LrhV=)QcYVqG$T0uD2fa>1C zoRz=@0N4*r09d|rAHecVQKt;fXltBZ`xQE+Y@7n5`XZ zpdC6JZzC8zVzDph#blHamS{_poT$%v%l-*}YM`hxo~k?jB3>Mvq@D*~m|4Vl+mvs@ zD;hQ-S>(f2@kV;{`w_!)8lWTFO9X-~JklKn1(-UBg4sfLmM&7}Dhze~CsD&8<%uY> zyXst46`T1R1A&zZDFjDGZAXSbqrhtV4xUa&wYMtLI-TJ$68h>+L<}Ys&s)2WQ8>x- zHKj)PVq-BbUQGs!3cn*a+cs%Lm0n> zAGgP)T1{k2X9>Yc`a{|eoJ0<2Ql3&>>BeokABRW)cX-#|SUwu{cAM(B*3XE2(nBqd z-7~K|zQU_bR@LZq*jc~s)r*j?rL)*1GO&pBRe5ENvDa&M3yz;zJ6{CU5P-Td2M2${ zum*6kpUo~u<|ZZ2_|I%@^6`B=Bi<~%OHNcXebFtw+qFo3=P)XISpQbk7936O@8#fB z^AL>CZj|w`uP8BljB$@8yZj3(HT+x!#=Mkh^ds>E*uONk@2kP+#`~lx(s}nWQBf+P zouRh6=%Nd8aaNy!X=5Gl*Xsw8V)vGHfdfQ1Zo{4~ySXy4A+J^sj9g8Wd%CrJ&qCuZ zv)*IJvS$i~A5rfA3a)SwWn8$g2eIlwu^8~+9Ufu|tzE=_Q8@uBKdu+uu zyU7HC7sy}VRnXqk?P)I62JdsIndgEzyQUOyuUo0X#!8k$9$L|2c>~x47}ct_zs6Ah zNx{j|>%Ugd1~&eOf=}{K+Q0!7OKXu%PM=f_QZYZzH}$daUx##vO{L)9mvf-9bffZ} zpMRNVp)%s?XJC638gW1s;gZNAhP2VF#O3ve69Od-BiD1y2dE5}Tt9V|Hv(ffwC`=8 zyl&oPKE0BufT5y&bx&_qllCoERQ4Y*+_ecKiGBSI%4GCl9~AReTvlaJRzYCb(K;Z9 zAgBM0Put^c7(|B_mGTYg5QJI6q}L*ol6sx^%mvKryGT&-s&Ff3ouT-rSK|v=g!jS| zXH;4<9+X$pxH?3Z{4;*#W6irU{mKXp9gZF>t2Ra-ScsFLN{Rzf zf@Vp@)a)wx7i-pyFR~4a^n4J(?^nu%Dk$GDGBy0PWj1(BHe@MenG`wHN*{{hoNwpR z(pgccF0kr+_QTNTdtW-1f$#jpO^onNVtFkZoI7;#PnWzmaR<>g;p7g^Ogu*T1`U;NImx3UGoGlqC*hRGwRUExI!MWufD zIb(Vk5#t;tibck$-6lD?uc#n38x`^?J&wD9{f@!o-VSXBrz2GcD^+Fi$1X7R7ZO8PhWooimj|w|m^ckQnDG}h z<|)pr7ki|-UYg${ryqW}6FgZnBQvaapp^5V!5*kRt}XQCke2jw7;x94rf~CI#byG%g9ht@&JrRer%%W400Em0!Eta3qDw|1qUPn4 
zR331FYanM!bIaJWoS1!{oqef?W+<4v>ew~&u^+uMN5Duf8B}jHFEg5t99TzF5))s; zcqMOR=>4;@|6QK}P!9elrq}xGTdfSqR--VQGT`LCt0%fpiO}!*H`TmI_#f<&;zg=R zVvRaRjPw|J`TO$U0c6!5*!vOrmTu$(>6_rcck+sViG5gA{~K-%yn_YkW~W!S0vYVzpyxK)3qQ$!fwF;jDE}82bW>77;uSQX zf}G{_xKl;cPoe*?VBk0%Kjwix7xKzTzyG5&{_B-UjEe@!-si)3LxNyOcqqs{;DC$B zO7XAFf5!d4GT8qol&jw(nwXHE<-sfMs=n&{(Tc@g6@-n4zJ=Qy%P~S5hQN-G8V<9VnqHhah%_a(IB3_Nysp3cGXm*L!Ht!Z7@OM(bgCg3Z=NJh z0|gZt1CRY59ROBrY}9pD$fzT&p94g^g6X2FFdEpJT*C}Zgc1H-@$dZ4ivQlN0%xhY z>6HYzgOL*27yxf(e8`Zvr%aMIOgQ6~eNg#Xd_y+aX=g> z1pV9@fl$Y67u>&}c{kQRWK2ZjOw1^cgCdS{cl`{1~K5$`&{#YfHjg=L~=$#8iYT-D7v z`ma8PS}sM`FI0Eg%jtVTH9>Z$1k5|zO)rd-a>FgZC+3Ni`u8nP-&Y)4qYQpslZXh8 z(Lj#jg)JspH#Q#_ZUo!w)vck#%*4JPK&SL5R7B_)ZU0X#3rTV5fV-9g+|`uzZkT|m zCoIJe01f4ibnUXu+m3ucpOdd}d5~ZD!aU6)0SB-YGufNu+5^B`7#}&=${9$fj36oR zG?vzt%@GtI!u2^I4s`z@Nn9Y7gQK2;(3eNgj*sV*KU}YI5>A#VtILOO)yAe@f}8M7 zq9M}Qd+n`G)XJ?Ynv|5BLeHfq!EM4el_cUxZiYn>QgJVnxj~@ji=M*Fn}Nc$$y~XP z4bLF%6$`Ako+0o~NYLHOWx4h^SbX{q{8^=4IAiUq+=+eBp;m6Rr#4P9*rYav1)FW1 zF8+1_kX7T7;h=@VkvNDx$Iatj^e2&rEBK_i`!KT9;dDfa;)zjAawurKj-)zxbX2N6$gV7=|_9UcC{Gq|dh6pkOl&s8;)tr~F0U3zWE>ko&)ZFAJ_|z4&bj zG$=H=on>*nlLXMc$kB9q1`w=#rW9f+%aYRc#SMpyj#ID~SVqa*CqGe@TM4 z??)V+7Tho8&x|08mG9 zRyDBhf{%=YthFrj!iue9wjMo^1{^AvBqT@)fV886KDy1a_P;bXkS!#ts0?m%Vx7i9 z3260>&fs7}iOnzp712pP6C4CYv$Eeibq%Uz_%9ZNs;p_D_i?dZqz>L#_G%1ycOn3l z)xQ~!%3sPksOzgjFUd_|x6Opq4KD+R4S3`nIMJQwF~NPPd(TGC&qm;8T3dofMmdKC z{)wk7)8~G}niKgGQ3>1Sgl1af0ckS?poM>IyRq@eQ@_%NKGk>`#r=&FE0pqE`!$%T zndKDKzU@(#eDrGf<|L4w-*Yv74;qct3Z4Pp4Gi$kcuU`AG`l(Vj|kAs_R+&(bqMxEN`1q={n}n2bvCSD{otYT8<`EKu232?n7?+Jehq3a)70B`BozGj%QT|ORgb6 zWK%!3Z0R59pGWKu`hU%hfTA)bNYxof@#)X6L{N z$c=VsGnG018+&5m3x+uN_c-_=-ub{YOEg3&5B`!RGOsk=`Fl2}b+jOkd&WI*PdR){ zX$vjIyCNt&wRnv}8!%jtHSZ8P8Xr?@{Q|cR6^wJ37g<}wo%Z&M`#B3Rx!N$t8<>?*i>dP_{VTn_UK*}W67D4nw97;z%S0O|^n1NSdu61Y z;6X)Bphd0aTLtsMbz-i^#EVM*dd=h`6EOy!P)6;I0x{Z=;RFTH5#G~#n3i(zOj)W< zw}NErd?J>|`4kVV5JEU~?mgl8b9C&lAeHx5kQ#K}B=R!N_1>`0-9@kVQ(^yQrq@na 
zxyfgyMaMC7E2D=4sU@4jUpWaKMfqm~hQ0-|#4y9uJtDw+AGFo<1%4@>q^ZoB`Qbqn zyQRjS!E9grZs@nW!n}hrmw7N!lY>zNs_B?e)rw35C23Z8g2M z;aYk^-jDtWiJ;GaHo932x+QM3k;+AnxutrD*}1vg5#>eC@NQj6YAS6n>ECNM*-;bg zS3RQ1@h@fNIj903_%r)wJwy1BXBK1jd3*Lm%S zhO0%havXYIuNzWT1yAv#?BV_5SGU*F4K3-9ZJ37~e+eI(F5)+?O!|x7dyjH+O|q*| zRZowcj9fGQilGZ3W}K0g202XBV&{<`NmaxbV+6G<>%uY)u$-MT5}A-i8FPF*HQ$-P zPkeh5-6PjAXfD@b*(yc|e=$Z=nMHY~BLvnrrqG(_;ZRSoBcjRTn_8!{6zS~wAhgSd z?_zhP+Kn0KCl_?Y>1URg7hNA^#o7EQBwz4SJ&*v7j4LidNvw?9@O)JQcKpT3vv`UV$&_4vJ zF^z0Ljqa*H{4NAkkIQFlMC9}qD?O+wAaJ^#p06cwGEK7;ulJr!Rfix7Xu5<$$(gjt?IizS^t6(P|WB zGIyQUne9A`C>2fjAxcqdYn5oPMN=)dlZXEf@C#4736^y0%nI>IAgQjc^6rBE)3b3A zo0!W2XqZJGvU;m*;%cGiH-O$^f-zIJrJrlJTJoWf8hsp=`n}jYBt==JPuJCI)y@i* zN_j^|K$)P4mV8AH$ogi=Z2qvhjTda(oO;Z`bdw~pz(1wNaen;XM)f)a*Z&z+{V}#< zy}}|QX#smj7xQtGERU6(VQ7s!O2iq*YNGsiww9lx&o^dO|E|GC=7|^V+LPUg9ogZf z7IEEc1?uG^=O`V3Ik{@RyTLdz)IOoX7tq?5ne_D7!6}>wAz=O_7$2#3+N)1J;P$!s&TzH0y}BA>bYM)j&yfqpy=>{`{XlS$C+gw!OtPBCc^0ZVDMv~ zm1mE{pID~-EB?sAfFqV9hDT&3J=?s`46p6kC%kB6X2Fz+z)1{B+UTg6&#U9hupc^rAOg;d#T2}by`ae%lyRhVIza>BS= z=by5cWJA5H8ar!~dEcuTv$TPtQe@Wyw$vfN)Et2%<~UOhyh!dc9o~SOH*mm}T#fUi zPJhnvm)z1#7|f#2!QZv*x3JYIMuk`{`C6A706?0l=>MbcEra6P)~{bAK=1&;0)zm8 z#@z`LG&l+FP9V4jmteu&Ed=-A?(VLQJHeesn!Cu}`<(Nad+V+D+pVIgqM(JfX3zP| z@q5OY-(B0I8B)@YuX`=R5+=1JwgqA&`SO=be_SD8wY)p4p3|TG`sA%wuj!Ss2(|w) zHOO-Ib@gY4EFa}?g&|1d)K^H8FXT%nS~?LiU{cZ|G$zd8i+PZDzZ)^q(pQ$~@A0c> zD00CdAW|K>z#Fqu+f+Ww|2AMLwNO23JUT22s<@_1;*1GO{SOLdyaVYkOf9T|z0<+n zkKt_T?<&292yM-5dg#$MR+Il3Y3EED2ucyy!N0$i!Q1TMVU@K zjfV1z944m4Zrr&`I|`}6cbjihX?l9lc}r7%DoTsVGfDKnqBME$o>lcN&LF_=5w5rI zgHagee^@}UE2$dfg2MBlLfP*Jt#n7cSa$31X(abEO7>0ctR{%$g6$;shfNe$pBPfV zI}){~m0NU$+AA4obeBE^?uRVxoKnG>OiS{wfY7KVLj7($W8*aBiVBruSa%6Rz|>E- z!UF8+=V~QJSBQZG;w{tT_a<*M)~nH4FrBAd{et95(sfQ(O*Yl`1c-P>zMl=Zg=>(Ym%&$$ zZp)R5bG~s~+{NS_;1XYWuS}`esM!rz-e_xJZW(^qr9?Y$c38|?@>awp3{v7HN5q@g zSU9z;kXCZ&$bBVAx|5+gd~>T0)<&BnK})GI1^gDP>ej=rOBU3e@fOAsgmhb-scQ!S z*Oz<=l6hKW`vYw>f7X6;i$JP(&9 
zucV`Hv>97cU;T-I-jtroDL~BTCzA}6Z0Br@T(%LN@Kki?K_Sg72Kn7t+CvuNe3%?&#KmCPW zvopc^qfA$VV}`&!k0fs zf1&cXLnR{K;(%+-=+$2)7ehC-1~rz&Q1hGpIf1t^_Md+~Ms*K8jhg6@=WGRhJ(Hb$VG~dU_Ib1@kflaxZTdCS7JV?4{t!M z7ys=6XDSH$vKR&h)dz|S_z$oniWX3+h-b!cs8-bnnOWg(1U?We%{2F%2$cmmMo{EJ zRm#lY8qV8q7^epRak_NJ-Rb>Ye)S*rl3~e|XX4_3?E~I7L2bf(Fuw6W2%T*hKI{`adA^(z}l>CT;@PcKJrXm9Us?D$;!~cvOzOy9wvPZoeL^Rq8oDz&U(}`S_K1km-R!J z@tgNq+v)+@)vxa1(bBP9)RTv~HUe|$Kdxd|mfeo--8_Qb9_MHa?;juMzYds;(j&l2M@FA&$5dtM@*X+9M{jKbi8S^@<2 z5oH=vOjyn2aNNdQ&BP^lSBY}Q7H3a3EiJOCOK#uw*4JLo4lT0T?_42y@N;r8=COSq zae3h08A>|m&9kY-uJsVkCV9nOlx36_&AzAk3&qq;qWg#!T@~b={3@XBlirQ=kINSl zZJ5O7wbxNlHZ&*Q;j{te9z-Kc1egWS-d`;;A|DlGg0~s)8;>LvdkVn`g=LFYhpWw*Ian?E0?A;$`h3O`h_z_N7T2lKj1 zT~r!(DYu@Md~n}4k+Bg7BP$`cbi zB%wGGmE?kw?;{mh%ny{3J7M!2vXS=|f@0(q?9Js>_9`6h-5ab?NQhk(Q=^HZ;-Ya_ zgGo^cL5=GVQvJwDznScK^MAgr#V;`0=Zh+9J!8 zFN)uIbD7e8`qXTiYL+{$C={K$p3$zuvU_9v7V@xuF=x$4)YH${spNFtZB-(N?v$)L zVHP&O?x0ts7?&pekW~pC?{`+;=3zCYs`b>{I)>1Rt?bX#ELHid2v(Mt4bkNELSK;N zy>T0$ZsIZ`-j6iDHaZ@5xZBu_RRe9Hl-;qYFLhsu0S`+vz7wB!OZ(j==|!=@ro$}o z+N#&Ct8D!=()Piv{;cmdkO&-RS$hCI`t1NfSCfWTzIjTy;VXzJiAPK1#~K!+g^cmN z9(%8b)BaMEpM(DXOK?CL?#5_lB*Z-})UUG~3)t&s;({WtGPR160PhcS$I;evJKqWh1e|Ats=$-*%h%@$Tf z|0`mpOvYr-a`oasCWm(**%ij=cS8K{h*b>Z|KP<%V*YFPTX|om|b>;AQ(q*tsjLl3J1{HN?y)Y8%qIri=sX$`&0fLqC_EP7t;X>XNbkZU! 
z@tj2iUls*#jcBrCNC%97rW!SD77Z?O*q& z@Te*e$04p=P?P3DHEG2efQZ~*2qppEoo56%(A6n{v;l$12dPJ%wT(jB&Ah@66GkuD z<3hjLl1vhu-k&sG`KpMi!b&PXMz~bVv zPP_|!0i2vp0@CBrbE1`QbjNuhFZD#;E7k2H8E7|$@OpUCrrzlNdW zb_6a6qHDWlMTZr%(HH72mqW}zk#XOVoo}wv&f}!oM+$;56D#L{MFU%)t6688%-q<>s$7 z0UP(&%WeCtoktrkrIh}sc7AZtMdZ0%@^}LcFW1mI`?5k zM!vjt!m)C;_MFsbwUgLz1r8*r#|?q)oU0Z3HKie8bp+i0*@tAm;nB}L)h+0zlS18i zckj~rDsuz$T{R>|)z6z6xIK`Lv$kepTZAKzP zc&B|;w<;T;MNFc%<>6DKm0$8OnrY6SKj@kaq$7jls#)fnhR*6ewX;cG+3XMJyY-_y z&%9VS*!DK@%}CR|$RN=D6{3Jq{IXex!J{~m1&ALG*lzLYk&b7$lwMJ>LzUcEs#v1G zAu`HMmAXauAwqw18JBfoLGj`m<7l|*ad4r>;^Ht+LrlHr5leB~OVqNX$SrMveaUlJ zQjj@g^Q}2W_-1dprNTeG01(n~&N%*86&L7_tY42e&P@N;zk^mc*H~YFmn-+M+pp)4bcKuC1jadJ3}t9(Z$R+6R3dkl_|Ifm=R?r8N;Xo zy=q#47DpCavFytXoJUKZ{Lxq@me{-_vG8y1A61+J)!@=^{JXvo)~nwF=2bgVb*q*8 z2FQVZ;f_YXTFsD&=s@Gn*6!Jj^jcY4Ck6SGAIm8n0}EzOave5?7+PTVvyiPzI?bbg zbD_)0k3=ZXxC@Kwl$MMp4O@pKIuHyJ>Oc$Yn{O4gaa-vH>Ns2OSsUczB2-7qCmo3e zVM%&@78^HJWn4eXgRb;u7}cT@kl2q632v3-&+^pxe#Jx-hk@q{;vQ>Sde_crxEBp{ z63&K^>1f~Hvwlk0h9~yf+CHqqc~r*18$^dv(u0ld=vamwY&HTg9zQ_@3O2YUpi&5q zOmLp9-$royIk%PKD{zWw+o#u@wrCSxD-P6HIvKu6aw@;%$IuP`)@e;ui|Ml^v>_0%tmrxs9M@^Bp9}ci zT9YH_n#x7^GWv|}a+Z@)1!H<-Kw5&N2enaJoe z9ZUaZPDElJE-suv#UDFOwOyZfV$#rJ zxO^_Lr6-2#*k55pDR@_shP(*yu@~+!nd6_8>)DH~f5@laDVGaC&B*W5q=G)IcD`~T z;eNpC+>0P^(9Z7uZF%-e!J&0dg2K+LjVV4|_f=fX$3i=K9e5x+4$2U?5*lE>OSsF- zF6&pO1;rqEZ9=VPY#94Xnsgl!|tybS1pNVL{1X3dP!J8asmzS6PS1m4lM>Ae- zx4>!zplq6a&ri#>*p3QK8t-?IvBEi$4BR66CG~a{NpQhktwkBnT<@ZhhUAR)-s+lp zM&Ea@iee;1mZ^?j4Ob#uT?V~ z4mjhWN~AG=Mx2UezC|OqvyNW{3+iOyA~BmzD2>}O;cH6?hMux*r;RWY4Tv9w+sTJs z-;4kds%m$F@eSLLI}s8RiQ)&9)*RoY1HF!l5|P-Y#T2tgc^cJ(hbSVFFepi%t}$y; zBMQOY4AfQ*Sav2I?r_1Ldanm1mvdA5E1md&e7X{Z^_5w%lyrhG^oAK`m+eVMsvTY_ zb#Mpn@tw6(^Ug7En)EJ?4f}NCtk>@#1b0pr`pP{;HXHYGr_#5r4KH7kUIOA3+@H-UM zz|=rtD5xR!_Clf{!48zG`tEf_aqHnZiGb?14xPu&c0hCoE!}HO!j_!Ox||ae*P?l> zQr(Vd$bDSDt3BO*;UQV)4r)?BOC2()q)f#9-R(8GLyk=~q!RlOV_s4G!Mb>#y68>` z;(P_HUn%~lWb&+Ng<}RvqOsYNOM@T-HLXz% 
z*|**P**uXVRGa&o5HtU83y@W>u9e&*<*?9Pc(XLV`$WwZ~Z~8%-VTbh!an#&bB5`_ujC@9TzvCmGM3XUa#~laP;p6sOJS##!pkX_o(lL&K4M!5jRKs+G52UtjOmo5l0c_)*j>ex2L5TqRbhAzVkb9>ULz zM(@{cT8wu)X>txw-wja>4GY3mhFrruG}D(Zt2n9Ndw;vi@?K#wQu{TKcBHtqF>PP$ zKvK)PreS3KEt>wgi^)^EsU)xJX)MsJ)-G0i$XU2^9w^N)8G~)}ppAE8;n9vLx1&7A zNWf$ei7BI{Pe5z{#O%>AHwpqdy~4eF5lTu{Av$1}REo#f2ncJKu43lii&_ZUAnxDk zU%hTZXCa=K{^uexseNQPXP3VrRvy2$&`B7qMIW^LKiaI}1PHZh^G#189k{D>W^&=? z3k0N%eo@m!)VlHWVJT1R8ucn&Qwt1UNcJ~wlshk+&m1Do&D=K-TO8p5Ud_Ie{R9q|@v{oIPT zuif#-sj{0}Dd3RP9~~4j^sTGedePgr!yEhZb9VnS9}(1gTep03X$zRje^V>gMHNhX zb^~RQ?pCa%3yPbesk2yT7j8gPqmQ&MoZ-rEQ*+hH3i&*b6N?MFC^t|k+@S>bYj?HI zd7bH)7Y_)&-F*1(>)INf*;ix@i1mmO(aI%5_@XDxBu{p`L z=JoE(8!|o9?-Wc4`nS2^6VX%XDobTN;l$InHW}aa%6TjCJ3OvK6!7w7R;I8{=yETw zf7kEDt6%caeJx(LTAUhpdAa-U%jKJRr+RC(*zU>x1i>KJ>gFOU`4<}V223K8);t;* zw(LzmJC9WrR@#8vuz#;2^oJ5+bKdIDb!SG|CnbRZI~sFLlZFr7VP4`3ViOL7L*7gp}d zL8yZ?U^HSE`3yp4zPOwicjKUgt&IRByS`ZWX4e+*nV1eY;5XY%SZ5!<$|7#B9#ZD3 zwq}x;7e{c@FrvuN*HEJ!8^<2jubRc<_z7Kp@w_u(J`_ul3J#>rj5H&`FtD`i*nQ z4_Wz?8z}p|v`2`2^q4u7bs~EymsqE{gf*!|zX)(P3dF<=q3CIg>$}4vF45`cEmX{W z0F4asn62o})~9U{e2H!{2{jdY>vq8VINsG3PaFSd$Sj)g5pZsWPp6FL{jh*ncqH0E zueGRuz%#bxn0pvE=s4pMN>TY$E!DYqa8stQ;*__HTt%vHDhQ_qP0qZu3=ePxn#|Wd z)tB-HdW&}Y#%z^}MX8q}@41S~zUfRW&b%IShzjb}PhV4Y+7fn9dmx`;^t^+(U&zr!NP5B)FPCfjo1Bn+I8Fu z*PSdEA$-~fTR9S|zvn)yS9?z&bK(y>f$LW9tqn?;WunMoo^Bik4ljD;Nz4TH{%xa6X;Ns;u_J4p z%ZJ!<2LEMRPIwC4fK~z?EffaLqY7DrA2|=J^sxYU#9D~>KK(*7V@#C0D}KrepYP++ z%*dFFR`1*_@mxo?V+L)8%)8!DC>wC+d>OWx1XY;+8WW%P$fJUgu`ra+zGK(I3FD~~ zB~&QlJeGL*WS-Xr@Q1an)g~6`kdxt6cJ>SPL-ecBPU-X;(&`+j#cDW1Dxz*d^9M|PK+#y(QBd%41(BWRB zV=;ZiN?}| z+#e51ej-Apunb5%v^6x(>E*rdf9H@;LHtnNY;ra-x}L3#WjJUND9G+>)xD}?PE5VN z{e-Rz#+^FSlUxf66{{txXQ!3>%rV#*s&qXP@Hn`hZT+<-!>AIWijk@})SJyPA0&UFi$aS948c<0NO_(4C?Grd9T-0*#^9 z61VhDa4S0xNkHw96>i={x~GTZQN(qN`v|Gg#b?4XN}2cMuZr)wO^+;dP9}zkU36%x53`c zCTruYmpecJcjGg_@ua;^g4*n`q%4eET?V5cXa44L&+BW6H=Ut8lzY6DE#lkD^Cw=F z`x_u1KdscKMNa8I$(+PLWX_mLao%mIZz{yIyO1=o%nr^wU9msrb5vEIKqxSLyYWtS 
zz_&FRz|youv~_F1cIN;_`dp!FwlKOj2>aw%BxWH5+iD%~yEc5)KNgc5Vse#3e|z(L zI;m>F(pZ0`O!&@?gxah4RiKVF%AF^ho&7`pymIv~+Jk?yyG`2vV0XxcHjaIDH=pl5 z&;HHsK$1`FE*qDJk~Y3VLd=B`tw-0|vEg*8wM)_^nB0?cU$}?MOS;sH+K7OovCAyi zX_hcRUyFbpAb(;@575YuD=(y6TMq1pjp*IVmik*vw1~sg?Qr{N4~#1IXVEtxJ3nug z?YfY)f#1=wS6>c zSUf8HN$!Oyp}GS#8*;z5ZyZ)$@^W9~a%Xaq(@{R{e_pc6Id#{rNXhjnc}2yD<5Jc$ ze4v!Bg-E3_9={;uF_LPL%Q)V3kOPu89ilgy$y5)gR1-RGuf;BP%NdN^?dO^0oPyqA z%WGsX)tFP^aJf*XktYFEZw05~o$@?7D|9uRA=(UFyr3|DDwW+z3nr+JSZoyaigWJB z-#9fFPT8BmP=xqtf~5vCa6GLpU(;RR$91$C9Vaw%O+t@1FVsLDjha2ZorZbDr$x^1 zjI&ndEm0C1l;fFicyDFKtWAaJ30^Iy>%zcvM{9H zBT1Lz%6-gnYhZyH)ID(A*uW19?v+NPlwgS^?#_2^>Z*f2bR#%clokjZTx?V!RV2dK|@{aja zMIWNQw37LczW`pG`2XO=|0h-azXN;klHNYu->QVXrqV}ZW?M?fWPBN*`3~L7Bx757 zsj0!bC6cCj2q}q!boKc&?urU_BV)7DLo?3(jkjeE8T+}VNi_u!t;dDWehh~${r5=; z;ywc)Zh)@=`3=;4JG*;XBfZjm->2u-YJq(ZhBEFM2K`RHTe`>{8T9r1K8zlq^VR$N zE|Gc0X>wDQ4VOMfOU*Hr51!7Vv3wJp&)|rkbijzAk_^DC*=~T*;}=?(A-BS0qWJl5 zTo`ie^amE^9>Z5!f@R&rJSYtD8e3#lt^&i38PejW5=bMO>si@}g$LG&Tw+Zg>o+wj4CP7aN!nd47hiGa=sNs$X zj;i;i@hdpb)ei4qKrj`+O}rTqXiX*=kl~xV!syIt4Zbx`=F|KdqDa`-%w@^*px#}6 z{d8y%e;DyKuC0vR!XU?$Y6GLIZ0}v0@B62Gh&e2M-#|^mlb8<~bOD0!GmoEqqv2DJ zlx-_g^ly=wyTStf#PpeQ z`)az zx%kHG6>kLVR5SNAbaE02IJoGR0?{e(L(yXNLJb_$)D4Rs@$4Ay^7t@hW+kYkvAZC2 zX?fp2mHc|D8Rq2@lZqyK`sbQikR8Na?{iwfEh0kTO7Jq#GX?GLF)u2%L*YrC@OUl< zf`H53^4tgF`g#;Zq`A3?SQLCe^q(t)mRe2 za7V=7^&LCPFL)0~z-Gw5{|w@4&fR|&=#}(s0-L+Syp~mVsMeIDlgTsFa1~7JEC zEbcCYpn#toXKP6J5_-GxytD?0g5_rXd0lKn9q0pGwl9l=G0VTBAOAwF|K3ecZ?rDO zeZWk-j7Kk5s<_$0&m_?)&G^7ypQ@q{s>qCwi4k&Cc7HBz!_d3Ml06loKL1;1le!Q8 z(%|cN=7g@YHmA|htrJZlz53>5q`;s096npbfh*WdLQZR)K-sT>@!QrlHtNm-98Lnn zmAhF@>M(W`C#2kcZt!G%M=s1oh|@X|eKW>Lml9n~0=nc9>A5amz~JKzx8E#NQn)5x zy{6lJ7j@wFrQ_~qRkzCvNJFZ+S5mo>TR`O#Ol(3_5+t#VfrkzMO?xCYlJ2yC%o~z+ z6B6kqUx)D+8n1fQ;KK$G2WmE^8<@ziTnd0>nJ$n%;n_41k%kGoo?L!B5o0{w+<*Di zVBgA?eBF=8!+?*n}($r)7A&y_!O zS?!)M4ToY5)mZXbAa89|JouqC=A#iUz1R`U%3-J8e7p9wlo z%Us{?Tm4>Sy;WifFc#LQJR|t62r&^`a{aWRHIyJTHk@z?pY~C2!?hRuVpwF%K{$9O 
zOsdVFML#I*QEZzTN5DzRok@pZ`4X^CAN=xkkw>enW6tlWReBGV>V_IWCaXCzvylCE zSi`}EZGB!)$DYhQ@2YfQh*!^Oopc>s2DF3Nd?>bS;HDQ={ALdGu=1e*zo>%5Roc%! z^}r~x3AtGQd0x4jNioS~`Wp9DOZ75SSTK7ACih+W^T$`*y<*Q((eVS7rw$a>FQ_T5 z>uX3r73Jwe*qJJ9*Hky3c|GmSUOsgJG%%{Lq(Z;HpB2-{ws7L4H00ZxS_wK%swQ#5 zaTdw=oj*qf)qZ$82F?Qq6kJ}wYSM8BlYUZoFmm;zrE|+mgtTvR{P^+VAkOJ#*wi%J zb1*ISG-gvCjxY_~^HusYEn`6zbEl=<6`y-$DxBaV?{e$I!Sx)^1NVH380uq1Q{qxzxXtP91T7m z3Tj*90sox`~Ey5gxKG3>Ai}X)v5`s0_5jIU+{wQrUMs&iqsG6<6iY9W%Hm(3y zX@gM7PDxaEf((xuK_f+KaMO*VxAHzbJtEPP!Szuvyh4K6zD!+XI!0&~xtx{837h;3 z&PdFFhpyIU6>m&X@9}K@LK8y_mJvf-zEUU)2}&=x<&;O+Nao^deiFkk9KXUz%%)S5KIf#{^&4FYyN8(jByZ zLJINmVcT}C0@9qp;W+bZw|S$>f|CX2gRtea+Z`a5nr6lp3nwVkzwfGwL)!Pg@4$BV z(voC}G&!*HIs?ZIE8bG6t3_IR;#!e#p09fdvoHg%#?>rwxVSCEPrM<@er9yj#Ppzp z+}JVKtnCP&=j`~Sf?4Y?fe;uNqOX#o@0BXc5~dv|CwY}J41OzeS^fTwZY)h7l5kso zo*9KTjv4MzWca|e;rNCF7S}VX%p*YLfKZ#yS{Z$bFZ&783;pp^ zFHXWU(xwcU_Ms7kaD#i4f7;+!c_kAG(myE?ynb^Z)A;IHoMo`rp5a#*+>~yqFeTr0 zAN#g)CzBMF#YOGUyAT(iI>md?2H%BStX9uN$`=BH@D_>`D`$#!Dy%c z0w&%%6=V=;t55mzylPEoF9m=6c%WXr4%xviw#(b;V@qCnK?f=I#254?CJOnjyPd@9 zN_rnM*Tr|lOLcVk?kEj?H0W@ysj2_g&-~FreYU;zyIogH=38#sI)~aLmDiW33 z-FJ*svE*)4xFxW5_JAq+yVqXuuYX7m%ji8j!#v-KZ~M7$fC*WH)k0laivqv-O0)wW z{Ek64fTkXdnhqT02BVQ5-cI}vE`yh*utR(U8Ra0}SBSZSmv-eD#llGrI!wgTyq2kb z9TT2Pf7nB=Rc;b!;OeZjtbH_aq>`%=LmRYLzL9^6?`Lt_s$WfqeI4+?JWfdv@-^tw2s8G(F94PlkOE&Df8wMy4LL{8P=NW zBD48woFuC4qJ44$pfEsONjr^+*y|ItJML2c@$kGC*iUhTi$=^$1%Z=Y^5&WUp+D8zq6Hs1(bhoN{qhv2szPrG}I3>wa!=}W!e~-KUV}sAy83FUHDha4@6OoK0 zO=zEtq?cEzK#|`1AE;7&>QH4-ILe2h*^G8^&C31|pSATuqLYsI-V@;xy@1P>lTP!$ zvKrv9>gy2ZMne7%?-a$4C%GJ-#na}_=J={FnSh-Rbe0!y{ffOBO7<^gNnl&o3oR;Y zV&oo-Jw}V6D!1R8WH?$1yZoKvED^(-@K|pKN$C{7r=?UsL z{#xCNtrNehg^ORmhP<;#Nh#JHx%@+5x|aVCm~lIvYDcbyxHwzh>s$(ubjsuBY)dH} zf9v`$=mzq=AD-u;Sv^qNe#&{cCQj>h1mZInK702Qp=d0_Xka4yClo6xRP%fKuer~v zn+2)$qru?dptT3`TpjM@FZK}MwQ0>e<$}Hc0wk^DPk@AnUsLpjTG-&>!9tkK@ELlk zF%WEX8LZ8dA=ir4-)f4kP%kUWb>JD4<$eyw{Vw#)bK!5ef|^*@$F_#4e!@c!Z!>3Q 
zmYE5amR`a$^10q|OTGTIj)BT79fB6Av4Y1Gv;^XgB}Ty2{FGpM!*)7d)ABw_$9DCc zh8her^+;t8j}0~ao#2<0FvaI{MS*rG;XSC!R@E{*u24qCBwvl*&z5y3!a{AaKw1Lc zzjg~apoES9vsmD_!GJ2Z&{2yzN5Ff#7F zKW@F->}+r3ioNLHrfqG(PJ_K?uA@QQ#_oEh7Mggqb|}xhfr-u`6((619%$9W)rivHVk98HDA}3fgOtd@I2E<^o#EZz?>RZ7k0!j+2LzFlC|c4 zFM*Y=tPyQ6=80(P8kZvjFG_7Z_#VAEkAPS1ZQ!qDpqZ;lN#$MP>WSu=Z%FWPTVKK& zQ|4>>$4e$f=49p*c2wOM$nPhUg!$*)ccq30<`@?G@vbZ)v)!+wgP6j@GiVB~Cj$NC zHcB_f@j022EWzcI34iX6G%5C$skAaDPKuoYpVj`KTf{ZpD{rup+cCZEx_BP_Jj2S9 zUto!Sh+9@LF0R?aOsxI$Mj$50~6>Nz@uHwW`eyZh}#1* zOS>v6J$sA$IjQ8Mro7ZOI=?h>zKA`bqNVk69GLkPXx)u=W!SvK*DnRQ0#_Zt6`Gov z^GDp$WaT;>CTtE+A{%9FkKhye!10tM;L@e`6Q$|^{<80Jqa5JoS`NM>4LSGlAl%xd zT`ri@c9p9!ZXmhJU#LA94jZnV8H9X({E69F$HKeBD3D8zQ!*sh;(>|h)pfvimrI(+ zV{N$a-8vHTzi>fU&jOc&QkNenF-NSgQM2j(z{P)~2c4vv?-Qnw)SxK0_)^HaOZhWP z6&xV+;U%u^GijdR={UY(mahm%wg|Y9(7a3pZGuZcZ!p44`LnYijz1h2I(PG*Q%2|H zM`nHD!*QEj3NFQ?LC0LBzck={zD#`uN3Vwb5)Tf3hle+u&nvfhndq8r@oQ_q8S4)L zVx7rbJg(|((R!X&q9Zt9TXLdV_-*#z3w0Tp*PiJ6UO}ew7;h7XhjNW9L20*xH|g3B zID0&F!eaZn%o%EhOWnU3T0D$mFtOcKQ%*VMS2^O3<5m5?mk9{IoUw z;xwPoZT^r5Q2-wk%p{mmqXa9bx(xbT@d~aA)xaSLDN;yC`Z)0pem(p?1jYLM}3es>6|X9U=npM2mNbQu1;XwWFRS=0mWWcgt*gTZLqOs(ThJT3K#sdj>r)1i275ac&6vyJ%f zzMk~`+jJrbfZI;CV8r~aWha;!KlMvK$fnRQ$2_{|q;<6D1uG>YreNLU%dwpe40%@$tc)Mf^rGJcCs^f=ZMXwD{2^yJDl6vngSVF(k?aV5Oan` zJDzf$OyeW-vX#@QgC+BztSt$J(DGRRSFEwAkysUV?=HLop5DS2w-1GBGQIM~#!O(H zxj0+*MdreEw5K*NLd_4wSu#Wk7_6M;JD)3{iPXNQr@zv+zw35ykZs&v+gbW2+_IdL zbux#RF$SGM+}D6Z9aw2={6qip(?rQXSZ^M(c;)0rXli~Hp(O;zX#f@WIKTSDi?jDS#1OVO5RcW8> zgvC!q$To};?&mmWkec3*@brfhEzUVv2pV301vwompe1sB$?^ zEv6s7t$hoD{t?*668;w0Pjr^ga}G}$h%5Ddu%wN_nU9e<|@xp{w&R*BtATbOXWMC@4B-8Bg6L~{v*SC z)jN2=yewJMqu09HBHl{jVf099DKTm&H953}Pyt4vybB(6o;d4m1!atTqjbziEWOaI zTeG7%r_=%VQ`a8uZ|3!%HwvNlLci3exajEU6NZTsT60hF0B4x+=7}Z=HA6u_As(4| zKgzRsq>tC`I8~(Vs;qYUQM>+8Wr?Xf3A3wsv0N$56b5JSF{vi9a`!;`A++JQncUtCCl76?$ zg;k5Hak)b(;b9Ep9;$y^Qj$VZd5{_ep8-ily~t{EsdYa8n<)Rz44dh&0s21`cB=oO zum^X*3-(S 
zrkG{q_jh^B(^ao&gk~Lq?u&5C-Y^-%GQo!^=WXAdSBw!kOpj*hz)8|ZUZTc9sb3ICAJmX(?}FBHaSmP12)^TzKr_+9T16```H@pk8?W|^x zpOdnw?|$#W+n@q1;P8zi*~(UA{0v4WSz%7*T(DkM?(GI|9<2iIj%LTey$YH!rj|&8+R05zWO~5po{Z<@MUa#Thw7)oS8w3SP`M;DJlv4XYX(K z7n{1;2r#Ek#PlX=<-#JfGy(ifA2sD(JS&d`XsfEiv6A8K4f)H`fliNWYq9G<`23Or z2ZtqL;FNSf;B(~SvZm0aOzgXpVg68AL^S~!;U0UnI0v+|f)i#u4D zz?F{BNAA5!qk)}lc^TtD*28aUGDde|pohHBgP{de&}|g)`w!qS60z$5kM)fHt0mi1 z3a^}R4fwFFxKe7DP*M^xoHx*ux{mbn;^H)P3K!<=%zN7d4X9AVw^nXn z2ae{g+Jr!!#o*)QC&dni5%Jzjvl-GQmF?3*2C#p8;bT6L;5Tzz+2mlUDOftv?oS<< zN{E?HV8~#)_p%xvn7ZOjgb4J&AJQwlLxwz;1nO$(yGF}{ggggVTty+$Qt(u?tvsswi)~HVc3F7|l zjP5_lt&=LxoQU!DnR}PHW>t+=RRD*DF4bA8x2~U&)~o87qC?RN$@;X&zi)C;P=dM8 z`=~jly_1yj6Y7WfS|>f_uSq+j{<3{j5}c% zcFdFk#)tPhqzU3;cZJk#^ATdt*V=f<@IesgOXB&OpF8x-K*!cn>1Mxwg)I4DLE)6X zd%*$=;oUwr>_z(8$H(&B%EjGL$qZ`4r~4fzV+4LFVHr_;pmkL#XJf$SB=biVSDz5Q z8_VORtqJl{6-2(c+S?wr)2GYwLON5W{2bQ`m>W3R9K*IjP;)#3bJe5v^A~h|Myj5p zZ1pe8RK{F{{bV#R39T%QIe#aox1{wK{4qT!+&Mquk0d{N9{5U)k|U2d-~MV$@13`4 zICMN{28e0esNGF`PXsM@o4WcA>r9ucaxhYX)DWK8`l^LAiX zcbVtUnN6gqk8D2D^0Tll85zmeJ;K=m@wc*#$>sOdCT#pd<`;KQ3Vl>O9{|a1G82*~~Pmb8VIh^Fj zVs2x102Q0XYN}WaLrwZ}H*gL(*3N9xYPU0V9@Q{-o}qMuNb5eqQSyj_Kp;Hx`xo)d z$X+XUYy4 zzm%42Nd6M+)=nH@qQ{i-lhlK&F9_#_eAGK$L5W zshiinl(aH64ZCT1Lr2RS(K0AG3NC!YX2uf{I*)VY-=oazdsegt$jw#Ib%LYrdFF|& zWV)_aaEnmNnjMPifirn`xhwm8lS@H9FmZ)paEM`WNUS99{!mBv2_2_DQc=0hovmcL zu5)2OH9Hs@UePddg(2^5;L?}C*ta^`+SRmd$B9f6^VM~+l6hyH2H%}3Dz|te|H!=) zSAq5;91lLM;cTz!+5V#hK@f>!qBxe2NHPKE9OaMg#Pswu`Um>czHYwa*(9N&Sp31| z{u=>*a+c2mbw^iWAbxHZ$9_;94;-?>NCxx`E;H7%=)*9Vfe@1A^(BIs$&h zRXOT@&2i+Yj)VIY1dG*~{YdV+UxZ)Hl&zPc>9-s$hd3Hu(c$+isBcKUv9an)PIFRT z8GYTjsiX5K(0SBq<7(#tnM<;pDUY4RUg@WoaoQY zwq#`FwEepJosQ~lO2%!%ipX9|j(jJDKJ4toNj_dZN2aZ4KN>!;#3a;s-(HTgjSBX> z%)&2>DxUh4hOHIi^wE*eVgaJ;=(h_HUmfA(`P15(yz-?_bW5up)$-@xs;Xwr6uCNi3x__!yNneb5w3)j%{T&>Y5I2l8FhzvYi literal 0 HcmV?d00001 diff --git a/docs/vo/conesearch/images/validator_html_3.png b/docs/vo/conesearch/images/validator_html_3.png new file mode 100644 index 
0000000000000000000000000000000000000000..aa3ac256d955b03497e69af7f2e907cf388055b7 GIT binary patch literal 45038 zcmcG$1yEc~v@SZhTY`IVcXtR*fCPfO6WoKl1q~iNfdIi_aJS$N!96%Lxcl4r|D1Df zomX}1-c#=tLou^w@9w?3SFiQ0ufN5ow<>ZNsKlrs5C}s-URn(Vf*k>YV4{%`fg{uA zKh=Rhh^9(%(xB%*KVRF5l7J&9PVzdgAQ0NiKff@bj4VRnAd;JcvJBD+EFP9HRrX_} z9|%MVQjnH>@3{zV3$P>6XnuY^VoI?r*(yyhX*>H09{TDL*5;-`qHXWHKhrmX=Po9* zWFGyC&|5~WQCCg78ijH(c_y*m`jx>246N=VJS%OnH2)DEEn8k$2t(aukukNRJ-i0} zqGooaCK06YAjW}u^g8|%{gWX1v%LIWe_nzkf8R)Aa{qfo@^jCdzu%$_V4ODR7JiuEiLI6S_t@tiee`e>%4{Vn z@>q$SWO>*BY$ttyfHt(h=fwV`r$dou6zAVB(DpdSS)PJrRl_%y=27F9Uh7i-dmH#M z8SNJhcDF9p+~=Et!%1B55fa3IzjJsK;k9tK4Fa*I1oLrYvG0veC%dIw?1St8UjCE3LRsOwmmaLvu zpt~$Z{a~JA5U7?2V=)HNGF<0G3)MnL78*_H8Hd#G>?A-s!Gj^vPkpKZJ0*yc<4%su zKRt{2@5q_LknXTpd3mgz9j*qjdq4jwP%=K)=}$;yABgnB3%SN*Ck&T7(|<7~xOpR} zoaz1>Lin>ivJ1{71LdCvBE3AmaA#W-j>+#+P~`HfWu+_WOuv^Nd-JOx} zs#At-WJyV+A5O?M8@p!?QU%=DjEc1u?|_#1KoVF-&p9P>o`Ca4^M+|L=Nkj6l-z4% zH+Uvjs0C3765`~&-ElCZ&Yh-|u^uSF`ujnx=_%pU%Kh=%XE}saO{4nxeL{!l?Zb&@ zEq3O%pgF{e=Hj~FpH%C-N2a#9+dfwg5c<0kiVFLHE7D5T>n^`IlCWs0Z2x+^&-*em zoDj{TRd{S;u4S7!Izr9*5`>(MESdg3P)I_XwzFf>@uC#e7Z-0Ll@XYs7W?EDi~KC= zUxi_p6O(ylC$;CvP}*Sy(Qy-9wRrNm7+|1de-Q{I;=lcI-$Gxjh85{7VtOcsTpyR?~rRPXlr z;BJg=z@egAy?f?P*$S=yrF)FyL~<~F&x?Pu^gG*XBO#aorIn&kJO}>sDy|+ zgM`Gtpf9-&KqJ;)8m*oIHGRABYSgDd&7EVfI6PvkaQr-f=u`=Ubt zE8EY4pVQS^@cNK}xc@u$as!vPfb#AGZev_YHevh{oQ6in+MT{82txxSr*1Z%64~M- zRo#z%UsvkId#Qoe7k=!T24A?Gk@o_MDOTy&TgePvDY3{=^3D@fdUQ;+YaKrzbtFcV zlpwbeNS###)nNji*>sB&dFd_4(8RWE<(TSetj`clOl z(KqjA6+=2tHcWpMGXz`ROqmV2D_S8oWO7|<(nLR@49m3m748EG<-ADT}dRV4weIqX;W^ATq9jEol| zlW-d9oEJOTgmIkl6LqB>irZ%Nk8UIv%lkpQV{Mv4f!IEL^GC1#7t93z0-b!C9DIg=q0>F@C?5*8N9dGb?3v;=*7Cjlwc zta;s_*fN1$_1x?KMnxsqgwXT&%oI9L`1B|qW6uSB?$5xvHN7rGN?2_-M5iF;^RKj; zXs;uKW5@b}vtc_|ffqATI?d$qV1g7Z7cybVQ|sW++toqFbDUy96l4!0E&fT$4tMhM zzM|FI)p9Y6YT~r=x06t|#vr>^d{6`|p$G@^Go5%Ieg4}DraeC}!9mPqmwe*nM3!x~ z^}@WVXp8dPZYg0NeQawsaHOe$)^|Al^M_cUP!cZJR-w zmrr|ORcWyG{YokLcEtLP&{S%INAZ=~ga^}SrB{AM=kd9p@@w+Bhu@KST2L;%tD$se 
z3)=Ii6%2cxKg=q8A#IT{a^ODk+hFv4J(MTqC-9IV zQvf$uoLutZ&9ZeG`nnDz$T4KR9z$EyNbQOCE=KK=y&ypNtmaPCi|$iBt9_?hputdH zaWTrNCnLHdsgp2oyGC08&^iF4i5QM%>*FWY#5qx z8cuxTkm3Dcp>nDxndMYA^7$(eXnm@K4F;sEt2Z&d5)w2QBgr&D4jMoGl93_Blp+<~ zQDEmspQPY=+o*QCeZaZ0p!70vK+q%-rz6jpgy#u8xBVh55o0dZ?Y2p##nMLYlseT&yqsX|YUF0&lFGJ?DHz%N>sD9F#~@ z;I%?(C}yOi;Fc;%aOjH zjX}QsZnuO86>+2uC>85bc~m;+NPb}P*TOaH-fhjyJB76Ng+<(+gtuX!4IZ1o*OgJ zU@e||<4-gklAbWZ@>m`IlxPK2CiMG=7r<6S&NAG+??!doyL^1g?cBI>R5JXInS|xq zzqDDagj+)+;SJqJiq#R!8swEXFG@B^cZ?&gT=0Z_%)~S zRM=JF*3>b{*+-7Nd_~2<7CJl>-L3kfE7n7Kx}^E@WpZeMv$)zcBlT}}>-9%XsV#H( zr`V_9eP_~$u?XSGOHTstoP(f%Bnn`)Z@k=qTIY4~*vy+5sAh9v*JgQB;7hiy#OMgRuxOB1*v!(tJ$ z4~D6Rb_sjl@ZX>j$IP_tkFmLWF@7voP*g;g>(SvJx`;3uVvL|f{^MMJ<`7(c-Q6b_ zaCFUk4tok#sR1sC%yO-(u@n7{F}>}Km(^AnKtR&l1TJH8G%`IaHJ^2HEZ?#^D9Rbv zZDRN?zFpEL>YzEkJ=;?=7BWele`0?j*-rVot)$yMv-H5_y=@4BumQ=nUP_%>b^(OT z)41xZU&_?S!jawAZ0XIUVj}2kqGIwdU%daZvo(#$(qpOWU^Ublpo#A>HBi^{?e%M? zO85NpZz?~ynYt;$Iq%l;`x2dZz*byA0K8kQueGqp8h8_Lu~5eF!>I=Jj3_P}ABV3` zq`;1ZMM2JIWX)XE2AE%|&{FkF=Jff5AdB?$ER;R9*RuH$azUe4&|*JZVX6Mte{hst z8>gV^b?;f&!jF~XmM9g_@T5uglbp6mW$t)gkV_6b&9j=HZ1G*%R6y~r(qoQ)u2S~9 zE;b8gdZBYscIL+vv-93XyW$JirLQYbCCO05eY)6tOnxeQYHZTrYL^?)tK98FlM1*- zZ_KoI!T3!7)I%QIG+OIos-;&uURX6%iV}(nK*A;9=$_$t!G#{OrKMm!9d7!&XcHSA z2F#MSY(^iISDZD_c&UWkdt`AC>%vgnw7qblH?~hSf0AHQ>+m7Wwhm1jRX#Cv<8=>S zN>4+Vm78ge&*vrGrLPQ%lB@(R{D+-%>YqKaq6uI5r0~^4Qog1SXtx~SGLWtwda`Y{)G!9fsKB+N}eY6ygOT^h-kl} zkhHFp068OaFr!BEU$s(jq|NS}{GwbrjTlgF;ay9pt9H#MN zL_O|YJLhL#Rp4+ks#4*{`&B-ExXtaW;7pgQLitm>s#918vtNpDpvBnu2GRU~o+JrrC}fkJfnlb)l8}KEE{U@wQj3_o_0njEL*G$$%eN9Hz@cBYSIzmZx|gDn_M5C< z!nk_0UpKYMq=lCx7*&u*m-FCG<+_%)?fL9*XP96;GkYv;?z9&s10uz}tsOsW!cwnF6OK?$wS6sDDP_imlqn35w2JI!#e#{e zB|ZST%iKuMpKVXw9*IMk{Fn}Til|4?t?ZlyF1y5|$0BdND|Y0DEZ8;yvP0fKdnAMG zQ>ipfl7av{(!9{-p~tbe;~)VY;0Opsg-}NbGf$?&$IeBTZCCn8=G^PuYP~Le#ZR5 zNr#hv9Fy^`;i~-5sYzdg%t_3-eXk9~AtDg7Dt_4wIj3@`MJUXX+zrV_xaQo=8Gg9Z zbIb=^{_6cDS)=78NB< zLO^%r5rU_ByV5u2gb%1g=)?e$=G=r0M=su^|3aEYZ@GW&S~-x^qf8S!t}< 
zYHewOyEfBvsxm8be8ec%chx5Ow639l;Nf$kpX{f*v|k;uK%7{0QAO)prQp81T5$cs zo&Q7guEtEfV9PAUzq)#etza=O#lc)bmWG(q5R|>raAa^Sb0RA0jmtyJdZn{5tvbUJp`?m1{pgSjk=5TX%xoL|di$JEX1Vx+<4jG#26iRESe zNv~u6yf^qh-tdxdNy@?xNOwq!h(KcTS;6br;MCjF-in?q%zgR}6{(ERBkJ6Zh?u7N z$h+lZXB|(kvEkp*ed6xR>Bh|--GVT908eAcBB%$IMnULd;XTh>s< zZ~re{hMcGCuU3<68J}e)FeWlvp8CDxLh3Iy$zRwb0^#9y|B`k{s0{H#rofWAb3 zefX(4t}ovBDh+ViK7KO8Q*;+rkC~g=0-3}(^M|Z=Bb^jI$P{s1&s9j-X;S&3X8Ba59>ljm$AOgD9XtA3tiL>$^_0GOzJ@TTW^5M9S>+P!A1+hTw>gA(2;B z8J%Zk7J@^5^-N83L@oqbm^>@Yc^0Ke#^smEgB9QSJh|+2gjmfkE(sHhFnm-Yzn-g# z4xaKzn`rX5WUsuA{(5o*Y-n76&Mzt&?c`{Koyr#8ZKBA3Y0v!5fARDi!tx~ zilqzS5c#O>XQ=bxfsJo$O^Als>6C|KWX zXLX!|<)=Glt@t+U2XT-4y>%n%&tnB&3*NtGQ^s@?+YDJRd(+`wcQkTcAHU-s7*;V8 zeB+F=jLKIU!r*3KJwbsQ=<2jJ!)A&f{m5b|Y#0-YDcw0+vv*fY@dHk(vcIas3|ETb zGq3!Zm3Ra8Kw@y)VZIZ8^!%kAi4z_&g_1lfKAeh5f&L+*GHo+L=ebo5e=eWJ>%|z&345YXb}(N z^hapIZa#7wQGd?I#q%sWbyYzD&GA^0jfNM5oGS(}VtO+UzkYclR><67Y33zbWCJI^k1<2rzpH z>ix!QYS>=lpQQE2AZSB21bD`CaRiF8?W}Cl@$@*iZOuy%GyiB%FOBvwi zHrS2|UmwQl@$b&Yo_){%VN#7L3nEi1#um6i_??Xjxt&yQ+>hGxax*URWh#R67lOor zdj(HE0VTuINXUsfA#ncE6Jh$L^~d_mK-MPh@?sR(s$>jV)VFOMv(KW05KxDU!@BDVqlDYo?4h;) z3KuZ$x&xw*G0(5~y&tIwZ(bv}9oOot=JJ~;G7-+>NkNX=kVP^DbzZKoK&7Cer8bZA zDbb8ca}R~*uT~E_J;p&){j{~BLus^`y7N9KmjW4lf|E^-4jMmaXmIO7Toz?$=j%4Z z5)*dGanSpzd<8ToOfV@Ff?ZF%a14dFe@N%(T*G0{306H4z(r<#p3p4lfdV~g3T&#t z08?rAybW}aNNZvAGQ!J_A$$Vj{6>WdTMKQ^bIghj>-%eHmsX6$Da-4zMn0zNFG}DK zv;`Fm*pAuC)d@=Klp@S$0=sn$DMR3^xR~Dm&Bj6FxM!v## zI=qX32^42mtnJ{a1cIK(*l|`ruUY;)NFDx?%it(|)pYa#C(1nq5wE41;4qxeqFh2E z??pV)2+`KbInK|1`mZ0DvKmmV17=sjS`afs4s>iG9_CFdi=^LNh`I3+LZX7U&_Anp z)Rx9$OGm}KH_zJRATx4^(3=l^-oVYGO)&EAu|<>^n3k42R?_f75olO zbRh*T>B;eN`Emc*$_6irS^cP(8TAJ5cWZ6*ZS5vP(xh5v!@{SDZ-nnCj@Wx}zbd!}F_v;BP`F4*X-n@1KR}!9cf97r?m$O}%2AP;Iu5|0u#xy@5 zf_j%?7_i@9izkdESqb8TbJfmjO8Z^R)kfl!s-F`m6Y1$EF7>(ZRx!TbTZsRx6R9m- zY+ZbvGrC|m=dl#Ka=}H-b}uaDwEC*^UQesue!&`;_~jDS9{0=uiG@Q{P-_k5yXSn# zA5ps1<--IR3#ZI3bhu!XhIZxf6>t7ek>sjU*=(P+@Z4E-+Fg^dvuhUoCb?FWk|xJj9tcFX^Go@&zEvJwZrY=V3ED+ev5}! 
zl4lCIA`W!4v*bn68PIgD@`gAH%>1A7oNK5*mRhurlV=r ze@;uirrAJ5I&z5@vzC2jUrg+^^SxAGlk*qBm<xUK^_| z`2dH+@!HZz-%#cH#CfLV&v>O1d)M^IBB_mg(^roM)b=|$47&zY9uF>{9(I|DbJ=JM z1?qm<#>^VwIH@_Xp>`9dK!8Q)!zP8|iNgzS%RY4Ph$V0i+1)f@Ez#HKm8&M5>+HTM zEkPGXzUgxZ7b;(#^txL7bY9crvV7h%HQYC369njATwKZ>CFkxGV%l*f$KOWRjU6bZ z*UL2x7H+bc37l_x}d6XyYS0Qkc;s> zrZ7#%=ReW*7+-7lD_`!_TXf;W<}44hhTOYk;t4#BYTqf5GC0m9JL#;!R@Jw22GFqlJ z(15G$lEG-=R+2-x@3!CaO0dD)mGmzjAIF6$QuhN4A}2CmVSdtzW_iPn1*w695K=e z&r)NDm)I+?q@|vrSpDauJ5dKhxj#Br?O%OYBleclwY{?Ab!or}A977ZwP$tZ(X7Y# zZbPOsK#P-7{ZS8lRawQA*8af>-KPOz@-dYn=Q1zMOqgF1cO~$i97pt-Gcv&s2P(Lz zS-l*_D}tDpEuWQ)6R-GLED1-?Gc7{!!0V{t zFCs->580pc##+c)Smbh=_Di&Qf2tB*Zg}zy<5bFCJyN*AauaVw6HC43yjbS96S%{( zGh*WI*7vmfq`Bdb?I2^pT{%zxf=4;*o;+>4u1h=&M+o=z{YyiEW8AJ;k~yD;o{8G5 zX1p&5g=$O!NaBtz&MP0#WjEzaoyr}99?D_&nTb{^M~UnYqJmFM3b^PeEEm)mXHwaq zL(>l42-He^5Ze?vT3j$i%UMlLeA_qMKCW&P;)QkVR4BwF`-#S|cb(Sp^Yj<7lVuB|#^@cy%L3adO+0lX3Y9CK>XHFdW7#~qp?dS7H(4_(1%^nTUE^j?-TNQEOU1)KDnj_$(`Uw!iglQ9_*}ju19C(wxnK zjgQ^B_IEYipeISV{%QoWmeps{u!~-rtJZ1VRgx>(oO0Qt-Y9 zfpD21+{(;K87h$7zQ4b-?=b z)f2J4-&|H}qAJ}cy8q5q$#vr3Om*Q+Y7oFVG|a)$dff~jTGv@aem>qkzF~?pv)}Ko z5IHQ=Dr50D0A^^=S)$HL9neXTrO&gdi6IaS@g+|bL%gTh*Lc?f*%zlu5FUN#;w2 z)Z;{=8~n&8vu1j9uXx9VW#F--nM#Zf$##V9n|`X$d@B)iZPL()wH+&z9cH2GPqOyQ zE8gNA+3G0IB*%rWe{hiAx`7vBD>c>vf58Ht7#Rp#TaWMn!Z~rU84=2ytm#5wWFz4& zD>zq!2lDN6cTNL|zb=leLC0wKt}pK@X(o_Ny!^hq&SV~i~mV}_l|m?{lRP^DGmu^l%{5?fq! 
zSQ{VMRT-S-wOPk(2{h{MmpobnxrRtB+^weoJ$ODqXXHTVxW^o=Lz?dZDch@$`k3J&6=DOs*_{_ zHGSEGs0`jxCN~Vw@@Q<09E~&f9BO24@{o|sw?`rhQ~K};*EQtDr+Iie>iA#Z!v{{` zy$x-PKefUqL^P^&uDiYivKJo5!PVElp3Gl*KTz-4lZAKmU^t{W9|Qq#)K{PDnrnNg zm~=7cB(Fc0r5LT{tTxl|8`n~~vx$oKuEh*yPB@Cf%90bF(6@| zEz)q=`m!P-oO5pS@t(u`p~u?qmo-VT0>%b)wN6B4@4)$F8Lf-bF2tGr$|Ht~j}0cn zd7`5uzv*kpR=_}PP|<67X^Zn~#dxh~(&0F8d^hZmCns>1=Ni(hme%`Capyde2Tl!k)NzT5QqOktnc69(9Y$sW`L` zFf~F?U-VHu5zE}9hxbzbhPf6kq&H@S$|TYp%$DK|r@ExY(WuSO#6N~sKR&o1uPVm^ ze9F!|j%bF&L#X+Z^0uHhx(Id35mOJVFu@Pgy zIrafh(-n9uneF$OzjetUyFiH7GF2VaJS|zjC7xRrnyZ*qTlVpoYF6%|Y`repUUmZZ z0$t%ljWYw#N_Y*5&hTK6Rja(g47M*VYAcW44!>?Sb^b^%v?$fPmO9CFyGk9Y(^Hb! zO(1S?w+Eyh+2FEAZo_@D(X?{r~W&~kf#59AYkNcf;mcu&D^$MLAJF+w1*S))f1%r1lN z%jnasf1(t_y)6zHSC3rc&eOz2QT0;p2d0}3(6W2i0g>3whEasc!9V07VlL{d6t?lZ zJ}5ON^Un%>UGmdF3&POV7;4{N)=0n0VS$N33%HJBz$%9pEkqNw)}Y;I$txO;o+=sK|Qe8~pD%c+H(e`%#kCa!McG)Snk37<2u~`;-X8u=v6De>M zUef&h!66r0Dm;a0dHvIoqN&SC#+W)>0{_q|aKjxra zuf#o!AuNM{f`hs9_gLDg2*Y6B6eX(k&cp&Gc`P)ECU8zAPxQhIU@A`GK7M&Q2%pMQ zP0sU^mkL)#=dI$SLxk02k9jbO8m?3Ge-mMg>|y$1z&z9b;7RBl}VR zt>f71(Rb@o!`l%RNPvXz7+_rL{Wa?^+TKv6ChtNUR@M;xX^pOEBsDaEJ;+YJs&%ci zJ-_;f12AAt%TAc*rczGGggzh9qw*v)Gys`{%+jO%j38qv%m*qf<20)PTllQfpmP-b zlA-RJS*Nel@b$%eeu-g1qrsa&ZR1+}^7RfjGFwSd6p1})wPtUAHM75%Aux#2guNz%&}Aw zX}Y%E7T7wbQRE3gx3E>_pModz1ATFEecQ>V)DdWRBKY5=Wp6-M6yuvb%U!48OjC&)R5 zy3hB#TZc6tfs-KwY{rOQ!+VhQ1E!ZU$~4tL>yKnB zkHOLatrn@uYK=S}%cm7>OfTD_NZ7?1-f#Re$lmxBGHi41Uf+6V2v7Hmf5yGq6Fs+I ztRaqt5w)2D@<;42YPJOG=z zu`ox|0BaLsTRvPr0;rFi#Y`TK*486jbh_fip`G(BW^Xm{65^@XwNC0Ut~=|ICQgl! 
z-fM(WBJX)M_oq5UFG_v_6W%#VWorUTl1YHYOKr0|p2q|=UQ{Z*ZC3}^S830vYG`aW z2yH%d)7xo|r!Xna^%%UxML5IrT3t#YLlte9XlA9eRi0kAz1r~oE%NIF9w7IIC4=$t zlgH6nQs!qBH^{EFP5bsvvFmkKJ6l< z+?Q`;+!;#xV|bM6Q-~qsinE#)3P!3$UVI4#5sF?H)pC~xEg$!RTRCpK&p9&OWz&;O z&wAczeb;eZI8r-C2VFpy#QquE9Sk+GB(Y4RgMjR1;XrQ;)8RqOPs2;27Sk;OuKa4n z$%k&YDI!9Jtkq4AQVq@*MSh^UcvO2Oxmof$5WkIEwjxf^x*_Z`)1Ls`ZU`+ z14^7)f^x(y#Ub{vFBI^HEb!Y~v;`c?4fz1)`%@NYBfdK_06*fHJYa^)cHvj>i6`Mf za#UP|5ca&LHwxb&%lBzj1BIENK@1wHQnxxYD;pgCN$dk_ErV=b+$=B}Bj{NN1G4uAWOOEX4; zEkZ19tn{_{NfEo#nKaR8)Lt~Yf75Agrw}KVGLoXGCreOX)5rY9QfD3@69lCE5rUrw z2+RTfZk?|K^ybSgRYD%a{QdH3>q{CMh2Tb6mV6eFI+7E!B~r3*&|VSo&jrzuHerqL zg+W1c^RQf;Pszibs{}xh18bzv#s=h5w2fbO`$a6pRA?}9Z-axY&HCEaSP855a9S+*8qM2O{UFW1015IzoxqtiS|GGFV$;_Y|S7@=X`awmo1N` z#fBI8kucbzp76HaZP=NVBT2Mrk_Y2UdUm8f(p3j|&7Ul4z*}re{|&ezb#}CT{%Ox6 zGS*b{`FlwTQ7_xCaRSKrB86T5k=4i7fZrPC`cp=bjpaUttc3kDe%i#a-cD!MXwjMV z>dJ=^6O$ZXeY|soqW$NzXickYo}{-Rr7p;pMYuk4{Pm8b7b6qnk;sQESIq zIb-&66Iq6tW7o%qH$wwp;QZwX+wxHxS+{EJ4BnDE0~O>}-nDwLnHW|NF4F(S=u%v@ z-2&Ic1A#b_<<6at@T1PN(Op;hR!q4-htoHArP6W@mSDofvI&nnaX` zfK?!VL-Y&4kv(TY6~${xe21OC2F(BHjj!lDC)_qjUc ztfWo+RCX`=(fhMN(!gCgcGGw@i}3YKwM=BAY}wu>d&=ij<$vL$o%ozb0Le{?ug*BA zuc<1?o`~<%&T+=#)jz0MI}5X>B+p%)nJQNr%x2|}*bHdPtV_ayfB;C*$bsSq^ma~b z^1^2?sO#5?6M}xJeks7JsohxKPwWH8=umI*RH7fwLeO?+tyVEr9pOI#n9hF#U>wI; zM*jfwNRlO!RGRf-^7X>Z^~V%~#TZNf2l%;!%WrXjUU>cz%2ri|qYU-WBMFH>w71~) zw!>}7SqTFwC>YYvlYpA2w>W+DKB_`7Et-#z2?Z5ZTWP^twG%XhbsIybKf6Ru%#W`Cx`>)Xc0+W~65 z5e7G*%KsG+X3jN#6kSCCL{i{sGC`hTAU*@=78|~+>Jz`xQfR}VtuMx@5U=My&xK#w z<4c8ZX9MHVgy89l?b-NJ8};V+lv;$GFRM(IdR+h$08j&9^urRwaeH!Zl%nZRTXO|u z`ang@%a<=t)*$?OBmJ)tj?u_5W}(c62-&rlhSnM3+P)4_Ks|%Bap~!0>EUx7SrP%A z^Hb*empcd4#oF+d77wceoF|T$bCk`GfPNa#j{d1x>RaU%6DPeDfc5WP_TSbPv6Nm# zJNa)AhyIvc=H0}jHowPl7Bet#_z!TPv}pu`x!7mJIs3V`7v?&$Oew+h3{(>i-#0y4 zaGseQbnrocME3Vat5_RFuva1tMSC<+|5*C(d*?56k)4i5IhShBF#2kN1TogdfQ#JR z+zfXWAP)~n!Z85qZ2BIRb`wnw(wiXoG2|pN=LVy)G<6FR>pop&YYT{G6lbfu*Lmj~ zDoZm1Zup1o@8_Be0AZ!8+FzyB3jPqUaZ8slUS@SXYg{qE1C%o 
zNSSR@<8se^LpKcmB;f-BtV&uejxt5D>k^H?uRiVHHNM56sshU_U|depIGVH#f6ssS zPf@}6YRAa0aao{pO!&=?I~uSwcA`U`F#LNsN95(Q`L13WHM^lGDoXBBUuEsQv6?(m zat~sKfsZFtF#`(M%JpH<4rO`(nl8t_XG*zjb*lM%>6o*7YIq`w-Yyuq&CeP! zyw62_q4~Yy`^YW~AWlx7 zxU7`|1ym17&d{T!SX!mDb%*Dbi3%I!faK%%XCm=O{mq;%p6071fZ#DKQ4R1y5`XFx zS!Lf(S9imJzBKb_)Kjm5ixDnZIj|6CGc?X>rZ4Zxj=O2|&` z^Yaa-w;j(~lcokb8F8>{9XCYxn>v5VH+NU&dhup5Kosd5TTKfjFq1$zpzu>wkDKW> z8u-*S17XMl!K^>#aabGmD`x$b$+-yDwGcld7U9d3AUL=a$CD+dfQmOm3CiD^i>80R z@wiKuPfT|2`0x^|-t4-j1u)#t77JCMA`~mJmmf1*fYOwN8?K;@1QXB9)?a7;PLwo| zo|5=>R&%aD+_FRnp1Z~HR2~K3fvO(XQa4j`S76r=jRp^2c9}`KQn(dPr+T&O-;r)- zIy9pUasxZo+`He|^y*n4?C0=NisL4jb{Ld*3OtTV-CbQF#s@@=`hA0VS}DMvZJHYTxj}xS=!M1F<>95A(6X2|Diit-p|_JSO}r<`;Ec`|G#QPx7ZBrKePaUZdGtZ5Xha^y>e*o;HVPb7!M}W$^;P1MH=hOp5^pf zDt$7CRZ3-R`exsrD0Cq*1D9$6fT4w548+TFw7Eip=tH)8@JJd4J|rz_`y+Ln;LrFG zlltkee@ULcLua4Pa}gl3Zy1c0=TXzh5UHf{5PH5#EI|G47}Uv!_n?T!Qrg`h8Rlzp zlv95nyOzqLf!_w+M&km?r+E*}#p3-8&t2G(Ss%5BiWyL^IvOc}`VlM*ja}B<|Hc7< zKtRnR;0}Q@2Y=f5)$BHqG3J;3Add63=t;4F0gu^#A;kiwb-_3*Pf99x6)VWUfO7{3 zqLfCT9S7Q5r*?OG5Xc*tb7KQ+9pYNdPCQ-Ij)Wfab4#XovYk!D0=dTDN?1f}F_45}kJ<2%)*3nUJccw7CROvz zFU7fl>`8)*a+zhsR;)Zz7w4A7=ZshgCpaK~&FM$(QBa}HpBTCOau-L4ynCG_1jzPF zN{Hc1$VLLji(FN4`#z>&A*~5C8jr6qT)I|x{Pyj3f{NIRBHN)YfA+gIC5FQ@ShYQA`1`{lOP}GyoHNGmg=X9e|>qfGl&CD17o=MDY0K;o-Dk z+;kIO)|26$D6p?MSFulq^>J?*RmJLnsO_=K<8iUhw%MSmHUvn3xaFNT|vOU5J$l?^$$qyv$%nyezsBSxLNbqFQeh&qKf>AD$-%rVW6UP zKQ=aYVS8z@0%(f+?Cz}WsCBfM8$F+YZMtk}GPeRE)WZ=T2ReUtH<&AG&z#CU{xF;F zE&JzAZs$ht0CLZ2zw-W*Lw2#%MWGSzzWSQ-0ytLDU2i1S%>YnXbyOZ;M!W{N14sRp z9|2F#VhtMC<7T;#*isVrxXlZo-n?e2o~cN~UvpdJep_e7t5p3@+nXDh=Q%cuAsS>% zlRk`kifNaJ8x8LLOc7(hxbLFxjcUiB0q#JbJ6jhWvQJhWBDTo|Ig{e?qk|`h9UbNT zD>KR2Rw`}yNq(s|nVKJT2WyywnZI_Zd03WqG<-0c5wVhM=skG*!RJSzf4Ud(!L7da;pxa7*M8{UKz4Sq4-7sp4Bw5^cf}0RjK4qTMnV zM>Jh?!~TqFB7sbFL(}PPkw&Dg^aT{0W>nD3YN-T-A|N!unH|Rgq$_|}0*y=@#}}^R z4)?qp(EysYoob26s`#PI?kOk^=}%qzK`pBCw9OQG*}>aStA~qfKwurCk2wR%49mIo z{^C?bkhEBA;HE(1Y%}FBS8XY=NQ2kTV)nES*q(*%o@Ivb@OwJ*cnNeQ{0Vho!L%#R 
zA-X;Vb?G&0WNutf+n7O8EmO5YDWAcpD?pewWwAqU_mawoO#5@Sp=K(y1K^nVObFHQ zq`$6MBRMpX0QK^?3y-g4zC@1}PK_Jf=h|9I(}*Uu{s%v`(1ff)BOa{IC3On1Y`Y&k zDzU_H#DaK394q(z0{EUDu8DDW-IFy|4LP zi>|bdMSfKUsnXroBzk#;%aw$!9h7t5{#sptRYv76I?R68n4o8s;eC4O13Rn zB@%bmYZXXWvjt+=S9wUVV1NQMd+-*7j=xvbNb*)1FxPCy?2KjpvJQRUSMXYDL(p!s zk**4~e9B;QVgGtOi;iuw1zzD4)+bgmYEKa$b+?C3V8WR~edv6euhTWW2G)m;87qSLbMlq@rBGcLlv-pQ{s+kLKs(@y z9IXZk^ji9R;e^yUi|ac)FtUKRw>q20>ihMrzVF&BbCzHGh<_flB@pZlP;s5NV)ITdX+8?k0z}EW<-9ehHI%5WlbULChXhOwu_wt@T zv~wnJj>CRMLx!)n#EpVa(;{}nGLyGY%B{Q;e8QH8AnmJNm5^^fmoVv?njR_WBB=ff zbdsDa(Y$n4wTkW2sIOo4o12f<)R$PE{7A^lkSki%rM517sCmj{F4HB^Vft_qf)!YC z@@1oskoHzNVD{eewe5}J@_!>%pJE@1XW3(MuQmKoG`k;a?OtnN?d@;&mI2z=mfoGd ztg5=FOd+{u%<}28ls7nJ*kWl)!v_k1B?Q(O-}|hdo2TjK?qrT|^gc6wMuNR$gaA0% zeW|x!g+e>LB9~Y&eIX#e^>fSL#hNY2|4238&^5GQFtn!MY@|3Y?OEcoI_36yZMY`KQ@KJYqa?fKmuX?T`?Qx5fmAD85%sCphohjx+*8HC>%71fR%73^nR9ht8 zz9j21wJTJ0C$ft$vV${bUW4%>;e3Y?-%9{d;Jy?+7# zuglfTlEY;ltVEHCs29n32<%m)1CDyP3|B|#y*X(v zwU;$cH0SNs?!A0=RyI?acy(yB@PPT!=(;S$5{mIL5A@BnWuCdE#M!O-tt4$;<-v8( zeiv%o)aKJQid>ND&n(jU?xteAtj4J} zW2CM_UvUTLpXltv{GpQB``66aT`iCNjznl07PdZgEa`RP+`rGN_w)()YPhm_iFzOY zg|zA>^~(hW%jXP^#yC1K!J-_(f-k_vA-E&!LfPHxyedsA0N~q%1f-45?8Zr|ycZ>GEo1g1_lN zfYKL*zORH-Zidev>52mldF0IgMRxILK0&y5{qj3BVqJ58oJ$jZ*xu%_CtD*YQ=?ns z@cA1`CQn=2ffH*F`HN^Pk>?AjH>@8k*+kpOStOfPR3!T6iC^SC5U&u{lW#-6#Ldo4 z(@y)imp~DM@@C!&0a8tB>c`lNBidFm&#LVO>6u%({N|Q?e_|*XN9&Om_<7^Fd6+0o z#Q3!85UfdLDaZER@T8waqnk}-Q}m!3WhSuLtVnSG(hZv8|QehV?`>Fhz$f7mg+ z^dKix@#a1mR$5RNOa-$IHr628(rjKNQ@8cklS$?jxvqvUys)O=-FYhuPm3zS=!WUg z^eO@AS&0fk?2COUR`zQB_TI1{yL#WLl%on?;gG$gkmrK#)uPs#f5(zHZ)O&_4D+s6 z&t60$Qy_2Z7D}BZ{kodxzO9)4R%Y&pNsFj}?BDVB6RzuGO8;d`NS5BZ*N3pf2?4c) zLy@eT3s-?bc{OSe7Dbw|>PYwA^s*#Na;!l6O(_dEAARZ_uQK zJ$CPn(PR-6Os{TrZ%sX}qmt&CwSTxnGgDzV{_=AwlY#$y-}#BHffuAaZYpfe$n!De z*qdJvpZW9j8ogV~150b|x15rtRXezMPRE^i$Q?v=o>98f^;JPZk%@oN#!rltoa{52 z))qO580qF6;Y9h^0bJW_HyknDyX|NGWGuavcegMYM0Eqh-hElJ>2fLC^ba|Lrb~@x zK2?v`DDSL8Ie27?e49@E`en2~{Kh%4n)Jn=JNk^)^Vt`yA#m3|E*B#ouTGdkfK&{t 
z5BR)aseOq&h?d#)`t@xCZ&8$zX7D{o*l~AR0 zg9G`@Z#V4f15i{_tX1i`LhP_&yWsGYP`YDXU(Lck&y9 zsy#Y#EE0amS;ub9%FHz%%!GFgS!EpQ`^2qzS-IM7sXX?gJwZ$5j@Ou=gW%QPM9Rlqh{i9uS6r! z`n-%XHE3&?AtdDEIKPE-^y2)w^BQDZe4etfd@3$To;{SjA_YF&Ni z1@+!h(dnhv^Gbdhp2&uH$DNgYogps^a4zCrJ`PTwCo>>B?#r-TJ+upUj8!Xn6R@Al z(y)+%rIf&|n|_>Fk0{Zy#e(L9YI86eN~ioFqK-V!F#w+_p~X(3KA%KPOpIuh?|SuZ zag#N*Kh8SubX;|DG7e%&4>yAH3!*!MCke{yJktmi$XROHCN9~A?I~`hmCU*=B-s4N z5Ef4DHE9u7tZf&V8iem4>R z8$pCD@R$E?qWW#G;IDo4PJ+J{guo7e?LLyxfBwh+4LO>sLNYi$@zt79 zhs-@3QDJn}vNZ=<5?{llfP2E?;%Aa=Bp88u6^X3}&nNgYXnTB(dGN3@e`sS#M?d!W z&i~@)6N-eAOD9c_DkSeQU-?Sp*WvL3LMuinb$ytR0DX=UzUZfYE0z1rZ{A-pKEzDc zwN9_gFxWp53DWm5;ozlQKp1;HN!VR9t!ZGv;FUuXZx~-ZlNK*nWe>9W)h**8QPz?f zg`1Yo^TCUOEl{5e+$7x&awMB&?$<3VLJ8&Fr z>B2pwGuLNup1~s6y5APh z{bp!kHnM#Xt>QKS2JTYojjhuVDf(mKhC1>V%NE~DKaEC)Nxb)UuAu#kS9m*D5k&Py z<-kCvN8i5Fwq%n5+IsP&%xn}F5a%e6Nw!D{uiaVb`oW>#bY5~l=;5R`(1Cayb1q`9 z8-C0wKGJ^*bvNQo*AQIe05WLV_3P+c5}*fb>;+~CdS^bPqt5^sWLBY*lW*gAriu}jYjkb|z@Ipa3Uxj0mcEDgKQwhR{@0 zSogm}avqqfKH>xWJlPaGXJl4T6g7>hP9j1iTS)^Bn-TYntVKMy7yg{J{sCDPl@m*e z%UfgAYojZ}9+^3NN8p(n-crW?Cjt(v8&!Gz+)#^vzZTRYHY$B$Gc?C#5(V~>05)R^CNTJ*NR;Y)J$Lf!M>(_hA&4HI`0xv=Lw zdEO@`Cajk){EN>6kUDaDUdTK4-m)_WFfEiy|AX^?l`8hsgp!sWt+p!_uW z$Bfa0E|v6`>GUcSq2Y+QBGoS|4ixVN6WP9`mkzA<^iCyuHaXb49qlw;td4HKH@eV5 zH*%4l6oS_!)VduhuhRz}o?;dpsE(qXWEa=;LS&Hy4_V4ON*)v06i?>s-1R6$8$lxQ z(Z1F6>C9S0NJs}pbPpSd#%s~-ovYyd_yY4kK&}^y5xhrv>VqlG*1>J zhEBa^6pprtImYtkR@Obs9--;*m1pdf(qg0^2>E48MTn*;*iyWwpi7i9ozbmPd0=!Qtwou+MUDHveyOYEE{we>IIhrm4IhyzrBMlJ1go1G8nl?4iVKu zA)3KAb!+z*e;V8JOdt#wAtpZ+8Kt@#cpSwp8?$J)SthVw7;@2szDM$LJ9D zCD2DcKcO#%>dtPsRW;Mf4m^KOl~xC{~Cb z%^oNxbM8l`PW)BwOvqJD07sv7e22w=Zk&#p_!~jIKrR(Ux1e8$J)Jx?`Zs#m6synY zH^mU0@!paoX*}Kb#X$rJ%~!T^>D-G{9&7(6ed+bqOF8@{^?bA*h=L-8_mj3LiBz|9 zd0S+m5HWO*XGYRWm;<5rQ_-Ui-!6{n%4LF4&mSj9Cp>E^`$TQz{)}o?etxpObK5Dy>4Wy zmz+*&U5VCIleg*&SM=3FyVV`LV&`9uM+xp!n@{nu+AI5_PdsO#b65qhdrZlNb+LKN zjU}OpwlQ(;NAQ8Hnghv5MPj_RFek+hB77SN>uTr#ZT@j-*ZnKO4^cztmxCwSu)&?K ze)blU>j$)=Gmp$bN 
zVCL4lp5f8YX7z8u$Xs)f@8Ru&gED8QHj-cnGbf}-wf*Q5+Cjnju`G)fB7sn|334AP z4c`!$-~ETAaJRhFB2Qy>G)VIr>mG%dasK>CK#-{=BQ&$=))Tld)Pbow=Hqp@jdg2) z2EnV1@I+MwpVQ48cr(D+^ASLxT$c}vcXDzRZPEQe`_RNivo%=aKbp8S_kB9&ge}TF zQ%QBP1W_ByA#0AMxlK;VOx$iW&t{@%DiQj)A?H$SkzSoMQPR}grM~BLr7_w(4*NI& zCNTE8LV2R<<=@luUuvIQ9s1LNa*#{R;zfvDtAc1giAXc!vu5UP=)HG`+;`2-_2bDj z#DeP)Kovof*+C0I+1KrMlyHyF{$vu;k8dmJrCRlf3xgh)v`6(f)n!}HN2@ZNpB(Y; zitv+lFAg1}`2skP>;!hIuW2ploEm2hb7%LRI%EAx^V7u4;^j-qb446V3XewXBJ3Vc zlq=mIkuBKAvkchoY>0PA)jS9_;#D#^T@rGcje>^0XP*k{e7f9Ag z8wjL*Xi5%Q1fF8kt*jX(0+8}h&uj5GMymlkI7fgn* zlis|Tiqdj9A?qw~VN=Lwiz9QaPP5i5KATcO6Bs z-AOfmv}!8i-GvQ)RGe>E+f)*KF(#y2wsx^mn1lUv(?9t%%6j^UvudYf$wtoO$s)Gi zA~_P_`8x=l)e!h4sweMlECi14S(LYn#a-G8ghxNx{8pqz%E3743q|!w`F@m8!eAAC zJ^uFm1tdB=RB*&H~8$L^64r6yb$PU-n6Mspv{p|&yjkQJLbgRNlHbxDdzWKyd zPU)RK>N+%KpN9sp`7wRHASz_)I7U_yr)A~)Ge9!P&*--3%3W$kQ>E@a4?V@*QLi_n z2D`hmidq|8Uw*C-f=s=%=(1-Q!hyI7lR-lWer(&v3Frpb)|HkMQ%+8w1|wgY@ugz( zQk2EtZ^2ugmItd)hDwht96`3fZ9{Ic+uWyHKv=5rJ`QgW3HpsdP$u(-MK@9GL_>`a zU?9va6O&{Er+v9e%~7=TLVhrNLjjkumvQRntnYa^kFS^fJ$hE-fruQZWtgJ2WgZ@2j`YBX*WIX`URUX1;}+KJT`?YO{>$E=+7d zmf|gQg;-+&5%vPJ`}*#V+`Uq;u?`I%;E!IpGR+k}3i#yOfKhX0wnS^+R-prpejI`V zfHRynqYMhy3EvcY#h$69JTzG}sN2N`k@#jtR!b#qW9^U)OS}=u-BEw!uIZt%;^ma~ zONo1hDBOerX&}vAqSXAfh9*Cklq;0=^qbiH;BfFB~O_vjf48827b4cCx#z7n|Qmk$o zu3YRFP@4uAAOeld6_S`-dX)26p!8=?YiUl-zz+MX8r<~mU#VW zDZp$z_#PZnPZIURP}3%M5_qdrbd|t-6p`}7R(4eV3f>(Yz}`A{L*OI*I@TGbDgykM z{xP`!Z)WrV!^HpphYtAXyQWivSxY)vJ=^(lw<|y&@2JpEAjpK)VNNJK2A@=Ncy{ReIp6HP$9?%pdk^U zVeqQ9O@H{Nz`==qR`-}#5rS_rI~mw^$u(wSBDR)td61QW&tb76uIKRxF@lR^Y9fg% zO^X^4HMEn>x?Kzq5V}~N&-W|=-xRQBD?iZPINPtn9O^;#%)kE9_$Tobe;^2e8SM!G z;*Jd=Ss5oKs-RmSs;IcEFnyfnY+lMm{X>8E&nLTVOkU`?JlMRBRkf6;%HeDmR1S^SVEExI9p#Io*qCbfFF z&peB7zF7HmiOKdS(X|y`2gyx^gIL`DaTrR1wu%Z&R5|C?iw+ID=N<}yP~*3%ZL-3I zS1@H2l~1hrC#_}O;Pe^8e4MT?c@&Vpo%L=DV}80gF}kZo()Pjxxnj>?Utpzi>(X&B ziHwV5Hf{YBxo>6H&f_&*e~|Uhn6$r12N~C+VnCC)7v9^x^9~;}(X4>B=EVaJi0b=$ zF%=bjuz_@?z{NeT2f=MC4u;xXfgPwTg^1LJ`i(z)+wJd%d2wT1ncY!GMV1e|f9{7S 
z3Q;O(J6)s4;U@){;^Qts-67)F{GT7UCwvd1`|~vbaS!GO@I;YMZkbG`nm}g=()akK zU7v?HR_iUeNwAN0TpLHV{N7t|LE=xeM)R(J)liV>WJ$VRNBJ2sbO6W}ay@aGm zNelIt8Wb@03Twdp{kk#)oJoA0W`>sX;@oBcZ918BxK_kF73Pa{DI-JZk?a5!G$}%@ z+z{wPFF9I9f+n~%*;};WoK=IQ!q>oh~YFPx2nRi=UU zhe>-UqpxQ9EKOXRCFGUSExcUaR1Kj;x@(%oMmq`CP;OL_m+$<)WTp-Vc3PmZgW?1R zK_u=qX4%@S5o_lL8N}80%=R$dvcHvr{~|Efp=Qgtwj7*X_^}B9=!jRe9j^2ORVXs@ z_SE(eT}?~ke9LBHV|r4W@X;fwUr>YcX=;CFiTf}ntm(IYcCr%JA26=W9=$o;u=b9) z_aJMD6(Y=$^|-v{UgJcq-g*Z*Rx<%DE%P)SnKj5cEv-jo(?LI_ZKW#EyyltU8ycEY zu$i6sZ@OWEGJNB;pkna4I$ows9g`XNiK0*2fN+Dpv=TIQBuSYQ9)ddINynD+5wtKvb|MQd< zjz&_5-ekJ=r+MvrDk`A#5$jleyVRn~8L%^LwHQfdfCgv@Y0D9C$jaOy>+0%S^qfkw zpI$S|7TxSCh8=Dk-2q+8p?NmWes*;hYh$wk$8m5n^W+7?FHSZz~~&8EeB zgJHywjf8jt^3GHC{>Uyj5KM2*)H;&K&Bi%oN8#iM#H1th?eYl$DwBz!q2U{@5mM_s z039ul2D!{nygp50*k&55yKPMx*PLlGo9xx;3}Nkf>hjH=<~lWP7+5+`<*7_Bj65}n zc>^1iGX)$=_QF>Vujye8m!vUb=ccl>%ViWx!NhS#{aG6mb)eKfwUnto36N(pIUPSO zp5>W4c(B8LxnrVZ$uW6w3Uh)^oWIP@Gfj3n^{YyD%jPri_^MwKdq^E&J~4=~0S)%( z@3GEtDIx`rq#+rgtg2ia%WrwP!@LopW^$Vf)2iJVP%*Zz5o>UjK6a_Hr(WRgUA;__ z^~f_1bhuzVzDyeU#!X37c64#N6?WCnTdHxjGP`D6>*tm|d(GbzzVlJWZTz-Wr)qX5 z*{cq^;~81aHzyd%7N~)@aEVd#0-6kg7)+ihPRR;QzjG(8{DZ-u2?j579{?&=@+$b$ zcO+`^J7FVl#5W=Q_vudh*JdOJ2Fv9M(|X#O@)bC$KJiDF`6B=t6*CyENo1^IlOfAv zKikdEEEuC>fb%RH_M(_9TzPd1?t^sqw(n*f&B;N8CmXf*_l}K!&?v?yt*{>^NeD6I zc;zv8oo!gb7&`TRWYcBoc37ZmQ5w_i-(v?Ix>MutX7F%MUJ;Lf*$Wi_ya3+JpQ8JsFZq7G znY@T^9&%@@2E_S3|E9*r>e2Dg=|>h^hPpw#y`24DYXd%eg3Gh*#P^|vFuBe%ZO`*} z;gY{}orhkZTq~IcCXJGg$ckWSV`N(ClB$+y1T0F4kO+W1a>V@pI_f)##_4SVa#jSL zh+yr3M-a3Qe+SCOWh7DMH>sa7^LIbU3g)xDB@avu~JyKnh|Pu|5G z55Mb{BwAw5vMC;wC)~rL;awp1F5E0Kb|7y=2j8L&O5xP!B*>&G3||aDex&qb^vGn+ zL~dm70P!x|C0!c9=6+?%eu%zLp{`2i(hBM)TzVVa^{pKl()z@g0V}wISZM29_viRqkiN@dMZ%%uU&hleCFrN?zqva;{3xDt0=z%nL1+ zy33v=AyP@0%n7C)`fpW=>(YI;qBd3#IvKd54D*7ZP|O~I;9Ym#@^`cCeSmuurjNq8r1u{vm3vQ*aXa+gN%wL`&RM;! 
z2{<@%q@M8f8%rQVc;-DxCh%21Yh5zy_X@1g=_d5WMgD<}1GpLTfm1cc1pYYcHAeUr z(f7J<8C))&bN+^szg|n*CEbe%cU@d{=oP*nhR<>>tMh`3!HnEYtmAgm+c*Vxo|NgU zHRRcqD~?TdW^EG^_&Z}Yu9yQdEsporM`?k=&qE^F`{)@Rfw!1!;J)&pyy7m?;O2Y4 z`7~&QznF7Hb?h|-WwTR+X>n>{*FWyHQ|(4NCO7tP#WLn>NVO?k#+#9^-D1d{GE@&O zhM(lq7(=4)G7kM1V_Z;*Nem;ZDS0GvFPz4$kN9tP*TDBx3-=ty6kZp!oh8HD#_2YLX@>#uk)fVQZs&^ln$T3?C+?qGQ+ zbPn4XE03|pi3JzN6|(BrPdE9x_G)f!*WpS-zap`VFLZ7Wm^a951Z=vm_0<&d;9?!1 zTwdhXpY_94kIp*keBH=}XS*wvqs_q{={s`Ogq~mNGjj8G(zwpk_gC;3f&5_*-Vf%= zeZkdcC`_Pc=!-P1(E33N$Xld)7 zd-F*$PRt2uyRzFy7H3I2R$Lz8+_;$+U+9S7a9;c=549g$f&&`dvuXt%EBxBo;=iABpI2RFQy?1pGuzRj=0o^j9 zxY(U^F2!cbXP~nNq#2-dvkKg8)lH?7NjQkJ(D{}OSYP!# zQ^4X(#=?0!m|-2vm*DOiz-eHl_pVzu*dIAn`{jShUV!a9T_6hv;_m)i5E#Bwtc{s! zf|g#S@nHJ!^?R0mjklD2HRCl9?_q1HO!u<3#_5#mlHGb<07TbMe$DdJ%*PrDK>)TF z8RdX2q|9W+4%YPtrn3vIKOZ$fM)$sPJcwgZTfC~5CpQ$Fgq|G0{?^9ad%4v-x|VI< zbGxxVAok(S{Gz-hlEM4&*eKnKihlt{5B((Wz!)-=tBKGp>+#iUHI3juAt#IWhlMfy z?M*_-mSHL8zwqxEP2Z(nRw^LqP6j8lH^%e83B%4ZQFPq;ZK|Kc+Nx-c zpN3a~TMJf+)EkPjsA2c=B<|_Q4Kba-$c9l~`f3y+dleu*>N-DJ?ImwbjCIc(!m))> zbYSA1%j)8GKRQ~bE)p0mX5!@*iD@;dcoKjHx35>@yS%!$LW72r;Up$D+F zFEj*7J<-fnuHPHoZznj;B_@j7Jm}AF+WqSe;*& zyGqttQG#~p-ZP)Tp9s5PeAnvHI%j-5UPYcd{^QY{v>`M}{>scv$%_c<-lGjI!p|9T zad9i!%tjtIMtp=TDcM4OOm6xM4jZd;#z)^|^jpRG?gr-7JrnncaA#**<9yeCHC?=% zw{;ZU61#UoonUXzIt#Z$mE1U=B+pq*UN^$r7pNML4!WoVh#w)nv}i-+-q(9x&Uivy z-zUQK4m#b{-Tk&|8iRHw@#bU|W@k1t*^K}R=4`U0u9YaeOzo+Abz9SN)&4801}uAn zuOoHifjxyUVOyhyNilexh!JdotU?VLmG3?c?vBox@J{)Ei__F~ls8PUYPmvjLOnJm z1bW@p922mQ#MBP<89&(rp!22@SbU;&%%4>bd*?P zDnf&gBvi($x!u(JJmX6TY0?>vFWDyRT^eWFTV}A<{N%nB8TvB^T(n3#p#6*u6Akb3 z&NEtcpxIxpX z9PkN~_5jajG`=2O1Y;N&z_ieHUSM>M)EH1)A$YIZV*gaOy?bo{5orEC%4)R69_8#7 zJg*D-)hFD-gBML)P{FqoX#MY$PdlT{bso$<%n}63rc)y+@E-MRdVP0pb?^{8(8}AA z5&V6ZCxN1|c{=`fJXrUWQSUs}|w=LAMAFN=LP&vpG~+3q^sm@iOSF!~vI52&_93p*$}h6TS(f7vjACO1w<+gj`@{8kCBRB#WLF z^~3&N(J>mx7_1iSUKy9LBHpLMkX`mU%=tk8cNP3|^c171K65`kyAaoXnTl<>SDoA3 z^2^?e=`~;C^x?0EFQ^}g>IMu=kKd8++|m6K1&lHao_{ln7f<-RnFoxmpv_qJU%*rz 
zLl&$zIc1$Iyfs?_BvZa+lsD);z(S#L(fblsQ`M5iKW|F{DgRq7v$>SA&F7WN1Y{@h zL5`cHCD8X3>e$gjlhWtYWAWJw(@WXn6MAD4zM9)<#P{}TozQ_nSl9IH)4``eJK*Ud zF_pq9f);()qDZKiaBjIGqIBScp?i_a5ox0y7d-ea(q*rNZ;Vs_TU|U9sn2g(q+h*>QW>fD+I{SaoA=KEhdHvohdDh6n1;+evGanpd|KxO`Lm1GRMGD5 z0B_(l6U%POz&|+b9Uu7M776z0A#7Jann-tW{sLKR5AV;pFjlC)lIg>xeDV_54;c0| zgQraHDiwH!qyJ0xLmAE@x^~CWbl3dcgM5FEi{vfS`tk?EU9@4+(m(KusGV&%%Tup0 zkI&hP5M=eJG+GfoqVm}tBO!<6I+xM2H)#(sr)0L*xAqJShb;dIvdU88 zM8)gw{Yy@az5Aa=rhhV*ziq_-e<;uY2M5$#HiG?M)+g(#028#@osmxoqLBo^=%is( zUoll@B>iJ>LZDn8drI}iB%tksYU-{6gVtOi<0v?ZF z?dFAxk^rMPAEVLiylMN4{zPxuNG%|x-;U;iNl(+J;z>tYd-rcV;(9hR04(RwOOvtODjNuXo!o18=c;nnkrh+em zxJ8$1B%*uyI&GwCK^Gzlw z-Tr}t1C2VsRUUPmc(L=U#Y1qxiI9ZYAG|;C3N5QkshT2!iwhKM*W_q_aCDVsloZHZ zan^`=d0P}Il_}$nEvvjb6dses?vSBFC(N9YF^;49bf6Y$KV-qix@UC#=CvXo(BmTW z)KbFM%)FFCD~b#VFO1+6Z;<@g;jyXL3OY{`D1oL6lnf~Va0PE>yT4m!+2Svibxfo6 z0TUZOzTYjY3LINx|}_CTmUWo0GB9ol2urE0Jo9=126uMwIjjqLUYA4Em8pHk6k&oILwR8 z5g^$M5uz75z|7BG#HFO7B41_FK*km)psc;dkkT+vvzHvltSHn|GsXd&Pgn&1&(0^w zhUI>|E5STC5c`e7DC8W?-i(($tr7DYBX5Dqn_1VG&9to1)C^m5mRQW?a6K3O+Ns4R@8nj@^ufcdfxeN{$lps&=P#yirV>3!euI+B zV6qzmN%5@we(vi0#~Cfi+#UOp8YCaWz(|XdyX>%OpBVVTyG_pLoAYH*m3dm*jVpe? 
zEaDe;F4Rj`p99;rR=0F|hol>WXA8TkY5C6o(N@b{Y0^cJBdaJ_LyNo)duNLd+)*FapCA zN5x5>y@NsrodChjUb1E}LrX0iQ%UeeL@1uc4ivf;3+xWtLo zySv5q!!R9gtR|<&Ta#*5MdJJJIA*V)PjRMjznN(YJ`j=2dWI|V;wi8s5#6%$xc;IY zyMs^QOMrpJ%a*(0^26`^H3@LH3(J=Yl4cLND$=}ueN*{B>UMBA=lMwJ_!_1q+EvMg z#BkAY)65IJ;MBo0(MvtSZibwJ5zl|Vcm(i_njjsjj69Eg6Sq->M%z)VPoC>dX zo`FCtyJsqIZP*(dEjy*1u2-rYL%>TTeLkDBH@if)}yV-(CQpKr%>P-Vrhc7$df4s=%)m zzp29D`u<`0Hm*r4hFHyZMOV$PkgA(kKmpY|1OovNA%b!FM=dC7*fh6OWgnXoQF)*4 z*!&m9-)o_9zZ<_1vDFC7 zOFSX!DAhP%qJZC{1`FZs7a6#7dydTC-9z*d+=59xU>+764cWinlT7aWV>o7FYyXRv zx|?X48SsSyBo@@~7Y}xpP-1SG6xajt7QMX|?#g|4^Fh$wrQ$2w>)=)Zkj^C)Z$7^UTTt-z9d7>WY))HMKnw~P}yirq1( zT{$rZm(wL?ty!8(2D`@_-`w8l!FzcxL2@N#A0tdGeOM{`?Qp8WxA-~j#D=@*iQ0(DA*iSg@)FW&(#AdqpPhGA}NnBwB$ zKnD^zm9%JgMFh1*i$CyJvw*97=>{HH5Kr>h;mj%fphi;b`lr6!)=m#~WySTBdt#Ab zoH+*2BXK%Mv$vvp?;BicaX$tV9w-jdb;qdQZRrpobkT%Qh3W&{oTgyl_3z-8!7*Do zjlzD~R^R1gNOjX3BH;9XC$Mx?4dr9A{}al%tl6%9%cC-cCVcfQy~l)nio9sU6WvpX zb25D@_vnqzRtF|+Lf1v z_n2H_tRn$cc;}o2-J%G2q7+508fkExG1$U|o}ci%xcvU=%;p%BZj$Vb z>fasz(9##z`eoIBIIJucd)8)G#ejqjE)OdrvhZ!gc~2a}KI}(+BpY;V@bdgG4e-#t zWhJFZaQT1ek1t%=<+&fE?|H__zkb5|P-1en<)u51+?{RD?{yKaHkb;%RUW{ z7;yHWpTr9b4{wLrY?eq76hf1x0eN5OtOlfX5a0ihkbgB&x(QdY#L^T_G%MF0&(+r} z?C4GC{+D5F^`1@8?mIACWg`|djB*H%dM=Gtr=M}|ER3eWecce?nl|GHbK6j#Wy8+)6nLHK7QF7mI8z!U?G>CW(I~5NVWKF{r>lW*venM#X5&f z3$2{}&BG-s{BNRSZ!}A+O;fyTHAXpPi*Ci5L86p?tLYVZ0|0AATb`=bkFD(k&D1e~ z(?Ae&+IqqIjL73;8_k&WOww6t+IV#oc)iDT!H1LN{Yyr5nGDr{Q#6tntLxZRz3Efj z+6^ve)BK0gQ_RCy9hn<&ss_0C`&{m6?1K$oKW|*RXECq1d3;~_q1EREAg*}Pt#-3f z-7M#=mCyor@UC!^t#oErm-mB5D$$mV{@y; z<@dat$v2_Jm?LP$wMdYR7p)sFm<+jPxXA;4b=sKEijEoobXQavPgJsPywqqAguX8w z+UnI>Tir23`!C1Y)ze|pAZ0cT&VPi#bq( zF3Zg)sPT#RiSPZ`N7Cwhy2HBl6PP#p51Xw>-tP#H1V;a_VrQ_;C!SuHrbN%t*q)w% zeQf^W_9m2>j0gbFr%Ue^2$Jt)-H${#K7K0SotQQ#KEFfl>J|J4eNi6FxL^8fkaNVy zP7}zKx=34S23-PJIkn?-*MpVA8b-YU8#%f7YM)ffe36=hIgVu{`cH>iCkT6h3kwJr zzYvstQBs@dh}Sy?ZBG$6YcO5E<=}IS!1jMV9h#gC?C|0Z113iH2zT`o)w`ahx?daR zoTOhlVb`C@o#5`WrHmc*6jQP_u&)iAYx!qtEd^Hc(W7<$?c(Sc 
zwI{4agYWsVUd_2Kw*U+jb15bT@};k+(c&+56BPahh2)5WWq(z{#2Z=9)c&1lW4RAc zv>nG9Ls_=`j4J7_^uKd6gS#4JJ7;-0nr0vpiW!CU4TBEM?}dPxE_^@QR|2jpCjMB% z^`NsAtZp8SKfR0|{9OZ#{t;!X^SUGTM8D1b%JKxU*`w{-i`CG5l2#sQy~inU-<{vC zH7Z|igVs3266?CGtLEJ>0uu`4H}fVUE6fZhlLMC_hwr;>A#H`6MUUrbfa86GyfbZ+ zCje-nU2Boe>ZYXH2i$x;>N-ryIuFXe8)`7I12u%P7ZBd}Q}|OJ zmqh`WUk>T>V$V|I(m<`QHwzcL#$iJ{$CTHOJcbTLL-{WOB(&iPi|5W$OZc~<*@L!{ ze(0NDoOse+4<{{^Ug!YI61&L>k}k!sezjZ!cb;drGQShT?L_=HI1-#&yS2jrX2Zw+ z!pCMD1 zgfNLT#1W1!9%=+KVt|xgqQ?ccNE`q9jL1jv_2lBtp8*^O5o2TIUh&XNpPgj=;@ls9 z=hL4wSblyfNx{=oOO*&4GM!(QT`+fi!>GqxkzLjr87ERF;Z7gXqpPaF-N*{kqTotc zsbT=CD>YgKF7Leq8?gMA_x|7WcmGXW{Xcm893oNs0AP3#{gt@67YOjch}WtAy>U(q z9%5(2OrPY-#b)YsXHtCxtb=nk2==rj$?ST`MCx=Qm9$CoH^3~BJCbU>f3MZr`!zkF zMl8IX+(f!DcxG{kLh@FZZXVwCBy4x8M?sKEIUwXGNb-!rsNUHU1AaU+uIkLAejndn zCVv*hsjs&Lyq~Y^_Is&^cc+oh>{tGRc`gDvEv8IPX@>SU^WS$*58>u$H3u4_8Q_fv zEuH^po1JbJ%xDl8`Kf`8o&xr(Qv8RB-we6LTSGgR(mmzs^EPKyXCaWZsD|pRT5=3g zqR8cQHOHU@1>1~5%*qbbJuJ2P3q=+DOdE7~*z&cCA@)GDomhM4AQRIC;0gfXPD-F7 zf?MLo>?eY(^?&69{Sibs{s?zDi)BXE;jF|YU@W9ZFLuks8o0H|!&Zu{Sq8K{$Y?V4bvcxWRZxTp#1?qeZves4mlg(G9GU z8*P#MGBw5Xr|EoYB=tI5&J!VM0=*DPnT>)ejOU?i7j?1y>Qb))QYRle4Rt>00KRWm$6b7$K9Hb@0T|&KBqx*3NrfzRSi)PTA4w z;iyDaLob<>Fj=z`A--IpGuc;138do~apv0Q$$2|&p3sS@u}{E$mIhlahHHq>(?5u9 z>r_4go~;9Fq{kLqjns*e6-r5R$~)|jOR%WT#SQJ`z!Tv+S;!AFFiHV;5a8SfWUf{; zp+vphQprYZMtMzrDo#}#>}1SheDDBkwSGpj=xkx%k@KB%&+pv(-@X2__HWkAT5D$B z_kEt3d7px>>%MkaD@Mo&M3ZFd_5TG0{kFT(aEdv`zQ#k%uLXI9FTRyhgQZ;OnZH34 zv5g~G(5uSDm%Lti+Yle1`+F!39LBRF09ivoN|mpM4k4qjMEpiZ-5cz#0@YGT6NfJJ zMM1_GwD+-BrGD-ycHyYui{$g?Bi3N+miVZ3^R>NNO`*B&rkBIFfJ;@r|DPn%#PT=d z*8P*EvXZhND=y%mi;KE=aBy%%cP9?nV=NWbwR_ix0pvQfKJLbr9W=+GT^$tj-tN?e z@n9NawC-MFcGCQ9jMnNqTbk=$_mwTAE|S=Ok-y)Sb1-yUeCz9o+sJdDjkxiODdhJU z%CFmQBWjKt+eJkm2ny3oYkqnZMe@59J376l9Tr%F(rSZ1TtI{L>5-SxGP8EMU zi6fPLah^T-yAMW8yi>Zy?P%%QhgfN5@|vSf1ZoYDzJ};8Tx(oI?A*fcE8gUTB~0Ja z##xk=dPrwx=0Pj7MlR9pPi0IRGSHfv)8&&vmi7Jf(mw0apLy;^{yGw)?l?B({0960 zGdpr?jQ3dCoi@kX>{FViiPc&Bi#E6zV_#8h+Eufk)Cz)_bT>HAite#Q*yqy_AVJYe 
zc_)3%`D{F7@W_I)k;zHJ1OH${eujmuiYo}+xn*8UQ=`2&?{sUi)d@nFz9#5rf5+1Q zPNgv{|AYZl5^c#&E>8hZVKlBjR?1F_>_NlwD-z>uAhqb#2OsVegi^Ha}E8`MfA77?Py#KSZBcPF<{Q z)F0U7wb%H5*@!g{6Y75sK)gZ=ID1Wsv(VMTT;fZezj# zO_MsG$>a3|0^YpYy{o5`t3Gf3Nqco}&Y!RS9K(A%5^MCAH+jUu$3(oG(1L%sV*+_!1vW`j8c|c6KdS5A z)gWIY3w`8|rF|+rD7DF2QE8~C=VQ~V9Y>)rs%goAK7|Zgd0~?eA!wsp2Or`(IK zMD+p3;^xoBg>wA1{M^S*M=S_F(v7g7vvaW@oo(p!qnum{c{Zin?A>x~trJ&{9k_1< zJM?*(EU{et9#cmNYFi##iyu&>z-@k$+g)i?=Ktb3EdHbv!$ECBDj-9i$U<1;3e8KW zey@;d8T*zf@OuMv&vZ22*S)03E{8YTCdgSN^(wt^L7#(u5EXT0{>iJxN1rYF#Ua8Z zwfjq>zU@w!;lQ3S8 z@ODdahiJZ1x_aNI4fo4CtQqL^+5}62EhGDdD}KKF8B-$u!TC-n);h+h>yfxfmvK}B zNd+DpPaji?KCXFp5=0A%_s255?W3llF|bh&C33&9^h%6+;vci{@bJ4A)k&%6zJhS+ z4r3$Zh$XSHGkf;ojxEh&%>svNsY`;3(eg|+vskTIua|=d^)+?+W_khQcW$-OYC4)2 zZTRaR#T1t&ZA1um%oxoUL{^N%^+TWU%T(@9_1r2y6kPQlX|JRI0G^^^)5p#J)1>GW zc${+Cw;>1dw(;;~55>e@nB|>L!A?$sa9^f!sRwcQt?zEXSQoL$*&_}#8)(1&9{WiO z%C;~U@ARWGO6GKKbb~iUmSWZA_nPheJI?uN#Q-vefX6#qPkBo_!@(A9`9#U5j939Y zn7SM11tiey{*PnTc{eL~+1+P7rG%kKYXtU_rb4C&!Q(x?_@+gp?`P%CbxiIX8lyeE zB=j}BX~OF+^#Z4-;$n{^SVxF#_Fg^WT__7QK;XIFBxELIIJu{0s7PrLf-mX9=tDGkLjxfmGxxX;(<$+s-1T z7;*o(hpstY<5t@U2YxkwH+qoO>15a-)ZJpzUb`qYu1vPogQ`W0;8eHP@A31z;nxzW zg!m)PEAVg5yH0D*)sS4)-hz-}s((MNS0 zj#8m6o=x9jqLyUVdV5N9J9h_6kfL+NUXsyQ!aUSb|Cpd$Ox6lZKR(nTU7g#6fqFR;7PF)$YE=On`u1uWI zAk6D|dD!*K&omA1x|zJcvKIMapIg}|t-*PbbgU{Qxx6)O$e?G+-qYP<1QnTBXI3DE z^CV!X*{_GaO^`vueZiF84w!VBpmEff!Wi>AOJXIehqu?mtp5xUiz?-|&%s$1%vllP{8~iLoA?Ds-a{WYM>ok_yM?4$xQzP=4-Xj1Oa~8!ET|k3%G~*naV{^?bCW=w)%Si8 zrXfQ@Bu8=Mvj2Q=WX9@i+b4pNkf0f(nZhxvctcJovTft_=2kXe>9NrM`}a$f>pq2K zUCK(wZ>QhI8NF%-Q3r51B#n;)_92n%1M~u4_!G2Z5mybHhtth1z3XJ7Gs$FplSlao zm^{Dk0CQNeE?L<{gJQ$rgNz z_BFX;(+%yKo0PcbI*8r9`tbX%2iZ!iQ;UW})*=!70f;{+W*WY{RmpdslKT7Ad_4ot z{ntxDkA~ktNL}vVhX|cnxW{wb5uFR$GmLVR>a}h789xgW*K;eRYI(q+8MpHx*dS=K zz_goANhJ2JQIOZlnWmbFUKS=!!f(|_`U^{jfC0)l!j$;EfOVzpXNuLh#JHsXT?wcV ze0@B`WlrawB1bK`fy?BtZhbR&F>w{X{#s$0L8;=3X zdKb#4iNwK-;Tfykz+4Klgg)_6lrI$C-u&pmx6C(xls_zXwns2{dAQry!UPg+XO}N^ 
zc9KR%uQH2duSGJ65c}jA^DWL_r8BVa6@F77X@XeYw-nqG?H!vwIX;!q7+D{jcOn>U zE@aF{-Xm!e`hpbmv-j1ZsxdwmX&Eurrc>n?mQh0{URlv=VMOqZ(nznFx?37 zDfH2CQ;Ky3VItx&I}{1V+V{Eflg(*F4#+C*?3-obM~WqyLi#nB32*4Qe)Oxls#>A< zJ+61ZVt5V=ZB6tVFiDXax?L|@G>EBx1x#Sb~6mP$8r06kLM{U~uj>o(*DQ9}U z!gJvDGjXpkcGK`D7xR=>S31|@!t`{}>AUQ3>0F3=)4|4uivo7MB2Kc0XrW!?7VcP6 zixG4$2a?n8jHX8gj!VJ@t97B7BAF#2xFa?vEb?bNA-(NhqSlLQ}!S&Nief&Mf81*VXjB~zReI8r#yr*(DJ8EUOc(+1MR5`8^ zR4dDmnv8zClxP=@ETbIm-ikg^iR834(|gi#b$oIR5uRbj!Xkqx$;oINsv&6>xFWQb zVt(#+D9PeF^9wq=++h5CD>Yq0wWVs0)ykmbEy|C4=pUj*eD~Dw{`UP@^xo4ZX$o8o z?m_;ni3OC5BqurQ+4^S)Pa@+>Wtydnu=oVS`I+^R+Isd~(%?D9K#W0)v;ik!)-UF? z@ezmhO_O8VbF-gis?0J2K}p;s$yB92RoR)98r$B#g1P9sO<{MJId<2p;zNr2f`_&8 zl<06Z*pjB{gqtRU&P!JprWSwv459o&wa7s9zrhNnO#;b(s5S&<68)!uCaC_e4on~m zz$X7nHwgciUMRpf|Dqn%#J@@xpe$$4zdy-P!v2ry$p0Vi^_Au7=?Yg0&3Z=P)c@KM z4ulW2b#&tFPtT0+=#Xja7?1h16O3o4+P!4IZnw^ft7pI7vyW)_;C&n~(vHmyscy*P zNM30=p9e|OKSRV?s6&~x!`Bwkx|2~Za5b+{DMP7fZsLIt8)Hb9}20%sj zI!XNj+_EiH!@T!M_2%Ko3e{>$vf>4DXYY6yZ7jJG?w$s&VLRhQ#Xn|JVb0(&pcd+Jdwc<@d@YgI!A$*z z1%S)|U)A6}app5aCSN`>Xf_$>L}fTZ06?j!K%ZKkLJisI$SQQU-+6$wr3zpDLWggth3p zj{$BvPhqTYo9CJUPKv`rTMmpUSu3Z-E|N+U`qIpU80|4N z%WmUAYQAngJg|CkAcB{aLTt}~cV}0dOS-R9?#t?`a(vDU-OZ_sDw-q}{6SX_PWsva zAOjs(EMxVM?u;Js>?b7{1^|=d%E}<&dxY+pcIq@hoN3d$AZrK7rF>7r>pVH4sdT4( zBT7wHgQ~|*;%2!TZ$ZySiwZl19%34kiqeGI-mGRFpbx0XWQpyf$|?2MdrWI7Kl!W3 ziJDEhrY0pWiH9$Pv1dy0`YXJYjp2xo(wFMMTE?h}RIC~`mVa%AFkb_r4q9;W3#&|D zD%am11`h_9OP6qR(-P>RD=;wlIVA$rYNP(Fv{W1RLlC~cq6D{i9h^p+YJT}mt*q-a zA;a!PBHx_Zb~m1m|In!XSj!=<-90JRbhjxl@5pVT?>W0yT&(UW-Z@$M)l>V%Do zSVp+-Zdo-Unm{Q|NU{~MzuPY}q5#Lk$Vvm#8YKi5zOfnyeph!q7Yd6i%;!Gl&p zraygezk&!E9lNpUyT3VP&3>zwh(ozQ@jIh*264E{8?~ zIE6-T?qZup(d^gPHHdnAlQ{VAI)2>QrT=! 
zzzaHq;pV~Qn`>V(HX3ALRA}&FmJV2iH)B{Q%{6*00=fSYG_iL-^;v(7Tct|pl%%xd6p+2X zW1ZilId~*~wEIE>%{!_9LsV)T^jvq-&%bnIc?o9^3O!a!!tY?AwfO+IjlFIDmDtGm z_}euvVNZtFyEFBH%V)bwC_G30t}oGQdHcU0kib2uKa8GIOm6OQI<9Ftyo9SgO^xRj zlDJ~E1+|GLj*>4(L&^4kSp~eAHOEbpT|c|hcr9}0DM8AS?;B}mxi_+%OS0?dts0)t z*R8dcb1}R*oAg+~=A>S38ML{Oe4G6+2R*V#@nF~{n_h4et5k!dFVOp+PVWiVF%ZYR zJX+@to?{N=4#QJ_?U~-W0A*GH*ygu)qdXsJS00V?+t7h@@=fN_kXh3S&G&Cx#6t;M zE?GcHKx#x>I`_CFl>Tdw<#O_4INlEu zA#k1d$S>K#7_S&VmUeO6ymn$m*ciF{ETP9yzN!koYLAa#|KFL|KRI&Vesh(t)~JAA z@l03L`DY_cpUq3~{WoXpM0D}7v?_>ueu>m+>ufLrwP?hJ;Lhd2k+E0RFsiDm(S7vb zw+MP{=e&cVqu3wxh`$-4nqVOqk#(iK)ma_9M89B;!TV=6E&rTS&5)^%{X}nf@*JR;F|~LnXVQfEdS#jP_vlP-f?% ze!D83rXRTIMs=TYUXD^9njDLi=%@<@e+eb2B${_qml zG3&uw^_vWyK3J93*3qF{;HG(uo`0*uzzMHfj^@Ww2L3NvdAaj*n;j$h`8p4i&&|%P zJZH~>e6>0z`};}^nyWbxk4d!?;HhZC&BH6&^zj;}IUdA2eGEdPPO?tek-x3F>z$l& zTH+(AF*7mJY5*a>l;*ISM3Y0970*2NQOM?zDkt6;4#6xHj@uDCdoUGpTFZ#`ML~R) zwQ_~yebW|ju;QZP*F^2b z@ve?Tm)=@;gKqcj_X>b{+}Nw9{~(`5ad`=dmn7?NA$M;RiJ->YlXx-^E~9$(Ny&@{ zqt=y^6FqrCcdI`ij;c|gOZ^B&F~HI;BuKs8?On>ptpN;B*RpSfa(c^yfL}%;QYj(nyf0`Cfvxh0$r5 zs{wNt6kqikvK*LPTx7esLnQcF&CHmz^TxLu_Rh}bRvC|Cg)%Za`Z@vd2?sPUeTIY8 zl)hGSI1_&khRi-g(;k-f{Sho!uR(qIMT<~H#)IYkTYF5Y^6tjZ&S93$&J>Y}yW zEp{CBGuZ52Q_eZdn-p_`Wlx;bx<1~t0F423XW$?xA=+)v;1E%`F!8xn)(a`DEP3#a zy$4na9{-x^mY0swXR3S-z!;qSXsY_4;T~b23{Be}X=R5sJ;1eH8(u?gfcD5*-`a1& zl21goX5_u0Ne)nC!j`7RKB+3bSq|1fV30~`^IbxBEUc10OT^2C4}Bx0WevyEfnsOG zi2Kt1fG=OhF&@@&iMK$t zXEqhimIbv9@!7?q@0$~V2<`mbtFC43fs!fareBbHFoiUoD6gW@f1_-BtaFJU2myNF zEJK%B-Sp$7%bBLAf0xxmU`0j?j9{GQ=wB7%*v}~gP7nU}V-nNI=3hY3$?piyP zN4pNzd?WpA!pa?X1DN;pN8Y>}?O_fHsNmhX@yO;!7sN4mSZLJdwTZstwHCTiqHqn2 zn{9Qh+t(>yBxI}@=32t0HQiyNeJPV)`9^}D*kN@C2C8tz>db*ylV5%(<8X~nNH5t} zwF595i${uipSg))khFLDT=BgM?cD}Ml2pF6`{Ayyo!qy^%iSbM^V;5F#cnmXNN)0hE#wDUp%}X-TO=igb5LcX!`~@ArOV ze7}3|A9viX;#hm1z4ksU=A6$npGl~Sk~9VyDH;RBBND6?E1oY_bKFUu;-0FY$4+hQ3&{SlE12j~FYSd&^5|7_K!E zriCxfyjX~3B?=9R-J>E(b3Q03U)s+UrcxU*OWQMo@4A+ZHyEAOnnNrsEiGxfxgbsM zd01wO7(Jte4BTiT>(k5n4X_ugR0*;KjY4t?ElfO9v7*WO3YwT 
z!Dl0q|L2Y#5h-jW8STXo$rc#}`5wvpE}yu)+4T)Az={L3 zVx8DP^bt7E+idF2fgBnFj$Lv2)6zS4?cxty&iXj-kbLw&A|Ge`OcA`pm=dr{lRf z)Ecac$n*!U4+hyY97PvDP@{wxhlG95*CKeU@6*dtWA@-pxd zJS~3q>5GyTXLWJa6E9A*aVBlu;hs0TQu{90MRr9it~I!*=krIITPEXn-Hsdbs6};p zIFTQS8cM8KK6WHW(VW24n-1H9S-ogJM%*>xl;1g2W!+pvo)1(MsDmB*3>{ zE$TfG?+6-r_f7oFvy7jWmr~yFk-TEA+M;jnviyYQB<@N9t0)w zA&qkP3j*q5!iBj_tu0MRrlm9|qvDe|h5VC^_gXcu6N8dt(;4KnF5=8dnz>&-Ev7TURVa1%(fV^T z5&B8rZ%>43+4hH)X!r;C#6cLR-lIcKJ-VFCv!<+ItZM0#IG zLe^pBRDE9b$#zOb{sjC4ervi>Qy~NjR+EDdx)BM%EY;r7{jwV*>cdY49hy1c{y9yi((^vhkoGx!sAOm+caJ^=#_1|@@ zkSzNwY<`Lti#87k%O9R9y3M|;*vrLhdQ3g`(Q0Ze%g_paM`)ZP=2raXOKNHmH44~$ zZR?4u!l#I?wP_o${40fQHP32A>UZBpCm$@PM2hMgGg_%&DqSi3j(a@q*`pZ4trTT74~#am}6qEpHjS8Cb_*a`3>5 ziP}1X+p}v&2WMI*(FYef;l($GGQD{O@Td70GSgmUq}9BHjp(tF{Y0t~eFYs=Mb+sC z9TBy^UINVy3z*v^CAC%Hlv3E*3fCi{qnp_p3`GWxb{)So?Tiaf?-oz9IC`GT3}XL3RtYB=8(D( z)ynT*OJ%>qjz{SF)7b?a>DWLVtuZcq#St^?)C9yt#CJ8Y9b7OD5m9=OJ9I_@0sPhH&-ulB_U6_O>^k+7lhTW23-2V&V>X{StV_B7fXE;`gqVnn`oG z{F=V}U{Mj}sF&?T{A!rt5<9HJsl>U=AWcnI7e|4T2iO*L4_t7;w^`^656eW}s>dd61_v>Xcb=~hnEF@D}AtPd5+YX z?!Lk;qxA+)nXrj}jh@7IKPNw*P$gsC>B(t;xvGl3zz{A^2*gB3AFWq@O88 z7&*RnxDCOyNATxHcph<7f%!2I{%Mdq^NW%~=1RUNt=uqbt9jG!J@jl?x?ro!b=WGb z?3j7Kfu(?AOZ?I%Wl|5E4wOMKCJHa5(_-fm<^BYSlSm| z>NTW~*X>4o2#4H7G4|EK=I1RXV$9Gm?afPCiryg9pNfRVxe>_dMrh&TFvij_}3?&qci5yjwv6Ed(8 zA?;?ss~x+~OQH)J?Y_9Wbwkm7Aa%#DcY0x@luGdNen~JWhG)G@=g4)a$+l42Z~PN@ zCcysL3Ab77QAC7?E=y_Q59(uUl$z{R@r5H(2D#NsqRXd9Nu%-3Q=bNfd-U(J)WB05 z9rQUeD*otPbomX=ugSw`U8T~`$vW>|!P!rtf>a8ciAwp@C{t@vhb6IGa1LnrHY~qH zQ>%agIPf543=j;3fMue~2PIVRz>$9A@_`9K-544O)2}1sg{&g)xJ!OOdg=FBO<8#B zz1Ek4UJlJ9Y7k`bCXIToIJ=X7?tF?nj9TOzQ<6S*S#jy;W5t3bAMtRPbvKznHQjEQ zDE*>!89lfsyXJ$+)2uqg7~ga&#szFioHbFw;JI<{!Gkc&3CgPUyOZ1Gl*CulIeRA4 zW0P`y!7lF$`cxB+?3FLQZHu1c#gAt9{)~X(j4SHrYg}4isM`%j9IteVUF4Jrj0E~t zb5H^&9G*<-Z(D#KT7oAttB}6CP%9byy3Cwot$m>BG_Xa_J~ex35!Rnp?nBVMcVXR# zzriKO9;}4fd_qfI+g#V^u2SyQ`lS96zE0R~+}xskgUw_P0-gnR>Op zge~REm!Skrw0t7!|0j|9t2@tDjzI5xTIZXg2+|N+iJIyfPA9LYXS0((ty#tCgDa3H 
zUCRF5zI%eX66EF7&elf#@d<9I#Gt^8R)MBa zddpHEm1s|OkxEgJxROOqVwQG_?0f^U;PVeT6pDHJ+Q=-(Pi0#M4$^a^xtrb6&&B(m($ytdD!Ai?swydn}L~q(?!t6x3_SB*~{1joi_Nnk4P4Z|IIezVkT!5iFhT!9kQ z_J8*1otdjt*?F!*?MnBZ8C;nzMsipi8%aS~*>)kmB){K>xKk|Cyiw(XrQ&YdwFoU4 zv+431!EmUx#OuMj+P zk1Id4jY_O$CkH@)6Ge*oHkS^pGPri+{zi>Y?Nejyh-TxEd;9m}2s_>QFne1)(}2pc z@5SGE`QDNVP<1w<8)hrf%Fe0;yGDGV-Re_*;y+lPITcIlT&X+F3w3|mu&CN>$4Xcs zJR2a=zpEM*vF*6hU?OVra=8ENz~EeZw(_OUM)bEJdeQ;tH!@&|5Baz*jrN9pjn7ml zdMvno_RcHnbfG238>AFRxrr=v_n|>!^iP8eOMQp86*jky&}AQ_$dkzAgMnSSqH0<< znjjBh%#KIWk}YKuTs0giAMK1mNZDC_ToLrAe6T)1jjv1G+f zk3`_pE#m3pyckEYwL3i;FR{R%5{k|1fFlp4TC5u^BbWyT@O>dTT*= z?LBw4ar6uXiRm&Z#l4$WAHka9HSfJjChDCYjphTsVMIhjo9DVc(gAp^>S-KdvYyy! zeH;GpSbsA(d;D$tX1Lh_d;DFyukp6`3n;Ouk7(iUofKNMjD#3L<%?SqMzO%5+=*L< zj!GsU(eO81p{r@nGp)VxUcF8aK$pg5wU?liwxko`qWv>aM|c!dQ;0A}Xl*qVG-(-gdTg?jDXS`VSU}Kgy4*A4&`UV62V$3V(YnAnyqxj<>?wC?s3oc z93gf49i0&Unm%jet8{*BmQO!Z6pVoKt$z=GH4(X0~WH)be zY#SxBKh&mhWE>R1k~|31Ygu*-=)-M^Zu_Ns{KaF}i;xYqgEKhG^md z%Yei^^kjOeE}KqI3+leWF4;03#`s54E!$IzE$fr2$ME9Rt1;vv$2$7p^@7mSt$?H$Po<19Ixh-GA zxthhfUQteK{?YaL<$yc>2c}Brm|F)%KzcQb8}8Rn=sSAoejZ5(N@TUr(biAzVbBYt z1*GAv1!?i4gtBy4?bdlzUN`OhmhB`=f3cZ|z2__*3j+hgyrE>%NFk;U3X^Da{xlKI zIFX3p2mc`UUWaxwhnY2gF!J#?%%WfH9W;t^Q%n~3Aj!62UTVh8OEAh*Rp+9GKtNZarRyqn4 ztmejg>H+`vy|gRxcUCW?GfCD&%J65#=l&KzulP8ewj90EmSs-+%UcdsceuyDDRGwP zMz<)hx_M>_CvMcijmCu+veAvYO8pPFXJwre&zgR$w=opQ zZx&s8`B?(dVS?_Sb-X7wt)!*$c)8c^dCQR9*P3T~et}E~D*O+`|B{PhSC}z=|6F^n zOUp&RMSXu;hIcV-EArZ3vUA4ytv3)g!#(`K|E}A?sa0`?|Q2Tk~%> z>&3tR!Hn@yOfQJhyU@;4t3)Qw_s9;! 
z$Fa(C?V@&%099!Ytr{%DA)_F};ivdr*Pl}7W|sN-^_T!nj;@*xwYf+aUwpDf0x=IA4@5(OP7cxpER~ImVdGN-J7$B$l36M zmx*44NjI0}#MR35`Y;g6x%PGN^~^3W{xoVee7BKono3@GZ5DmU|Fa<1sttD}CgtcY zq^Y5|=w{l$?h$0C&J0*_5Q>ABA8gLF@~*}BnDGtZrX6SCX)1(AGtgVXd5)yf(u;c@ zgdI1O+E*Zz;5PHU^upXK8>5l+EPPXHw$+vY;1-pfLii)zUfS=E?)g_Nl2>6ndJ}NP zN0=DY$Bbl5@^Sy4-{89%@2T*T?#LPX349Qq*W{gc^n{;K;>_Rvd70%%WYet#yZXY^0#cLxq=cy6*#0Ze3yujx{J_}omfeMwP5qf_v?v7xJ2HbOVj zQuRnjo(tK>5Lcs6ibTM4Wkbaf&*x5Qz_)$J>OZ$@1i&fsH%#qzwqFCM3S{;Do(K>y zEyoRG$QlTT&q<0|*(%@@MjXe6@a-0(JtNmFb197X)F>B1qLu;s@U?N*H)dtTfEHdw z?jew$IOeGU5x*IcnO;AycD*xdDu&tlIR`*TKI&3ORTdx=Qai4>eeH{+^bp{GQ$xGJ z(pL<%FiF*cLl=3iwPGD&x!w|&8U0jy1d90t9Vfz3*OVg%e_xI5COFb zfO?>97&Eg%7skWT4I}_e6otP2ncU&5k67Sg#-*G+NL7Oo@l*I^Fr3RDvj=tSGF~KW z`yW;72)k8nOI$mVpJKSN3eO_f;#~3|*%oQmEg35burp#D#a4>gd1J$IgIy~L0QOtP zFdi#AioJNX<}J{{NN?yg$|}|O^Obp?&IO}}R^^^sKgHFn$A^j}4e;GS9!44-%2(H6 z{K-**u9=%seUm;5z1ncnNa^U>{Nx`xd8sT~!K4B(@z*quDal5v^6$O0C_7lCKTFEP zWQ;S-hu2-L8Vy{3%|X3Q4PQ$FM9lgY5HU5Wj_GTosfFhxwW9cWLTh2ho7YB__$)%OUvHS^eINT0ebi<$h6SUyH3j(X?~w$ue7tU zy29;RZ29ryi}SQhib3Erd{Qh^D-V=8qWX!WZOUW#WK?u7t==koa?Ir5Bm#(v1kcuyGs@@#gCxVCRuvoH>?KN3s$T>9*hnv%QhA|283?O*P;fuDp zy-@{~Rq%14hNGTlBbV{I5BmxKN{)PSZToM51omwyz*+wIPT}pdJHd{2KbS7p8HlN~ zl;pl!9AJ~2dYpbUl}9wdDE{0xX<#m@O&W_E^=d=IUYA8fneehtriONnc4Gb~6_xKk zO=0Ec!KKlLI1BE3*Wl-tUMzIbh*-tU@$|yOt6ji$zwa10`Z%SciT^OVpY~I+jNa?q zyAxj*itQoNa`Yw3qc$bLaK^gj}X0bn(&=HS$<4{%P3a|T`;epnZ85o z@@QwkG<}Th(<~$o#AEX9PETaPgj48F)8lVF4DG>gX{XVu%*@*ZFIG0Q2$Z$VzT~SkcuM8iM*xXj;=-ai zVv~#my8@$2<1#>WZ1`Gm)-%E&WWCB+WMOELshCjp&I78omOjBNHPmC}0co zQfv}iTw$0zt5{&y=Jo68@#*OL_>se&Mx&b)9>%yhc$_GfoFYA~DXWGB8r1UXOyil4 zl}E-)TZtS8Vtgj!Fj-^Tzq^+vUC(dFl zKM@FE#yHeHu5RykrL`@IK;C5VzzlhFYYT(I@_dAYG}WrA$zS* z9S$ivjR!Bvp6H&Tre`~d^o(O%3$i--Rx&6m&*MC_zOH;dqlA-7-_YH-jXl>2hjAIH z5qI^n8~(&TUTKk;kV~J@_~YFXtizI%pm#J-gAhGV(uLowZTJZkI!Pt8vfM+$f5vikbZ!DX>2@m1j(+F-_3eu z+Mqv$njQb&1HLlizw#Tbc0mUwoxKFF6s{l8vL8Byg|XiDWS!OD>o&5AN=iCHv6Q}D zt=Mm$>`HbIvr6X1w=cZ|MEfy(FzP6my=-u5uj%xlNgrMDHf) 
zP2l$)C!;c+!UvmFCDMQ1LO?>fhPQ46t+vHgL!jd)S>N5&O0wZ*DIpWLdj_WA+e0c8h@_m9x-r&bxvwrAD8_uIEyZ$CAT-PsFrrg-0>SH(2On|S}$asnQlUHlz8Zk1Xa z+pj+}+dVVSI-Sdmz9 zlTjy+i=9?36eEpXYFJ%SUQIgAE7@sy#fV9)ni;kibo5OpvGXit!ON?ex6$3_;rvDE zm*eMZe^-aBGSy+j((o=?U8oy{2;ZXZmXHFz}~mK;;Q&SHSpHuEz88aqM{;AZKA|F9psJx zfogOZrIeW$+iWo%_65)9xViQ!&d(N7t?D0rHcX$gcAN81?iSg%-IBMqt)NcuO){(X zF((G~^(s9rd3LXkco$cbC-`0EC(8r`}&1ul{nOHZM4U2*gt9DKU|PGkIZd0>rxO{>6YPV*4;~(;?;kv-={*M z@9!*>_ODK6^LpX}OO4>O5or-{8DCP$(LVyeBK_ zbn9g)P@ITf4O>3TRxYmStTc2~Fr=FT2`R43Z`S%T>B#G=>;(+PxTaNKH~}aDp`i=q z`iRyLXT`M>a6zAqrMiI&0@1`DSU(o9@7YrUw|tjJn6h%(PhH?c7#Ck3gM2YCI`2Ck zfpLEi00hlg$OOPsRkNGjn$KjrepdX5whPZnjNZo=taf6IrCecwzkQk(-bQOkB6jb} zPZjk02K$}5JO!-3loTSoa8~E8^_|V0`B4#8GbP&=-?Gk9QA4?djv$7J@^IU@E3`+gQg3{y5Lfn8~XKTLU|0PXC8nW)Y>^+U#kr|o6o?oXO2|%$6`~tx9 zUCX2S5h2)Qt6FusxAMKq=m`2VwRx#U9vlL0YW**~?b5#x@{bp8@lKBJ_Vd z_76hq3)!kaBQ*GADtQI<3g%pxsj%IJ2YtPyXQq&XT_eQUR`r>%Z9r9Flv6 z0tl^w{_+hW6#ujV)$xCBPelJ^DgOKVzwLiJ_|Nvg9sYa$-}b*9tov%Y(qrvbUqsg= z58SefK5fA}v$~_4eg5%$gI}RQVPDQ@$Jtup)o~F+4!aVt5Z`7rRLavIMM5!ks&y)N zyb{h{8PMC@DN)H|Bk9<%LLi=MQqtdP@p4XlpEk-gUAeQfVCkU+2{cfFjr1)4g#mrb zZ88P8efIP)>#?M&(=QwdosuH86US$6AH9i^rvt79Td52hb88L6!lEIWjhR5*;-&q; zdXno~x&F;vBc+L3RmD6O2BLQZ2!+0##Vt+;_ak~6KR`lI!T+;Q=I89Z(sDoCF7Ucc zTCUU@33i1S73A&$UeU5 zz-~cq)$NeEv!=+wFk3D6G!zhEqdf?mN(^Zr>LyzayxKU=K`b_|`&5(sVjmt5LK(VQ zMZ^~(087)V&SY~qCk(nV;_!Hj4In$%mU|a?PvahtRwFDZ`@b2QaKJT26mvnmDfv$E z^;UZnM&D==`1fU%-QHxeKXaiNh+>*ZtG2zP5+NLQ5MxWvX?(=dd*iL}Dvv(P%`39T z)a%7f%dsEoF0Bnc!w6mg4;=y|nO!g}sPo{J#qm=&;T)`P)}?nzu)_01kM`ld(VbdL zG)-k=tnH|Y!CSQ}5m*2N+Q2RDA0e}AK17EHV|q=M=4^Xad3Tf^!xMXTm@Y3b)y#JO z(rNTUT=`^-=kq76x_fPaj+Q(R@NLW5wRFYSLbXBuqd_iX-k8KM>R+3?$(0gTJrWsd zLP%|o>w1N=C5dA3vN~n!WHACZUXWiV>Te-&+3(Z!%fOq0@dJ4truv=%EJd=*Hw6O*;-TRjSG+$q(+*Kj=Cys>Z6dSrB$ zl*w?e#jydn2ePO^9enyn$|mRMj$?bS5l1q_WGR2IX2Pl3C43&-b3=c;-#cE{As+Jm z*sVTben$>HW9SR)dkKhqAapZUPCIRIE-zo7_bD&mEN5*fd6O!A!BQyNI&|k{QI{#& zRE3Em-PMm}jaync1fv_vOCv&ya@!3Lycf>YRM!q(>9K41Iy?gjjwd{nW)YHL`Mt@a 
zrR{cVh8wtltiyC`wyDe*oJC2OYx)`0XRY4V_S~#*X(Fv#fjx{uT?h*g_QlVxK(RXI zvj)`{=|%)B(`L74A(T-qgR6eBJl6d?GHEiOvad5Cp<$@!<5z?5A5;s~t)+$K-!9%; zgq$p9a6M}<24V~jbsuja<2d7WwMn@XfuH^a$|PZB%hZw$FG&iTGLbj$+s*vJ-yNFg z5g>SZvr}#1iUAZ;K&mnMbL{l^mTQ>dsRa;+c9x2h__NJX2D zN^9?!+WvxJVh??o4@fhI*p5V#u|}CCVrriI)T|pHxFF6K7`5v6)Xy0nJ(yLJ0F3-CEz0kC<)i`l~Y?Sq)Ag3eO z^rnpT(be#gpw`|BRU8gq$iBhhw7_9#@r8RE z4kR%K`Ub?LltVc=*}2Ip>UVjU+p1=QF%BES*9c9z#(Etkxa!@In`5qdA-}Vq?P(gj ztS^=WZp(i^JaHDMu$6r^{?e7n0vIV8lm~r*tBQm8l4Ay7X4@ir)OKfd_ddV|qQC@4 z-ZuII0aBUU&Xh1~SC9A|$e#(M@Z`Shx*N%p=G%c8Ci8U8lldKX(&_oB zN?ROJDk+$;Sn=JF;Pw04&q7}JD|+7TzjBQ7)2pQB`A=m(yT~K;(|4GkZ~9N$>q!Rz_w4Id?oOA6I zLM@urn8h{j3!KtOFFwl_OX*H&^UwKwMaiR3(cj~5>|%eZWh&2uYrqKnf^lMcdf9gw z5SZv@&nq?3Wo~Ij2iH3w#KXz~APZef^QLU1om5|u>JQJ@8x;jCfk{zi3orqIL%i35 zzA$_s;44tv-mhmnT0}Q?{igi%vC^R)ZUA*t!)OGcz=Bs0zHcnZX zY(AB7wFH9HwhN>dhYy@L7d$49kWfG@Rcc?>a$7+`S0IaCEzAfQ1IVV34VVoLu7};? zgcE7ytp|nTME!x&-ixXA^PR(7-yn;bPYg{>*|RXYkTCyoU`8xGiK6`s_uJO z4^7m&UuY2dCiGwX-~to=LBvn?4l1bdGg1=`Q2tNSq*V0v!$RRoO+cY|g$a3*2H1Wp zk(RX+b`lB!kq7hoqR&g#JkpLbDzl&O9e~ZH&Rs6U1un8l}crAt=4ip zYCR%Wj8$DDH6n`q4VoaAJ#bTC4|4x{w~!Y%t6s;qV9qRV+;X*P+J~IhZ)nb=uKY<% zE~j`nGAaA(`t=>*fJohjTC;=InZdw+{mYcqzWrXswr7~8hs=P6FX<12EJezdEu(*G znKxf%dP4bR5~}-lm5v*S^oRCV(-2?KiFp}qygWL!FzS^TnV(yi2C^C2Txk#1mCD%l zR9+!)c;8%}Om2;r^v~IP4RBAtPo2bzN*xfr+B`GvrNqVTJ=Hkrx!t+DE?PgGa@Q1@ zx`Un0r}dtGvvk{p+*i71mYzrX$Rg!bx7)2Vp1Pyz(Of{^Y>CjryJOXL0qx!V zTG%fLn-tK$v%1+H(V&9^A7`;eRg`82D-+o(1OCPW0e>cE2D)`RI4h7OsvY4ocj(ex zxMMoK4Zq!kb8SqIbEBJ_I>2tGDdbCDp=Gb!pSD2gNxfP((3^e?mCoK5jWLLaI!C9EJ_~wlPbmL)kNv^20s_JuJE|@~A2- zEj%GjryF4$*57XaD5Q}h@Th&!)Uo~#Ln_v5+*cwt2D8#L93tQ5Fu6ob0HAlRFz zFG*t#cWH0y_+POHKbFkidO?)Mp8Yw4T>~8m-h-O*AwJSRlGRwCBm-1=(fZ?rn9;!{ zRoi|e(-M1l-t`w~S+-g7}sT@QLENK%f3JcQ~R0W)m=dzH3v9KyUUD@X8;54W3nD(Ncvs9llug;Vt4= zX!{eIyy1}>`+0nx7bqhkkO0f~_0M!$T-9%-q?V6RfV{Ro{!mZOMY*ovf5T|6CiWeQ zri=7e@Pr7Nh1(i4jEk-@EtnWCzCT{c3D|GuaC~j_@T4BG==Ehj_X1yw-g}j|*yYTl z+R2U$nm(j*Hgwsx25dp*uB#6|T??X)Z?#A5c~zeC4pwK_ocCOUlD&|VCrIVrrj^tm zPNbDt4sELt 
z_o3YArqLcsA*B?t9N8^zV*pgzAhJn3z4@IV-NVT%FmV0G*$vS~%1&oR9=X+GwJX?! zi7|CSh8R37QV~<9NXw;uB$|M)znrpi$+j}%Y;@b&G4A(~&w`GECx_GQ<7Cut3kQ$H zhcP`_8{j3`AbgL#&o6HkYZYG4$sJQ}26`uvso#BhWySKwgQ`}bkO((h+sN73P+WxO zxOlr=r4_rSB#6fNbNODOPq9%J>lPs=(am3+Sm2SuE<_X;bQq_=->c=kvb&$Xq-V?c z#eQRF)%N&#%cX=xpTi$9M44pvWWMw=?3`Q9hi6)RUdCJPU3*0m;1xdn5|`Y|ig+Xi z49_6=rpf$ViFCL$Y_?O0L3&vEY3PHDCN-4OF)$zB|Qp zJ{YqYwROHj^ti85XwGk|v1q5I{5I+ z_5(#J(AUm9xPI@;dTk99pC*-?N6vqC{6DUFraKa|Vr3igspsPdz#fwtQ~Qq(UES)* zfuh8hK+o`{UNAJZk&^s2;vg>YQ1gWz)TFPNu?^#iaDp%Z(8~sOMa4y!sd>n!Yumc+ z3X8B1+G{<2?>jlqmnXg(fp)=OXWEyU8ZPIJ*noxbaTR^aPH%b@icti!2{ebUXt6Sp zYCnU0MfB@5dnI2gd1WNaMC^t|`;3fi)$M;E;KA`>ZAWkI_vpd5S1QeGVm%hl%|S)U zv8KFRduL#FT3TNMkO14vZv2cy)_b1at)zkb1C|;ywVuDoQ z=`m2oZr*mub_YiA#hA1N`(vT!>Q{a%BF-XEm1LZfWhCslFe7?hS~MIrH!(JeUH1h_ z*-u4*i}Z4QKg=+khM#aGd-(jk#{O%M+5Kdh>q>RVQ#ig=%jFg@?5oGn37a%yV0ChH zL(dZD1Ysc1R+8WLnuf!KQ(dJeWrU?giC+RPD-!0uOkReq9j1Fha`(jOK$smLGjk<< zWXl57A{1^F>Pbh8s~O2h7|4fCM01+h*r18Pcm4M$(ge3B)ox2I7v)y%?Y$;{`v}@E zH)b+VN%Bxyf6zW`0o3$%9rux1_EF-afD_e|01g;Rtw!;DO$8d`wVsZ9!uPCSThFlS zjlCCcrovXadv%>1h`KL-8yL_#cRauG6jXHLHKVNfc5+rUC8^xVLc9Ow+j^P!5=})_ zo;p21hyzDklnRQ91UH7j~8Zob@`IcDo*VA!*OZw4QKO(e6E}2 zGcOh|Z9?J>(O|+~fk)YR(I0ppr_QlC7&)9a#2Wpp1t1(ep6+SaxYX(L)Ykg^)P+JL zR|Y&DCxH5ofz{jy>fl8CO4}8^iN2*cP&(>%SkI3~9w`5ERx+3KNx7F))Vjw#26<@9 zYC$;c*4WaWo~}e;lXNqCS7=Yt!5%HlMY2ndYO8h|-SE-jl9Fqa7N{V4?{t1zli?=Z4xbevgB@S~<0-&gb-)K~~it<$e}$J-1bewcEWIK?*?|ykPY<*|u&wk$(Toj?D@sY8EJinHF5|xd9r|p=BWc+`R+%z-8~6s_449{uQE?TfW-|^PQI-$fQ$>Cf>)BSQ)f3v z(B{1xf1IUMzr3a8>#-=ePi;KT{s4T@6mA82L^7rSbwnY0H8o1m>Mw1s1~kTJtT*p{ zfgm#d=mmWS5qC=JMgL=dUICX(>>c(h@vpHJLH3|r%UW$H-oDRU+dT|@=V}^wukZ5W zpmg6mZ9c3OF}QM#a?03VdS5}MPMr!Lxu*F<@~|%3?LoK;TEsG# z!fDDM`R+$+f*j(xL2{Ae`wG~2?<$L9%}GEdl+)@v$zr^ulss4kC87gHEPg-8TQnaP zlIG~yvf5~kub+~UquVeKXCIZ{1S%xvYorpPNeir`Pq=cEqk>;C0d(0M_@e({scF{t zmjRr=f9RRcC_4D}xk{Vf*3Tt7(55DJ8!}jlxBu zdNKc|yIf3+uqeO<@5f&Ann51UuO~6jt~xlZ+qGG)HAGCj-L`AHXyN?5)lc5u7uN_4ey1caIE`=ba zVrJtM)4v=Ync004l#IwFBIlHgdJCCaojIfb{8fBfu 
z8O^ra<_{O>7?CJ;1F+ia4!@dBzQXzb(9R_bR9iM~K&9y(|0Nk@Q;KsK&@kbq*B5mz z2TSvjmKxd*sxTw5bOFR-n!wmX{FNK-mXU!VbM*<8`gOa@Q^ z_eO*P)TP$*{b3k=o5__g+K^)vn>y&7Xj4u7%W-)#0^gRiPV z)#H@elyzwKnLZI8C|Nv@I(KD{fA*v{K@+6qLqZyF&Mn-ZTvpvD<3mDFreekKIPVtY z@9075DnPyaZ{B%5we|m!na+5)yP?_bR18hi@MHQ`l};E*L^1XC=zp+ZRj1I#=PSd@ zvLqDwkr`VZr1d8#67YH@|?`ZWZG2RP@bay72vy{FincwmBRM8T#H~^nOYT6BMH&6 zmQTZl1s<47y^MjQy4G!?*_S}8>$SIjLz+}>u(SF{SD1(z4Zg!9#RTnL@Mqbe!jRIN zo}ks?Np^Gtj~SmUgBru)8e$CXa}(n9RO~isi3%ir6R)NZBYcdH?>abeiAJmGg^b^^ z39>DWx1!uULF&Krs37{@negWRat~J0px{_n`KSBAlXL4=V2x>v{a>`bby!v1);_$E zRMMcOL{PfBMG$F4y1N9VyHgtJ7U@R1ySux)yYrho>N&pWciu1l__(&XuYE1{nrp5( z$GFEm?lD&3Y))Lz{~$Ty>5n;N%MKzv*qr4r2jdRrS{~c6eX_z~Ri$(iZ9@!P)Mq0l z8cz0Va%@H(g5K=+-GF*J{calKA}N6|7*)Jp-aH$~mw{0%js%e#Rg|1lPBg#DgI0DJ zgZf8iFohQlxajM>%Y__kjh(Zx7Y;<9&&x)4%%zfbT*iOOH5gsV9%tiuy0@~ws_C*{ z;8ZIh{wIVtyh9>*q^a@cN6zt=8kOqgmEYk$1k#YRrv`D6qj^J%Sh<7o6!UKm!F$X0 zUGG5Dz9;RdlQR`af`LRau_bX!QP}+~>;?pi>T!CED(Bv3c-Faw5zm-eK&0u}A9XN* zG*k2v^NBShcR;P6Tw*(?`IGZL$dk1_rKrK79t>{cu!vjEni-A7Z?m!)uRLbMYp@Kk z-kgunE{mA$RudqqZxToW5nl3HiOPMje2-i7IGc;Y@6Q_nT7cZA4IQi}{(IU`-&HQ; zLUkqW+ll3(#sd&+fqbBA^kS8W$(HIK4HsVow3sWY+-Q&Ga~+qYL=?V=WsYwf!e-Q5 z9-O!ch^2~em2fI?>F=VTQY2n_u=DTVm=Fk-20+++-O2^ujx|sVee(cj$X$};JIGgN zkND=fD{~s&*PiIlc=&0Im*`2~e#_ zkO?bwh*}HNG{i~Rh0`QssT=cPsLX^3Mc}MzYM4%z>UafX8T9jEP>DovX|U-2!>-%^Mo38*{8hoyTHzMx7o< znjQVarNU<0QnJia--Jhx_`eFhiTxNcYfNXlp*)+7oB5Mdk1sj@!&TY7i_`GCU!d>_ z^S?mhGyV_D^8YkXvfj(m1{L5S*i*V&vphaKAqK2NiQyQ|R&!hk8h47gyuqc>Q&B@v zFDFbV4hO|WnK7AkYE(yWxiW>4m1F>hLM{b&^ipcy6P42-@GAS-Er`%{`F0_pUH5&@ zV_-0~d$Xm*uG)VGl2avsWJs>dCjfJfcgtrv!v5m&Zi3@(oHWm3&)FP3<$%1p@bM@7 zhbf{k=%w?It_CSMF;&#-y0XSo6~09ZTT&}A$ykr1d>J7vCXdST4V5t(tbL%7Wk#o` z*ch(xM8-3XVw_dFNHojLw7*a+t5oBK@3xPOY4P`Q1LF9zps#HS=VPoqgsyAE@?Uim zj;;=(kGgGp*FyL^*te5Gz=-!5gY?$6=i{4F2vGs?$`xok15xD(?EbD;` z1Tuc;hdwz8?=(6m+JZg3F#;1T%l5`8>z7yv36jU8(Vw_WU%5=`LPauIKj&3o{(_Y;5gkYq z0ztWfLQsV(e?(|2QL#Jzptu?(MZD_u=<3d1spfZQi1J#M-mowlo0>lzGc$85_!&=b 
z3@^(ofIP_2w1mgKVpiUbjHkFn8R}%Z87LKJZxo`u>xO6=>oy^(<+|g!Q!aceW}S4 ziKPqmJoY&fL}WvlG;<4(JSUSa64dm8OIV!^z1IibIg=07w&#gf7`QGr#z=3I9QJC$ zsGjPHsrt=o_-6}sov3`QeP_~BBbszg^Ncl66B2dx{_Mvk@>hP&w(au^n)t>BJ@Lom84^tk^z#jjAnE`I8X$d+VB^A!VP@m8?&Aqen|!8Bb(j~IZqObHhL_Up0!S=tFHD+*h&Cq1Os zI(~C3!I5jD%(3mIoRy*r2}vuPw~duee|g>GD45%svpt_1v?`MwBYF7p($qSZ+LUYL z8`;ZAjyqH|@*%T4qs}wy+qvm3O1KBPP~*&#{pV&&{0FhB>+Jgl>&PWsB{xlP&gs!O z;Vc|sqGYr<4I&@tijx)Tw(N6roN(S!I_6$!ez)^}BJt?X$?+i7=MJl_J`(!N-k0B~J5A|{dgnd0i2*}%yzg-C)eY6_2BU&EMdS#16_ zZI_l9ysLbW}eG-Qr7bIj8g zcKUwmNo4%EA%C&qh;y5ngT2p6r{f~_2DD^p0R3YWOsI0Ua$i$W@XO0%pG$M*L>yZW zB(1xb3v^fNl*s+8Gv$H1(?z@0sk5UZ(Jhf%nAnBtJEFxx+TY$M#1Be8xi)}F!0U9( zZh7Q{jykvJ;M4MKNm`CL!xS3{PL4`ucsEme$?2KpJl}din%J1)hoU0Ek;I55D)|F+ z!?BGGoCtCwLuC`sd}>8w2M48qJ~i61S%f<9m5 z4NaR1(coG4(J~W8W$=F%2%VBWv>fr8T6gs1@^I8K2^YxDqeM3_^FZ@2=^$A?3m=4d z1R_%wzGV5ukkuqpsx9O*`aNTsCDuUcygV(}QHbBHuTDn^m5G#6Epyy3x(VGyShn2M zVX3_5MuupWA8aS|cK0a5w(u4@^J&@i4m~qt>w^_vhj-fWRc7nL5mgGAroEnq(exxd zNwelWE2E+zm{;opL0bZQBm!wWxk$2Ek6HrMDixt*>CWg$Huh3g;9N}6<1X!&$)Aguum`ocsd<_rAR+#TyE0fPuJDgEYB`iB^k?&|J z7RquY%z#^!ydGtgEEce(u#8i6I>NN-I2t?TsF7!(Nwih(K8Pz8n|7?Tsqn6VcxN%I zNLgy*ff}YgHF=)Yz-tDafE<=-#ZN(-su}5}iP}@LU@jnXE1SY6lO_3i`j@W5d-+Z# zm9;ddN?Nlsv)>jw#>V=D?$S0Mw3(iX@tAS3_;+C&I(+RR53Ju3<{Z!~jWegvT&u*6 zje1=ZknOD4GdZi@wUCCT?K%dXQ&{-T^m$u#Rn6vJAR#^pzHiIztqu=6&vx_r`fHF* z8?evBT*RQucQE5*JE?rDdM&V0p>M{3w8Xjmwk%)Kv5MH4n*UZw5QIDt>69548F206 z&h=V!d=7jb7NTz)UZP}*y=lsY6QfUUWwSPUHxxp;n>vT_{$|i|Bp`wXau&)vM1j3y zOJi@4xHO1JxArmZB^P&2w~ySY!f=^MLH0z2xTIVG=6^_{P{5#4nBU`{cQ;f4153)tR-;G@AckP9{FNq<*J>7akg{1&~j({ zSeJ*>@3d@EofhlM^|~oz?V#L8PV-b}P^=?>>9kOEvJ`ohO|XH@uTAHa>7y97wz<#Z zVnj@P%(9as-lg7R*?$K=eq+BB$Hy`isUg*%W z4pyszt|!f+`wlFwXgF!W`F@MKkcp1}d$H^H8WRWj$G>;c>;XYu6fGMOkvxmubmBE>L5`=`> z;xEdGBd~6P&~WC)YO#-&Wi8WmR!y^|9`k1WrP}!-?c!XUWed&cXSCOA3&#;{Lqf`! 
z%nyqmK|`toyw}PPtr%{VpAW`9fs>#>eN>_9<)be+Ik`0w6vht^-rOPqg@TvLmY3-`Zj#lu)p((*)$ z?<8>mFKz#9S)21o%wnqnl|o7O1A8sQ$c}~& z?5K=2O02WHM>A}?tW2%qs#r;PkCLt&Sh^{C5UTNA_Sgsz7C$T`MTIQb0%9$hWg0PX zY2J1+S-hrmNX5~Ih9X`A0QjG^+bn|dQ|tpLqIPvwSKKW%FW3^^ZfD7GGowk(G<`H< zKprYgf&FUJD%y(kl#@^5GI!)r-g0Gh5bH_5W33`nI0J`trYiYmG*!bAuWDi;w3q?f z(4pXEtvxO+tr65}z4XeZQdgLpk-Nrz2eE>qk#m)@byc>KHKB)0m$Dl*141vF@!69Hjqf4d23XI^3L)UbgaQFmSUQ0a+Ah zgO%UxG!q6hKLR6Ryj@!A{1op;9?Qk>X_3iQVIjSLP*77zk7sXDya7j=Qp-osw}njK zk8o z#^eiZc1gG+9EHf(@(~Lrr|zmPO-IG>&)bq%rn7ekJJ%*lTnOT z*1Xu?>dhbVyae5E*+4goZfBjL=(sbTCu^TEuCmwKA=_H(%he^OwKdmNhS*gv=&W|K zkK3xv)Z>Uh`1mC%ecC#jezz7jw-Qe$BM+)nX--yEww)>NG@Pg4v_tYuD?)xIVsnf+ z8jCYSgp?^NUpr*+WX?+W^jcD^=-X$DRhTdreRUS?_EQ1&xWMQW#wE92O@k8EDM44= z_}!%JSd?kS2P4IrN&zFDp`(xWIA<(bxH(ZhI&dy*&O5`8Zo8uf^tu)I>=xg9mKhaf zB3|nLaAw%BU%jdbsob5;;(E8bTIMe;LvMBH=xEByOt#jW()Gim&;Llvhb~Zad$p(W zqWXohx=s zxBUp2cE18K$wb8~^ovkKiNe5e-|HTc3;UFhjY_s#k2ubT*+zd>y2mS}x0eDebNEtw zZDV;dbQ}E3A{uHf`3e50m%Miac)U-^Fp#3061`m_%`xmQ1w@8?Mh1W7~2xQxhkN$QbaCgo^|dS65+M8;78}6k2;7kLOPWZ;a%-K=Hgf?qX5N71M-+FB$A z$J%yZSFNy>ksk*32s`wuQg~U>fu&$zWjw@?M&$D@q%hMp$d38#s>%w?&tQE!ScF`|5RGy%D7?J9Ge9>(@f4x>ZgKWQJvDhFzL6#A z`N*U5<4}prHwbPv8H8i>e<%tJyt}v3ynon;e^V4_TJ3*6YSUH%BA!~T5~OO*PYu6Kc0e|v8+B?OHPzX4Qz)GrTr9EG~r zx?@KuW60IWTxx^-{{2!5nUT# z)#f|QCrbJkoCmP>-mX&mZ~14`D2ddG{<n`rN8LPIcGqpJ0N1$KdsnI1sCz?9 z>kvdA%+wmcbP`$@9Q~$JRsL`>4b2~)anD3!$++6dCQ|wB`{@}8PlK8pg;R5%r zm%1l?iH4lI%T@WuvISnOLv7npZEVm5AZkk4O+EG>W%8g6;tZF z+VG|mNui7GC$-`*Re`--`ZL9ir5NRqz_4c41wql<6!+ORE{ldHfal~j`SF& zgtBHp%CocYoxs}2^2ln-CS-r>A?^`e9{XLD`VW7G81zLItHj>m=k`x+J$*x0dvDwU z#Sb1dx^+8y75!2c%z}8qx_KlOIQHMQk72mbO4G*TEbeM?f8N4JriWEc95cNk`#2_L zRzxO3p7({&KSG;o-NNdP9siqaLC!L6B@|*iibrQwEDE!q8w%SZlB^`C8XeUyVSi0Z zFwxQc0m~JT`2#3iZiZ45X(t^??}VO6k_d9Zz(%HIry|I+FMM??)Tr%`4~7nODo_pB%r~!j$Wq z6}|R53TC*`eiFdzFMOp;`V+{^jD?oTYGXkQTSdE9aVU1q{P@^jaU(Qq0ft47^R4 z-b6KI=-t`n0&m+wP+%0Gu`0OSMtLc2zCHYBF}DZXetDbx0Y(Lt@LWaGl;9Ay^0XO8*y>d6{Nt?$hG 
zex&3f#(J=kA(|{LuPrG=T;HueE*YpbR_SF^eOk)b>%9?T@)ndM#KY{!hc>VVn z_Br!+B}Z^~VcSgiId=xi(e;s{5oa=Hljed$`XoGQymjW4eD=GJ>QLP08j~+?EZCOo z+AEc&R-asPYwH#*+t3@{NPZ?vs|-tB){%;cAfbwh#u>VYi$Rz1ib?S&A==otIxRK* zzY$a1f>ry`u?Xoe8kk>4{h!tBNo%Exw3E9Nd`a?2S~~MA73!f98~oK%E9HX@Of4ZJ z->#1*S0l7J^y`ma;f3ys*ydWwXxp#DdlV>3mIdu*?(r?YYv~udj^gdB#fNmt3=g5U z#C?z_bxoK%)ANr`I^xd?A=@4C%FW7h8>UIBDZ$gUZbf5H-PU^d%a(IiQf5KZY0Jxu zH8IuU|yU z-#&mKm>=ZEU?wyx(pcSqbj58S@t9$#P{>5Y=cJAHvgb8xAio*s*SGXg>Ybk7;BnqY*x4U4wuzs^dXXPCr;Ne6BZW-i!G&!eBk2a30=k}J}s zqbP^Pka$3=LtHqPy=thr>P^TDp&%g1FpM&6uxKDZ_yc7$wT#3=6}jmwT!M?2!HGDq zsE!SBiJjPh#;_@EaGB+B|I|iZPMvqL4d&jrXpCuo!87CIq{QehBc;)%k{=VHGR#6s z1J6^iucpoYFe!5@Dky7-x!pIlz`jXdVf=fR7WsM zn=D^*Wf1(Q?}#t!GMkKIz4tUw5Ve0i4N$M6co3;3TosudhBJ)lG;@n8VeT zefBy#CVk{+i5#o;x!|$Pv)vuz;_3;vdlF$MLBZpy@i_a%ud}Yk+sbbzDwu?#=Eb?I zXA*5yf`(VUs2p_c?2q<}RE||PJ1lan82=tFdaCo$9TU*L8(Ikjzs`$V(o5K&C~v1Z0|Yfdgy;DGU5F3c~?T^zU*r zDf9n04-nw-Z$^T6-K}lMb>g$&&aOi7bK@jctY5B{j;6cIzq?So>yeXF$+j$8Qwf{` z9pEPbTgo125vJ7);cc=;2J0UJS?1}&;^c~QMyvhpOKRg*<4SrTsW!t;A(rMuM=ba? z1<#6BRirAl1^0=+7Y~hvep)Ce@3)!>Y1t$NwBKXAckF0-$ZEh{DM05QZ~dMhj7TB| zE8U#+SC|8O&j5*tP>cCVDZDzhio7DQe3-BLI1Of9u5!SljO@&1wzbK8+8wR}3VDvX z{xnXk=G9sHOtVKo=V<3a$TY);%&o?>V~p;McC>O1s(NKdL;FYJOyM%uSLZpC^r|yj zPj(TLN3+Wxz|A4P*tdfK9@a_u7j$f1q}Kw1I8PqiQkzx4GazUGi)W~$?JzrFB83Zu zS~Tnw6$fUlUH=AgyNM<}APsm3q2{3oSN(v83E$t{^yUiT;+#5KG1ZKZM3MH6^NLy? 
z33ot2>bPqlJf~;+>UnKy3c4}U@-30+d2=E!Mlrh{O;zMRRqnIs$uNfSMrfbM-rV0g z&K;;W({<#ttg5a9PKi3|U$#!e6r0}Mbsg<7J9^`IJY8{FDY&_k($c9k+bWOK!hMNo zzkKGkjv4&vuRQRVx9H?q5yl|{yU{@Mc=(L)Ao&UDf#w6QD?7;RDNJ2_Uw!-Dv`Qn- z*rpBn=qldxFIa(VctzwCd&>)UU8$>N)akl9VDI_dFh!K47VcI;fiuEps=l!|-&8?#TAx(xYD{G60-Wd{c5A2&&575~IL ze+5CX=dZFJ{P8D$`4B?>KPLQdfAZfC_(8Z5q~NL;QS71`&kap_8eq1`)n-a7O|B@> z_HI#6m@$(+>4d2H7N5VV44F2!V>W?Z>!Ehb(>t=jWt%702KWGL73>umE+54|4Aod) zB0^$BUG7edIos2WDN_DpuRkn=#DyFnUkytyECLUaTr?C zzx??yZ%o<8@hN|w^C_Vj=Z}yzmVLIS^n%^O)tfg{G2Adug8w8ry@PkDdiy|5W zw5K&26QK>LL#-mq7BJ@m&Xdvl$f{TgM}-wA6^wH?@Ylh0q3^SQyJvbW4C4Q=<6-#M zbvxw7gw-Jt*ta%Vi-x{@->%8NCh;I|^u41`y6p=$H!q2`U&CzU zAgTJxZod{3g@r$GVZB?a`|svBW#jig=WlUS+$IQWL!4dfWW$QUVH^X9+QMo4>%hPV zo5YhJXCG;rsft*hzaS7)%P(rG$0huXPWUS;p%mEkGRh2>XG{|zHxV#eG!KUifEa3K z9`8Em@S86tgf?0=5YTCJ+xLVz*dc)P!!Lse4Gl`F3Nc_K~KA~3o_%!1Ewj^z;X*32YqMi7siqJwa|l!7zq4luqiI1#R1g}Ff$-% z(XGk%wr4T{`1P3!B$h*e`$Zbf(Rp2J4>=K^x^~_9p;~^KyJ~iKvyh3fxy(B;>sa4T z;L|cr$c?Q4Qw?ZYUPc^}fZxchJvbWmAKRJsIg&bIZ3M5%uBm386(u2^D=RjfGO{*Q zcDV_`z}{eBpWprRCs%rRXY;!ytjY}Y0cV3#t&h%=${dvz9iFPAIELT}6?5D0ASD5s z1_I1zQZ74F5wK=EX!Cn_B1L=p{GSF;TH)nGHHR)pJqQJZ(L%E~5 z{ju`$_wN1+$l}xfKl3$z$xHsSWq%Pm&r;&DczG^*{rK1`e!$D}vJ5KuUTtUc-p`;N1Rn@>I9_`c1m~}wK@AcD!;qy{~1nkSSPAm6*Vg$V%v3d(+gCCjiJ_37)cE#TNM} z2Q^r>!yXhw6~#m94;2v&ddIZNv;}W|n_)jhGpa84>v*I)C`Wgt|Db*zt+9tOGr|QS zyJgRR0*G34OYF;YGc8ZQlNK~5X*%94iBH*rNDzdn^ur(Sq7@5w_zAY!R8UGLV&U3P ze4hL9I%L}5Z`<#*Of3|~F_#8bsmmpa=3>cU&2#Spd=9PN9=TF&dM}PzkZr8=UF7*k z6~P1fwb_7)NBs{IpTdz-ASDTZ=$;Ob*jWm|I?~tGV>Oh5U+u+V% zhQk(aR)1PerL1f8#7i6*T}+~qJPD9fJ}8fqo)Q7E#=7?}_{!(i-S`p^0swJS+JDAY zm{gdbUARZ%3tj3nrh239N&(@<2%@>T{{_oa%cYGJeSyA)#X4_tF=+P5_bJM1IzQWR zz=2(gD+y2-6_#qS6Y0=id}v4bSu8(sjg}!cJ;w!$(w-Rc53A}<{`2?7$zUsrhdlVN& zX&OgeMql%cih@OqI`p1yvYeccg<((JeXA6R$t?WeX{6$`-`EBznoVn>P#cz~PT@2= zhZf`}{ipHyzB9Rid?i|0w+19(0VTrBdJriPY{;4Y2J5Cs^L*BS^Fu|bzxW~Tw!}Dj z8QfY!7wt$`2;rSP_2wg4A8>VUqlB$>*xoB-hz4~Tp87Y_Mpz$;b2_dqQ6Kq?#nTPTF>Kn09_K1lWpX124gmw$l^3pe7VuBKsVW*GTIKc-di|bBlReF;PwX_B` 
zzFq43HH08nffgY6_W*%L2mN+=Tqb7C)-p0{_6rSbp!=GBP-}@0&EW;vgqa>vup9JB z`ev+Naj zRZQE4VUPwi-AF1WNhGQhV$kMU_R!7=CR@5+B4yV;3q_tMaJ|3**DN4K@+YLLfbQA% zI~w>-_y5y(ruO+%5b_&HDb;1o<$I}z^U1Pq$hGF_h4S|8*)z6w_g@f=us%6Uo0+wF zm2Yf4Ej5=cfAp9Qhi^Ndu4lB@wyRq9lQk|bD{}$il)yN32Ar&fBkB_#@{`bWx}>}= zQ#>Kb zZ$L%d6G3}Ly4ZA?$A>GqhPs$ucPVkP3^)5d8%E-Ppw_?io^-E*4Y7Y)slRh9fBEdc zB%y!#?Ei5T8Zg3)PFx2(5I)dlBTkS>-1-Q$HV&J|+pz-CCjsJX@H}b75}Bx+9`# zOlq=(`2oTgvcwEoQy|w2LQupXxy0@v-ww5uO$5f;l+9zjqeoYi25yZx46H`GMX*zy z9-h8Rb9yQ_K6+dX)q!elSi2RKu=1Esp9&{_x_jd| zJ10no7j@nS1N}X7=+hjY9`drdtS%PUWm&(L&ee3)9{cwqDrzUQ5GXZFo{9{m^Juy$ zLEIvv;vRc~BDtw3LJVB3_UTvrUQ4W2`z89Q(~03*L&(@5xtndD7YL<>*AVyv&j<|1 z%S_Kdsv8AmGJAYcZ8BhD$)vc+BO`)SBMYmnayKgNs+ z!^e9ywX<21w2)LytrxFba-i$gWw9Zpoluax8jaIF*8#ir5hwT=X3MK>u#3cKSXQW# zXLS0q5c^P2!58XH_NdW*x01-U%e{KT0*WJOqeX3%?>>%!W&Z~3!12Ta_cdDwyM-aQ zlUef$_`p}sV}Pa;nJdy>=R`49zuL}1ifXG%SRPpf8AmqOVTZLQAx6dcAa%N-47-|~L4AJ!kr9D% zFS@^!$*+ZTSQ(ynIc!9I?zF~FAKksR$}+@lOuBA_2r^x)flU*reGedu2ds9~$Msh^ z*0WbpErYEve0kc8#8QKnYcj8ZnnL`@>-8LN62LbM)GURaBjGra#TzF6Ao&kFS3SI! zOS6e1Lm4Ov{BuF;js66KGiT_^i^x5(4bjGgE$%j;Qf8^E>A(W=hcXuIHdB3TsMhh0 z{`Uf_t6|+eu~~j9M90nXouLz-#oKsDA~(#!DfYnnZub{VW^dtPPU|+d1ZslzFVEX* z@rB3uW{xa3KP%N%=H%pPo!D=oUu0rezcKja^ZmaUJILlZ8%9SuBWE-qm#Vur1kTp0yyU zs^faDM4QgL0f|h8cMJorvm!M}5JMn3IWs4e#p2SZVT@K<-jwRmF_+O^Hzdo^4K*r<*jY&r?YZVf$6Hya^Ng!bSH0x!|c1~rvTP6RN6v${2#E-&VWtxU>U z3~RbA_~BMO;qibKqGS!vdll$*orCn69=^H~{tbI0J2PA1D5CUUmQJA927IiMJC#Ul z)s*6J@W4}Ra*3faq)&TUy@w^e;b$8Q;PMRN;wlcm>lWxR;-EJz={oi-1Ca+j3V7;Y z8!(X!kZpzK820_{=W;3j@x#1n zkUtyU8cbEv?#)Se&nLV1IF>4w%H|l~sx$(oO2wBbG@JW)bd8cgjqQl-Dc%=jOZv0W z-EV_^=zr1O{N=6~{g&!TKOWK)$0hOnDw3-|wj|8j-D)h1fvy?2* zEg@9wk6fT#5qVLlD)G&jTds4@A!pA#lJDiI*)^-|Kb66)n4GIGoxpPUfUMZb=M=dy4d_H5X4FSyIbTj*-B4mUS4 zPg-+ZGyozpT#)pm&1X!$awvN;cR_7qovKXzM*#? 
ziC9*P`TpuNI=`fVhl}dpSv+Q!-4JuUPo_qtd4_x(2lD?7raM zd>9iC3lAq^i;f#B@I#+7CQz8wi$1bZo>Ic{MM`Q_biX@_y>oB931D*^k}5B&I1r!W z5SkVP)Hi}5%%{B^!Ltiv#$BUfl#F!$crffE8Jah#F6R=aJM(Ah<?}sJ*_#B|BQ_5@~!L}l>q$YoKf3;fV#tFM|u{Ky#E{Z@cQ^wll zvEwX!7F;LgA1%O{RT(>Lhf$2A78d9#Ry~;t!s+E~2ZEO!p}-Cr-M$Xk=@m zbm`{DGsi?KyDh2BNMEN_kus1dUfx<+BCtwUXTzmV2k&>2T=s(vGfg}_tfjAYWdk-hmGbnRtsUN@D|z|h zn2lsEN22ZVaV($}%UmT>Rem6Ok`HW|Ohsj(9&S~lnc)#b$tc`Pe;nHvfyzcuwZCim za43J{pl{Ut)D>v$rDT<0)8*gBU5aVtRYh$?KeDqZ>aX^6 z40u(Jj0qn?JBm)=_jDTW98Tyg0+INp2>_1@J62oS8dqmU-^K1%soCGa4{uIgYYbe= zM3P>r{DiA^s5_7+YRSIbF@6p3s+ZaFOzMi#85^+ZM<^e`JQ|D-qv)pw`!-dKbUcp@ za}W<&+NyKx`HXAST|L+hzM0(5A4*3>UxuSMbT3gWJUrZBCdEHlLme*`Z}Y7CF!5pu zTW3lQln|lZ|EJ5|r?tY>kD~?-?7lF3l?@bq_azF3KQ)ue^9AxUZL{VS`+%}P+B`c{ zT1IAddgk?5{if&|b_qT>CtDvJ``JK+1wQj+v7kaiWzL*p{_qef$sD*$nc^If5G+K} z2HL+OqkdxrP!;r{C|D^uacENalBrF{yE0SYXS`RQgQ@2Pb6OXdqcvfdf}c*vPRE%< zR$->uT2`ON!K%ZD@BH~k8|evF!%`A(CX`@ljfJroq|yw4vV#k084cVzAv8xLqB{H#_^!l`(d8cB0vE z6?~ums!z|PmOcGgnJ6HBl0?-j z0uOwz^R%1!T`hCbmv{o3jEmf}=h1KjlB}$$0DHa0Wi+OPdAp)| z2m7@H21phgi<jdcMj`s(L@@b-;FNsRn-pGIsxz41@*m- zul9pD$|@^Ln($MgnfU%Z!$PL<_8VSx06YZgmtunQLR4d}ZT^m=#?s_}C6F=xR{|OG zFSTGO1mZ*$bkCn3z=#@(71XG?-&k8Q%_<$%s;kc09}KVt!<>t8DtRlmslR%Ab z7*~A|5D|N=;STRODRwGAAdRF@CD5Hr7jFJ}x|P9kqCeO_A#WuV4}^%Q)Rd(NFlnXy zQDmTAKlOz|t9#rwF&F1)8vnQ?vP50#@ZVaCkAG<`VrD;<4GlcJ5W$Ab>q_Y}i-3|j z2*kP_**?DJyThz~9YncA84F~~Uc&=%BoSQ--JsbBKhWCUYY<~p$P=#x@n`XAkSLIV z9;hCiP+;2*DUYAp$6LT?Mm!ZB5;3PBXEUYH*&gJpBx2<-+v#Fq1jS_OoHv>-miax56Ef@FVf& z|CMMC|DQxNe5k%V;GT7)6Vt*s*m3UEpXZhNrSI7#my0gzz3oW+V7Lqg*_&^`W($ZD zW>d50c9csUCRJY_mK-?#&zz(a<*yZ^)y`=HB!ZweuH{}Zn3@Z_i)Z}SMBs64+)Zok z+8c?12`o#=_zL`GgB%NfHG>%;Ry4t`LJSLb4x$<&A}8Vml323_`rHIh!+DzedIGpl z#DWe2and!dbrNesud&v0YK9ACVjhhu3R6Fo=!aG}v;(rZy{gAsG}Fw0%?77hnB5iZ zilve}9j6Dw5Gbb+<-z~M)9>e6rzfxF7g?2!we83hWQ=2!NJU&WcQ=b0SRPt% z&wMR^PV+e|Z}DDz5}^$gF0it_4NIp$etaIQFNXY!7Q?1L3jq>BkgWtT{|HXP3YR2$bEt9t->}Xv{=cx! 
zuYL>3_*ZWq2M9j4;c}U`=W9`JR~%Q)0L3W)tZxPKoH|Zwla;`lbARNp)W+su#!pzn zXKY8-J1>PQT^lsyT)r*d3M+U|mq$NSA>3lwoQkrW%0klPFF^(_sehjz8plbGx5qOk z$C9#00F=v*hD>XVDNBsJr~admBwYGOA!$8O+ys`N9Xd-h7{W?(vgDQ_sZ6 z8>kkuvV?1BkcH+u*#K{{LfPD_`zB3C7uoK1Iyu6fjye0hv(v@hJM2yo znwOm26p=6pl3WJWro+1_p-OO0p{DW>Asj;Ls`ccH2|SMK15t|O)L375h`1L-RoR&B z>S_J_o!(LK`>Uz?TUpoqJ2<(qHSu&wHgXErq&g_C)Ja+h?LNu;co<4(e%`KZ6>Jds z8g;a6G~kt2YrUQOyNhq7j2!pT`psdL zL24F+%Ek+>8zQJPtW}r+q55f)R_Bb z>NTh*cUtZo9@E^e`Pr=PSmO(ww{*V2e*OfM#MX1XbImK_EtlBf$#a16Z*_^$6&}(3 ze%07G=6y9Y2C^4GCDI0b^FG0Tk3a z(oa@jh>>xyiP~n$dvnwYU3=udeMMygJ*#`<9fU*<4QbOMJSK8c74Bk@g&O99w{S*+ z36kP?((Lf07QBN}CeYV4Miy~pNDsT6S6@-zG`xntw!nA}2640>`X7j(vZ?gG-au{D zOZ2`10m}emtyr{CobN*S$-N%2s2jAj0BH|U5AVJ%Ifz;2BR@9mZLf+2^oCxk_x0>@ zV~PveM?@HYsas255P?o*-WDI!7SK7FIPjI)E_M0h$}MNz(o*G9pxWLQ%VTE}puxDP zM)Q1LXYAkS_`YDPcKz!6mPG0r+cCG=R7Uko08+1!;XFrynv8Mt!x5h zEk1#=-R#S|`4c_?*|fYKQirtK!)%)Xhly|ZUVEe+jOIR{k%mqRnBIP~auWM`_SB(k za(vD{0P&NLKCe}8s*JU{%%qiXjweda?&|iSh(EZ6?sQCm=dStA;7asy$4?1nY<^zgVI$w3W zq8&?<5+{H@d7vHWko@UdL0y=yD4QI}_kb!AFSCq=w59Rw*Ovob4gCHqLT*r
i` zWM}Mdmy0aa^79i>OW24roZ>+gx>G3s9h_BjPHhoZ_` z2v%AfjIs@xH-xecbdYV)wP@Rc5x+#*R#SgRuCQ8}vuYVssQ-(#w~WfF>%z7#Lb?=? z4ukFnX%Uo0y1S&i8v$vhkre6f?v(D52I&ULi+C6J?UV2G{`kiD{xF1r7kjV0*IIL) z^El_~rZ*qo-<{9*P?_6BG6;vVU55zL>NVeKR3FT7(%w%W4t`G(9hq1!qK3L{nV+ez zNrtnTjceN46306h0aF~}luY+}7)NvF>BaZ&{3mB3Y{>4#8l&HmmS~r(y$U>9F2&V1 z3{52-)OqRdFfq?pa%MUTAp$bf<`rtl!}ta&&2olAj^^G>V&9|U6v9Pa{rUoxeuT4rQLK%DSkp@hczwqfv6?v&j}#dT7bdW-ihVqmwH!GAh8Z$YK9#XpQ&{zj!*C-? z9-ZL4!j5S~{p&L}mTxy{gsR%1%o__@+?bp7vA%{It+=}MgbWLx-=o=xX1h(#^odS|ib}63+`XRL|>e4RLu@ps)J~S&>49)+|RUcsmzMp(!j%WSH#I0qaYx3Jv8dR1vow>$Mw-kvZ1hK1mu1UN9wTYb}o|gT%G4>{jI!VGL!-%W&1SfcY}`t8H*D3+RDvZaRH*+lHSviLMoV!gSvH5 zf%T_eSYoo)n8|6pj}uUY^5nYLKQL2q4At>1m4AQa{_}zy2c^w+5k{*C1DlH~y+C%+ zAe%Ze%U7q@;AHBhM~#QL<4@3EH=j#c(pIiek=#a*nd!{xCTYGh2;B+B8Jv7L0EST=B6Vv$H3se zNS|HF*6BFzL^{pSFbTzVJsl1+RX+phDwe8mo(@UM*boQJ-PfXfp+`n-ZG zW~|I2a5M4fc`l(GN;9eJSfwa@ogkXPZxE<9+%5nCazg`1}%lf9e` z8|7D=xF*Ap`)lipY~4-f4?l9|lXbYW#EotOp_+-ir;w4UDTGZ5D!yqx{Cg-jWw(41 zek7MFmR6lgi3-)^ho#K<{4eXdk6yfZfq(6hyXB}QA{I*QAu|T`#iJ^FOV9yO5iouYTlGEBi|T0&O0!kQw9J@^6U0n*J?}-8Yq~k}_^F zGJs(xux!$Lg=Zq+JUV>{n(~cx>g`?QH`X+ox_+5u;cPTZAW>{Wnnx$M;es4897fR| z>V76M-EQg~3ZkxEoGA03HY+Et(T64!QB!6tb$xud`!=BGn+1cF3~ScTZ{-*F`i}kUu5I?LL`TOF>r>Cw>;7C z-RGO{g-?Y(n;D){yS-#V+J!2EbJEk|ucuuT@*vVK24}X*(G!j@o0fTagF&GlBUxvj zGuAxuX{rgTj!b=#5LEd5oa53!bLpDD)P!rqnchj&ka7K+YOi9ppYVBg`-#MrD)=+DU}|=`ONf)`6(XVXCEB$4W@ovt_2HUUpAoflAxu%5Upl$NPLR z@}R)Nz?o)_n`l8QTut(|bjybj%bP|%`YAyLCcE|ITI=xD?&!6C6I z@5|($ao%Q&|9lrQJNPU3Ahg`q4Ry{3rF7|d8pcVsA;6MY$yzrlCks8B=Q?ssO|beG zDdPT9V;<_*fbuQ=i0B5{qzJSX9UJC;0sCjuc0^9-l0=&vN$mQEW!h?hN;braNnxG5Du?5l*#JsB+`c zc7W@?4x>jOr-6A5bAkTxRCd&kMaV1LHH~x4F0=QA_h*GHKC>nnQz=hwX?`HRNp3@o zSR2#s&yun~qnoViqRla#%l8iStgPC@wpOB>KH0h#!l9#6yZ^rMWY@c#KdG2__>QI+ z!!kOw@|I>4DaF(7QbVJZ+n!uzeN}1f3_jSviCXvm&@PppN$ARJw%dj@sN^!2DVB&<#YGg z$|=)6Mi%vvPcLtN!T~jJHJ=$B#onY^-ada>7fEG92U!~y+rFRaWeIB$#2A2lNkn3X zQ?$tUxfbQf*T|1v#l}Z(y)7nEn9hbgYW0PyiSjoqK>12S7?anR>4He*|LM(hAolY| 
zzN7ZDp+;MGqhK#FlRFI}RC|TJu-kN9dEuuZOb&Zg#phtX(>i_3%*5ueL&Ph_t0VwV z<-djGY>37bau|YX(9^?wHi!eP**Bku1g^3G zk|V4~w1Ctt^P~NzMSBooFLWj?MExMtzz#i!rqM^!xbZPh!9xB)Pe4CTQ6%6MOlk>V zZed|(XAO3_|I2_^PFTsC0DYsB;jSI=8pYVPr7L(_fv?Sx4vwJOtXyC1nYqM2-tUN+L;BGkaovvn{54G1gS;&sNfGuf*Q=lK0Z?dhSD6Nro&RboZCZ_XFtE zdGPrCr`Rh?s?h$UMZH8Q_=v39cU}%!vBjR2&{qY}%8NUi5IpThauTeadz}_~%k7hA zZuq9vM;XU^-t{^j4u@Z;LozP67GIQkWAncGZ8kkwxo+qu-guNW5wuUyT_*Rc;gx76 zcM9XeSsV#NhMT{-$M5m)wQz@0&mqU%H7D2K<*464^v6S`X*!BkwZ0G^?51Q?E7M)y zmev2dfQnicDO)TprD}XV`3>hYGdr0$xs<)c9^4Th6#A3W9-UXC4rix$##ScwElR*^ zi@C{a@+-AVy=wL2y|f|ue~i#)vsu1n=py2po`H;&(wrg|21+a1@(3?hGttmCk5;_Q zHNTfWF|c8K*;^RN)V)WxjOTQ9w0#MgHfMwgjR#4~6^9mS=cguxOEOwxug<#)9zRW0 z>xV4xR(wc=>6Qr-o=M}!;Bc=fNrx6+F0+%^UHVQQZEeY>S5&Q8g9Kr6 zw``<|gSncOp1*;!1qAgD>&-7c_ya4hBXTRqoR%1}xBw(|m}*Q*Btay+rc{uiYSzTD#8&Kt_nNU(V_J;x6}6?_&-#}g zmxXze@p_@14NvsLW6`us9p!pwlTt;dCF2P`$9yqTv;A!@-(MojZ#`GEo@*t=M~*5z zGZVhs>gtWl@;t1>isCs7jSxoMshQX`WQo3(*r5Z4xObmvo)wke0Cxsw)3Pph)Sw%svBXi0lC8;$$TlYkm&(OA)=E$O zWFsc1O{2_(RxuILv_3Rv^e56J8%v5!k2fzC{-7pjF&!iL8>2^>a0Av! 
zWveQ5BlQ)y#gA1+#|&r8TYuQ89xnSi^!x*MXP7pzWz^AMqX&M(#GhS=U?K>)*2?C_FgF#{cEde2MJ81_gnkOgx zMJ`cCvEIWQIkl#on6$KiLo$@giwb^hFx`_bN4?y*8gnyoIM%_xBh#=+K~$~4S#90B z?zbC)+C3ePf9aB*ekr!5+hQ7&)#F#TYamyow6grozZ{we2K#P%9`9?~=%(g(M%74- z(dq)rq1z1mk;F4gw6oX=70~77Gr7W>um!PPMOt=Fc0amxW)D|Y`IP19qbt{HzH_#E zSKicCu7_5kx^t#*#!+M?hxw7Wi7*~I*ULK&ZP!f7Gc~imwRPq%*>}BtQT?YJl~wOb z9vxs?<)$jN*~~$-`|(o0U_v$ps6wCgtY^}Gp;H_0qo|d{f_x>GE)yR>V?Jt!_%-Wp z1Wyr?c3Xk|%w&N~nErI@BQqmNu@bSB*IGy$T!ULRXC3bSp-Y^%PwA8kh0G<*#W+BY zrLg<8kt29%+dFYV*gdR)0Di&* z3847CG)T(8@g{|b8b(%x)g0;SB<>nUU~YFNP?f%@rU&N{YWQCyzBTBL>%0{Y25UdIYENMuGEn-U-1-O!&8qw0tD<--WqneIuD zc!HbMmV1e{j2X=_Eyk$IWjTg7Hs-Cf+g8hXP9>Ylf-?GT0_j&34sklOMM>@2B_}E< z84Z1Y;32wPOD+QBviZnJ>F~!#%kSt@W^}K)MC1fZ#CCNwzRjyP!l_kcb3_RY!=y{8 z1wJ(ibr5_KvaH#`^U=R9PZ(^~*NeOUc|9L{$tXt+cElqLKHOEj1Sy;50AW<4_b{I;*}XCG z5S@&;kwh*wGb`(Jg>?e(lbxy^*L~DRt|oE4Tw0|^T`Es5mAiimPmIlYM62B3^kw`G zEpDkzS1*YAD6~VDS0Xk2j~DO8OAutQATc*8>C!;)W+1J_bw?-Tp!*sTfk;AAa!N=V zPDP}5M4)>a1oKskrO9Z?)WWYI-v~R<1vkKBv?(VVf&OC7YL?bm8~fz&H}gD4tIf2} zM%%{Ag}u5AH#g3r=)7^lCep!ZMe1pq(QCB=b{dpLwD#ku7v?h$%Zo;WS@@YJ9k_@i z=xs0dr&;OiU|RhzL0@8sF0m24cg5+z%lP7i85Sz)zEN+V3kNa+i3T09TwLaJt!qzq z?Gn6thQzb|(3Q8^j;3#=jQaR@x;ve*CEc)(SIMJ8NS92(cSw!6Gi|1t0#v%G4|OuT&hndcU+Yc2BG%i>td;$-37EAfw$grez0ufZrN7TEnWMxx9ssN|6CJ+!BLMlumwCZ*<_6 z*zTK^XDUOfJgDHgf7LOQkr074JB4}h_z98MX~Q2NFrTa=p+Q*-3~_hfE+f{tA2Kja z5V5w1k6Mhi?w(Ofhd7+Qs!M{eqBp21%gv|TIoru(_K0rwTWv_ddEs?+4wif4fXIj0 zQgIdwDupk93}E{y`|QV{gA@QHd#(ICVj%Ogf;&yeJkZV_8u=@Uj#>Ym41F_iyg<-; zwInkO;kUJYO?VX0R2IyrGb~BTm(q~J%D+*zW`N{@`#F(LI%%WH&rrC>zP;jQzvx-A z#Md|6&b*KBzO2h$_LOFt7w?Wyko=Ad*{R+s+)xH3%w8`Un?`sC%&d?f9=$OqBH zh-)eKp!6WC@JlSD^s6jmhvvmZM|q80GO)|RI>VLy#(Xu}T*Ggmni9mjQv2&ulh$=2 zGR?7{{N{=bH)2Y{QTaI*8UhOlZGZF5tayOr?#=VZ@czOvF)^(t#T18b#xv$V_9HaX zV{p!JHeLtx)XjBrVccM$@7lv#Amyr+XUyq%msgV697T@w9G5~jsQMYcVTuDIC37$_ zf>xSr_C?A`<<4vqJgy@y);VfaeQReTa#$m0yNq2biSC-?1`N52Aa#oJzQB%{F1t@& zlEN}~L<%JdvWQZFAK<>~+7zAC9eEm*%bH=^ll-KNUW|Vj!8V~>zGTXN)hakB;;of- 
z@nI6entGW`yR^En>7ahLX`7=&XUALgN^5>^ZvW^mu}Gar)S#PnI2P<#Tio-$U;wFc zP(B_6jr`Wf9?&w@O`IJ9N5rNnSz3ugyoCqcPjYFSGX2bmJUCF)(QWfIVkgM z`R{1yT+$sl)6tFkuwhg79-LI4$wh!}5^5ES0pz#)kRvJleZ)D6SmwWJ7x{nUY^!Dd zg>ips_Ya%c-+3?i`fsT(@`&+0Y-yvpugL@)SQXXX@8A1H3p;$|9s7Bd#w`dVTSL`* z=gsG<0kci~QF93PQ@!M?&}TR>625)c5)*qRUs8Pk(KRm;yK=S#^6?c+pbDDl!Ym2| ze*~k|IXv-+G~Z=^`Me+41k@H1C1;*_=d0raS^UJke;|rHdN9qx>l(qcw8ikPA*COs z1~x!P6nw$1``{UiB*|2dLs+^Hz{jLQMQ$VW2k%O{o`#6k>*3w@VYi~R3 z(V1H;yF+w}gm#CNYsEAb#s7eZPYmNwK(qvcZbBl$wpcs-Wy)?Cz zL*SWyl_t#P%C$YJ^50@PYrV-%RR4o1m2KT*Ed98=^-73H(DhvEo5)pXEqKqv_-drG ziN@r9jUgQO^~6uLGUWM?yyz^pwmmRxRRMW(YD+ck4xR8VOR^0A=piC(Tn1X$mQQf( z5hC>1ntFVDpRG-SZHPc(`g5vs`PhNvhi7DnXA*w_(Nfo}LoDo9t*leVoQwmPNipa4y`PZ=G{NFoKti#g2EeJ{FD(Z~e);0uIhGvTn`v*3nJ^cl0-4nf~ z$=JVHNK2&@=jWpwyypUfu+&mg;66^pve@4?Ml4D6S6FQ0nM<3|=N%5jJ3^Ow_*2o! zi9|ptS~kS`f;7iITw-F)@LStc+1agx6s)DpCqI$=6zK2Yqfo2#^*^k~d3kECbbc59 zQO_Vo0rQsNBS0It|_c^b>T5^$tjsScziQ8r2YpRe8qvQU-Gqd9RR zkf6m7{5?tP-F^WXpN7f{jEfZ^W3 zr27rB48rK+gLmG>16Rl{_59+S4V~DvxSxl@#5ZO0T_{orutUHLW#ma=c0TO`&X0_< z9-K>BxoM}#62a->xl)mbb#N+M;-@6vNs&Xmr1Z@rgX}+bVmQHC?t|&q*z@8$5A`|Y z#!3v=KU9|c25#7cQ<{qL8T&0E8*eRsmIV#DQn#*iVmoI*=+KeYypb#JWc zwGT#b);=dx=aP*c(b4$^Tle?a@c3x>Lnf(w^p9X+&03(V_ouRsz=11A%X}xZE2fyXc<$55G#wDxXLK%`lEh8%%IzB%B{7WLx zsYJ1*GLW+~-J8~%c4 z_uV&tK8&V4F%>KpQL1G^yi=TPed;f_nvr#){Q^1@#GGrL=}`1xt^s!qm*3b>K@XnU z@H{H3{N;%+ssU z&)GF8YWSl98ZuIqjfRp$CMkYO)6}W-TOnTDRqT&IN)6A!z1efwZ68@4@%vb`2-&Lc zzN^#n;>@BA$HvxD?ki^y6Ki%|m+$IOufRDng7 zvfq+peOm8HG6o_&IBEl^H_v}FSDLO9Dh<;nDP{6G5$Ae~F;c&S?I3hI{AGL9Iq%8e zw!)fi7gw^Qlz+53*<}|pr%AOrdh_ntr&$@eKtV{LiqD8=>)mF%A2XRAY`|#DO}$1| zA0H2c7zp$rUvqri`^x?WD#?*p-NL77BxS5dA=;hTjk0no010w`gX|3~##bN|}J z8NazU$KDbcU&o}T>^GHC#L6{EQ~;89f<7e)aMgNV!GKYB@&mmW5zBpjg-0x z9Oi-Dmf=W4JCPge#qV3db&X8!rfX&ydht&q)$}l&>eMJv?Rw+Y7e1{h=`}&?&k6%q z%01(SwdPF}R5q7TmGYFR|NYe4ood)?!BWxM7{|Nh#vzRe zdtQuEcccEJeab}4RdbYyeV!$TTudLLlou5bRdfV3&n^|gA zR4uu3)SY_MrQla)O!f_|B%WR0dX0MbFlFF;n6w5Fvzp+e0{L479OuK>7G7BLL-go 
z24;OLq8hw3yp>~P>jX+%)nCA3IUg=CXf^z5d|AC`D(hx^P zL~5V)RkpNjg;&vG(}0)gq!spD^v3qg<&&l~N&LFnyh6jfkmm~642t36v|K`sJI%CU zT=RwptA?1~5NzaQaC;=yZF^G5WR+Xi3D3g?4yMONP0% z=Sih8#;K(aPApu(a8g}n8(clX`u+VU{9JW1eYDCIwW5M3qJBxRt8wODrsircXDu=; zWeF)*Vu-}=ak{EiW-7eA_523Dzul{Gg#;nRgmk$4K&{-FF(DjM?Zmeu~T;f7+( zJ%PExF*pcX=|W(56S`>dI{x*y&y<7c!<;`5zHe3RpqKqtEH09&e`2CL@-WEE?TglMa(u>&yM-6H%f?Xc0`KhRbi3G-i?qZN(OaFua`mugV@5xz zY}P)N&oocJO7}X}8NIa=%qfyyvT){P0=r1*tsMxZiGXeIb^)s6He%x6p6M)^jr1-O>0qdOmB^Wtz*5N%6XaqX8T zSnjy8b2>`mb_Hf-W;2%-le8f|U=WJC?zNHR)!IY4D2!d{aoe!$U(oGxp4&#U(y`{q zzyVE$Kr*ZE`_NvLhQ{jj%g!Rc``YzrGB^{TrCYpZLh))JrvtPKW*o9b@&An( z1?BuFX0*TfpO{f;uIK$FS7EZz^_DrzrmkiKGHX9y@%yXAYAH)g#?H+goTUx=bjmJvj&(3dE!(tmq zC0q^PBUX}0HBB3T+quz?|F*8x3MFCwo#^rGIjt$eAyruD#X78M+5E2%^Ug=4a|4?8 z#d92#uA;GzaRZszo@7U9=tAV58}kHg@bIsmfIhSh%JS{>VXBLxw&-dxkvQE&8ZXj5ca~ie;+%)PEqD`plNxU))1p=?w2#39Q+i zqaBspcT-l{C(>^y)T99Hsg1*{!?Sqc%oSp4Ztf4B2`D#)-Fp&a8(`BcbP>^iD%J(- zX!Oq}Yk8~QZo3J;FJB%5PNLFuU+pjK3G33jZ1=rbv!SKxtJ4pU84Pj1YqD+jjL{T4 z+Bdf&?%43xU;pmrX{U8Aot<~O*ojQ#K6gmiJ6saBPy4Waz$9$=y2lXAm!?uTlSj*@ z=nS}iI(x~?3X8_0|2pidDB)kyHrz>Lfs;GsmQabEX_GT=L* zQxSNn(|(>TuMMvKFX?vDucyQF`9=3HC~Ji)-_nDMyP-3VC{3*qt?ejNAP^_#ORQn|yq}O;bH$R3h)P^TA%HY+U-`aa~Fin)fJ&PoX&w1POqRVWKkyE(T zi@oOP%@?)eUh0uS1LD58is?%Q1tC+6oi|q#`+a>9YDp4Ya$-s#V?y$4OV3<=HN6Rw zesyKM-E8cSknfm>XAC9@61=J#rFGeC#9i!6z6Q{3DUaqc+q@QslrO!|+OL}(N2MpK z$@I+(n-5K(O^37hLmel`8Rjo3ThWqbzeLg{UW+pSc*?Yj#~31Bysl0ynOFFJFESGH@ z_1U`5g}$kIFKKeS3tdNmpSlS3EP!@4PkOEf#4~-myB}=!KgoyE&Q)smarTIEp%_fP zmKnGHYWOs4CoK=J&Q3VYw|BQdhy|9LQSTf04 zXCcKffPS)j6QCOUqN0D@g7n*{@KDX`y{O!xNeMmk8q+S+H0^|%JCz1eEAkf$Ri^Hv z79myfE|RVpKf*jsQ;mlw@4u=Y1z%%V)lS^QRFq~ew%#N*$LBoz#QhzUxF@SAPO@%e zM_5mae*L=C-m+figNu7(XKD?QW9K1M-g~;%SnFh|!X}1suQdua1#T5tqAw zO%_k+U9W`z$5qzJ;sjR@ zA`4CyNg?$CMV<=DpN9KJHoN$y zm>5Y4rid3JiMc-LF>*xAM%`gc-(pEcBl{Eu$lsryf9~y*$!4>p{&SSblYchAzYGv^ z|K~U4!hb1`A;Qa6%VnVz!<*mzfD>Gm+_d5zR_`w*4#6m5Exl0j`ICPax=IyNsfJ`* z$M?nUn246GAL-uAQs0vC@&q1V^cYYP4MT8~Xw$vkhwsGg{6cOwg&a69B-C?0qK04) 
z*TxbnYAB8DjigH{QFifi6TErZE+e@yefsI+bEKelxnh;qr}+pR-nn10*hZTc3uN7y zxN+g1Lv;E@F0wH^NH9-X*)FTfuuBxyMQm63L>dw2HTR7ZCixg3_>v=;oVkk@=E zSYIqL-CXUNGU@r#{T`Yf>CF>dA!?f6t)%$NIComs9u{XiE|4bw>f-^K zx{d=^q?g-E<-%gDlKPUT$F1T;F6Xi){}{t}RTct@sLdB`E;9+y8k3y3AN^sO;kbWe zd^`-acaxdUiuQ_3!$(vZ@?9deHf|@>gh?|>K!d`{$gKOffnv>Uol)CG*D!5_k}p&ij?$V?kqfA&i8~;8@`YqVfL&D4VwVZ=fvh8CwPRbH?nT2Yru*#jQ@2^@T|3{cLJ787UXg<;bJ8MBH&JfQ*IkcfW`(;6*JE1hf?|MGQW)# zxs#3pT>tyf#^Rw3@t$uO;EzRtT^VEh#9=v%Z{F7)&NV10Li1**iZ6mhF!0)B$LMvK zV$1X&vW<$&7z+$pv-B|K3T-UC4pk%e_9=y=t0C>U$8I8kpQ5o8svFU;mwM+YRld%X z@sx0XTcA%1GiN8{F5wOZZF~W~;L?0zxUd%8U%P_uknSbvDf3kH@Av)P=VvGF9ILOv zAQWCsq9nKvn$Ao??nwkSRWx~^&&F4pQtvmWa*Ip;8%#Ut8{^=7uUMSopSYg}GfYJK zVfirzvVC92a*-+@F0JTjpO}Ps7OO$C=t9@HspZF=#Dt-WwH6YjFk?m{z>opu#?+-4 zW}{|M^|$~RluK#1FE{(j1WEn%vtXr}IJ*$4M}HwlfJj`++M#XTu8m{W@9Go5rhq{F zSkqbtTuO`sCXkLd#`Uh$+4WoH}t*p^d!s;}PTVcB>|t+DppjysA`6;pA_ zrn{BEpn{;)BvWn1&(8hv|02^ekyl2RvCi?&b+;;>0Sg332_9xkDQOMmdhl%qNgOF^ zcX1@B7gA&~Xb{V1O#*Um>HgwS?|uRU;BDu!`X~fjhcC@Xg4r)uRY=Vo8Mw*6-Dq1y%xg)Ns*i(3h(`RtT;jHU|dLDiLdT@{fMR7JgWN zGT&8!W@^XYg|JPayBq)m@7Xqh9n8T8;JZr?0W=*_1eLyTW#1mt#?pQW(5QA(qY@fH zk)w^Z39n~9-ha~<>!hZZT$-V$0jEgI`7U2EzY;t3`r0jq!Nl4H%$`>F0vlWmCz`$$jTK8y(DH#d=m%lwVj^Fg|BQN9Ecyp9(TdoBUi;-bY{{zlYI!C@>{d4hU z6$MZNviGF{jU^o-+cAYeW9g^-Ig3GoSRUoe+sHCLEv}@TUDed{#>r@vTKh9zJYXs6 z#iuzf=|733EFd zRVSBsMD$rrmmTN`E)gf&+|bMuPPAf!4=z?bQ1I_3g!II416a8bqXnhfCk$RMxqI>y z{L0+*$-0yKGOuuwZvVkN;~3~^R#voPn5qU^V(wss^~{6+!g~5NsgtR@1_DT=-2#qux0XP5qqRJTPn^) zC-ecNLl^%8(uwp^SSGH1W}NSI$(zn+xf{GpK4m9su@`>8@e2RO@uF0+urHQ!r}I~C z8@SBpD`MYPvkscC;s@gxDvZxqz9CQvM2$c_X--d$E_~k=^q2|hX}`3?$ybSlAI|eO zw_T6GRCwt!ruy1Iyc$G5U=rB1A6|5TnmTa%od2xJI+||w)hLVI%bgu?aNv8_jfUcoqXbV#iwxbB##SzKa1+*r5D*sWo>fSCP2SKU)!*m?4d02&&i#_(=fsUJ<-s z^4@5=QfqmU9r%mX(O|V9xRI-Mhyz2C;G1&mjI+V^BdrHQuWPPUC+y^h>#C|xsio^Q z?4h$ZKyN>mmzTeU5p>a6#DqUYH1hK<1I?&2 zz#mA%r%^^<^ro4?F$zG4DxYqvTW#Rpt;Q)AeqpfbM9z7c7@ufF$J^&9!LCkZ5bdW_ z)~3kastkF7`2s2ILx>o5B^Pupe&72g0*5WdG2->EAv;;0V2c*BZ~(puWs(?4Ci 
zL`L33Vqy}6;Q~BCLyXnaly~E^lfk_j(cumB<{-Pl34}oJrldLOw)&pQ z`fWB-k5&&EuD)2{KG?OMJM+0xBS2h%mvVsTFl68PZ|E;7a}2T6|K;0eV)0j!k07vu z@KLMji|Iq87xe32ilk1z0SlrZrhOW@hO9bp<-h{Ge%tcXRnd$Z54r}?a^1lf{>OSv z4>aErSXTSw595t0pXobYWL)A5>vB^S#QihvYp_?y)b2idqe<3J)H7<00Wc@ms$j@W zrt-h(4Y3$!&wT9kqRG{n;{oVXGPX9x!*>F;(k-ER`Kp2;$S4#P6eIV(bw!Fw8TyVEr%wMQ{M-otBK&-QIY0b}k%yU6 zfVb=_Q949HdVuW<4F*kszsNqltjQAlC5Wtfw%%;64Dn3&=tGO<@hA|zOK#}UpmrPPkWWun6L9NNx|12L&i9>M?gx}p!ZTP5 z5;vchX1>ZJ)_t0u>b>F$!h4oq_^%BVUhwI-K1j>?60yIgZsG`lZ8nOusa!MIz?+ZK z(hMx|v-Xgv_DWAv|0_$jzP^4y+&^&rcCoBR9zFM*7z=FmnWy3z7Z9P-N(2D+ph3}q z?^dWvJ>cC-zdKB*Q8eq{oZy#pl7xC`)A^?wwc_ab{TqZ{ThtNUd>3{%WI{MvP9RQ?69QV=(lE6lVFU=p1Z7!q~%boZ2+ERljh zRQFdj*-7Nnq>beMhpkq9IOEFDLT9oH@eGS}n~+c7&H z3^V)KALy@)A}JpDKLGh0ztNqsY^^)p#JXX5mne)Qzzir-;mRjFz1b$K4oI<_j%TX1!5!WSP5|(TlaaOD#bepaJEc@ z-9_KpKhcL|pTB(#Xtg)STj6NJeA!@{=nXgXBS}31F)a8x0!sREq@k57iR5;vKGxQO z!1210$XxLDEkh`2&T^#i-c=euJ{cKek1ZIWhr1YBRSSWKh4~pK4H&0?v5#-e9ZIb&;$@RC4b8QGUv;$z+$6h00IU zLW`ie4Ve`PuOFx5H{*e6{n|A( z?{quca%yGy0L#DA!lr*&Qr;v-^9WMJ8uMO=Le1Lt{6AV)!6#A#q~q%H8g&3PUe%2r z7_#3hDN!sqyU&Ahny@HVJF2adt+jXB1=XG_!+)>#U@PPRu-?yo0$nol(F5Dh+eKgd zhcE7w_s8fQVBfyD%DiR%8+Q&o%T(Iv&aQs;m(0mIc)B~Qf!d^!Hkq*npRufr2FG@AsRU~o#3yx z9LpRx8S5IS=f(iwmDt7-wUxPxje(?S?Kwvxe{!RmfN2)j-9Z#;2+#`UgRiw~J$c$d z7J2(zmmlMevHmn#32YIAo%0Iw$QiUlQn#tsfzlz)p!A)Trc!@ptF=iXNW=$^`=C^{3 zSi}9c%v+eWKak>p!+l9;v!;>mX0T(ky8C7#()2PmwzacHXx8qE4YFK^ zU}|nKoLS(EI*Cx&GfSY1YfcN6W!JVe9Shd=mfPnejtNz=5uuYsLyWCA#GOW&yHz+Yd}ZqE2_5kS*9HviD3~0^ln$8RR!_tJ%9-U z2*sG1I`yDW9;n*=ZukxG^u!uhX$L++Znzl%<1^140Vro93jTTTtz#tU4EX)KlAE8O;YlKohPyIw{(jY!sGrtYOp0#UIs@or^kM8*JB
%yZWzx2mbpe+`!FfIbfZ7~@X z_ZCb+@-=m_fy5muUKN*5wAMwn+V2uYeX3#sZ7%8E{hUx&u9-7}C%6+F^}I~v{c1t( z>PcHvr03eg?OOKh{@n<{^~X&Cy;(Wr03C#L?CKWaI38dtXtgPjmK^n0O5P}u(&SJm z^pE^rD>h_{c}~_pGV`MtnwKz!XP=O4FaiI}v!y1wYvzdLe1xR{N z^!(Rk8~L^HIgk2+wrH9HwAj%W*kn5B99-ErgMdyaH+)7XWMll7&h^H_L9{jtG#1uh zyV^{f1u~ z$#>EhgR5=B5r3-P&0*G0WmyuA!+8Pf96E$y4%ScUUq~$jKP{V0o#T4n9 z_tjAwVo*ab(0#S@=d)v_zCi(?Kmn2jqWbUqd#a2(8G>)({(ni85exr|RH>ZJ24sI8 zUNu>(`VmS#NVMWwA#X|4)y`C(Mhh;^?{OZwO=i_EyctOrHs!kMtR;=pAuEJH#g^eM zeKkr96;K@ra>&8DxFsZs1NzjE?#m9s)`}MZr2ArA^77XSKcEY?lypq~ty^ARgjhqw z)p?8fL%3msO@V+hfTu<6H_QrgKwt@QLkpn(0)RFJ0D0eUi^bR^;rF2yy@1Ci@E3UB zxLPjF`{;J`cxT+H>qft~U&hxguyS=0?Cx%{U1|O!CNo)!oS{h{(PsSLFf3I!{tpbx z5`bYz7bV4(%g2{m`%T|q%@Qdahl0=!xZ3=@Fm>U4Wn@vdfV++ACO_A1jQ2cgv=Ync zsq*{c#j0b&&KBK(k(Y3HB-(wL%w>-{eaM_i(-dV^^=oW(%H7fxq6zRxP_l%7-F}*f z?3`&zN8aQ;Hz80BNUi2gwJvyniu*-PGjzsb?%Wz!D#%tEJq-x%rJ2Hf_e0%bP<+{$;X zK@!neu2%LL0FsYG07IID1F&Hg_dH#?DRsLy9*IPKgG*wp3zK#RlxgZ$|L>;+IArDN z2wK({E*{oB&n^BCgKu{3u|cFUm?97QDn2NMU!fMnQFAPn^@w2RqA6RlN*7EH6Lhu& zhsc5x+^xncYl~*56K;c8ow8?+ABEXEn3k6G+?R2Q4avlf8q~iU6YED()^iolb6XXK zjh7d@8&tcO701DAy(YmTiY7mzmBFvLVk#&iDxOXKmR6To8@{r4>8^*pib;w~go$}h zWYNRYw+6d@+|+M;MR;JQ`(Mr>2F~|UJaXS)9%{>5fhX30kYVneU4z=bX zrDxC6g)``yT>~neH(44abr)3j{SF&R`TgEqyz%eLe^Kg|!gT+%_+xmkE*#6&1`6d8g4tmSe~F1j!tWF2B-38?wt#n=8nY~v*^NH*PEXuP{|BjF@j%mMx@mui94&V<*b4R`59{_VqJoC9yqy&9|I}|$a1WV)dLz|QKnEdHR_iaiM+Lm{^JU z-%+LX@RsNG)DUCoeq8NHwhiPHaq-E;>3ClW0)!a7afS)6rwta@xRbx6T15cijh^4w zQdZ;}9nEpVm98jwb)IZlxLq<9d;$KdRZ-PK$Nz^wCC06d;~LX|#35$!=SH%PpiO~> z65XHqhZ;3^BSq}D@`ui)w;UB!?2hW}J{D5!-$l}1iAJ_U%{FTo1Il%i;|4?fUjBiC zcY9xsK7Sg2$!w=Gb9JP8z`3YyYqWdmjBj))mMoeG{{D;b$D3{_B1tSyzd|`pwRUeq zT_@{B^@HMg7Rp@V)owQe%SoX$EwvWSAl|UCTc1+}gyU1<&@BwY->i$KwyNz zKzYtPimumT_}!+Tu)@d82V8=DVW`+AJl|2#^Z?g$FF2^;$Ben}F=E!Kq5A$eE{Zj% zS)=uF=VQe!9&wdc*0pKHh{|{DPQNK$A_**A37T(P_Vqi;ly|W1+@aibO`T=+VmpKN zEa)K+R^B&PJxd0R;B}Sh+6QS5`K&gj?TM45Q&{y`luLegjVn2VO zYU{AQYp)=c8BH$MME`{5yL8#@(}TT8{lwI**>VBQQThnDg>l{ujoU}t=YFDuVk;8s 
znDpx_P(=7o3gbMu1MsGBNa~|^)(H!vGM2$0u2*;@h3yON4)mi3!RE~juO%V#;E}th zoS@+c*Mhtxz%#z?}^;A9j_8tO68v&>py3HmHuYiSJJ1oF4+2hHx>5-MpUR2w!d>DEwc+q^px=! zfy#zBsQlt27r?p=zEFv^YwZcl`+O4!+TPs_Ly<^_8HA z_4>z^9#lpXhIuqJxp&;8Q`ptT#UD>SV)Z8>PvhjS%39;mpo%O+n==s$iV@pfd2iiCNz)sYNf9=|dSf^@mb zjnD6wc@~$b#gcpi{l>lx9&R^L@3_y*%>0Dqx$T6rl4-l19AAMW%YQT-TME56Q;3^EZbpy>erJB=kxovcrH1VbO z{o4ImS5mjuhv|efN zPrxt4zrt~yex8VyAU6_kVW~}SsR#o*4C<*?Fp=9t(e7Z=ZWK-inQ}8C2M8j zPBmStF`c{QlsXLypxi_WPS=61q?As7`{tH%6k!{TeKuIu=7X8s^W!S%==HZ(p~+;2 zK2YTnNtQ?~!aF@&`!@b^Hm#{^Qm;FZA2P8p7%{FZ{eZg$rBj(Pn$ALTetr%-IQ&Nh zR`+dCUv>WxmcGI(xIfqEz%1J(EnE1}`Pg#{{tJs>Wr>FS&+|I3-@OI$WzB} z_iq>V4v1g9Wsb>xTY^@K(%DHZR)f9}2i`dq+I^$5I+XF)-FE!F?|tf4|Bwaqb$r#; z=on>7231`Ven`#sb@2fO1Q#Yh4ikzsiWl=G{3i^<{C{o@l1r#I1ea5s)Og9+5s84M6m3Zd0b=sJ?!83dET|L&uN%=TyY- zDcuv_zsOd(ktT+h_+@eid-0 zNP7P%eRiDknl#6rW5W`4-$LA}-z2`yW7Syfo|kq{~_K3P|aO+&dP2qwWwm8*|! z#X0m?K4Tg%>8my8D73&E=a_C#-Y!?p_>@w|LO%5~{hr5GA_Z@j!QVVH*Jj|FO~ZOq zO_^2HUU9>I{w!|FKa0I8mk*mm68RYm@oT`04~XJ*{xIu%H5dHg>shd<&F~a!|CmkW z`kyaxcSnmhDEk|uU8Vh#EntS<0rdJ*b>MPCNoU)<^|vk;E`QRIUxhwi7z2A$s7N~6 z`cT^#Zw><;=2~vT+BHSaAaE|0c`<-9tz7xT2~U0)V#;=R(un%2U$ewLm+rv+m6_7_ z#3@$wgAgdH2I_k!Yqs5E21GH z$DBF=eG|BXv-w!I`?owKDc;EmJ?v}13-QnJZx8cLDQBs)P`Z+5p)Qh9G&tZ`i8{0UE_h^7j*H+2WyjvQC-Up{;ae6VsNj6rh{C~w~4FxsG^A_li* z&#)F4fJvQ@Z1Q1W;;&e{8GDn>Rge8h@Khu`U9o!Zzp!^$*SQPeZUvT{1p{MSrddli zUiaaJxE2r$q{caHJ7~J;7w0c0o}{($V>|rigX+=Zo8a>v2<8oFGS7$$)5!4;=hN)X z?tP47{rH~si&<6Vjt~hoN~2&^Dz&&N5@IABJKi7i^o!e6!NxfGw#XjeaUbYxZ=Gm7 zeV!5cxLdx6J#>m?D=B2MhG?^LJhXh{LwjZfZ~^iDG*)xgL-QIp@qTl@G@1ps5_%=7 zV6t#<-Gd&Uu*nK|kEXq4V5;i()|r@`K*`BU?%WLfGR{}Ia=1>ophq;+%ey{#cSRSq zQb&h2u4&_Tf>H}#Z}w%Ajc}Ln%+H?eVi=5uu8CTri7n#$^Z2evZKmW0VQ}Ku*}p20 zU@4K?URubmSrqugF#pUz+LlT1$;+dAb4(VDDNp7x+%%*&!KRuW+w-;UsDk5LI0h%k zvuP{jfj8=+!&@QfmNfD3ZV^yU58Dzmzj|f)%Z1H9<8X0tdk&cUh0>;a@gibbmk79w zj4sD}7M%RBypS})Xx+&X0X6$|8Cm+^@i2S+w!Yxo6?*8jN^a4EiT2wIF#JI()A7bF zDw6_;c&q8TrCpiI(zUMgS*ViVTNX$oGnNb0=NP&|*&lbAHsC%+trtaGqwPC&K^Ht) 
z5~wLCn~cMH)#LsSuyd3y?_|J6(Jpj1U1zo9=&;688?i{D0kM^p=eabSnAhNL?mO)o zqNR0pe3FU>g2T5yAR$5K`|~r;c7Ja7RiTWQ7_?PJi50Y5kP4Zi?(X^fFu&qjy0_^? z0Qm2#G&75Q!0Nxqn2|YV{gf%O>Ge~lzb|2(Ir;OevP~*^;F1ArGqdcg4T~;Q;<#}} zCNAoQ4pb{Ey5k3W`g6ut#|?DCZ0hyVQmnhG-x(1JL)PeJ8dR z6h8ACUd_o008%mr=w-yj>X%1u)nfxlu--@FN`xGE^z|-|&}}zqI`8yrz}0`U3cjUA z(M;OC{8837(0u#c!>_}f@aUB}K0W=wRmoZ+xXt%|NWB~n>TeS;|MjagtdlNf5J|T) ze|+@L>Z`!(9plFOZcY1c;MdWdVmpOL`&ILB|LP`APp^F>C4lj&UF)W9)lNlZiO7J? zDwxTg6%FQ3XE;XwLt2mU-f0D1^=RSI)JxvGp0^YAavbmGUjE?WA+K?>dA+0-cC-~* zKU5XpMgxZ`l{)p%gU?ri`iqk29yZ--OT!ah!7`tEY9q zMHWeud zhbV3R>0eC&>_1?m<8+uO{^5gq(q$0jc{y`%+UY00d-Uiy=al^f1cG7H848<_vdg58 zV5XI6-*m+1JQ+8fH9a3uWQ5w6A8Ef!d{l7VVi!roEUDvs?hsT-#hOt1{KR0EZN|S+d5`nM#hH`&B8TTZpDH zCKoo>vj(gEVpENHS)IntkB1HFnRScWJ5!EcP2Z2Rz6Bp38wla0zXM5{NoCeEalV;U zQ2IK9ID%V=v#Z`JUtslHI$A@h+ehZ10WP$Q7p`%^5(GgpgYuT>y@OGI!(^v`lyE|- z1(eEuA_q;npZTp2X1rp-{IL?=C28(?#$^}yrsx3q7Ri3cQ&SX@_tS3`1q!APa)2BkN$%-9;dD3XE+egkDkF7F{pAhS4Uy=mN@j z`2fgE8MoiIg5w*O$u6?yEj~ePBaG1I^+sw+W_K9JaKRcFpm$zarHRVf<##ss?EpLT zzW6$m0pR}8nPlCmQ{rRQ8=Lg#d8RdsGM2J!&+f*Kxwb*3LLMn;8yI&x>T9d$+7U{lkc;Z(V zbo~g5eS8F_fh4TqYO6f|+&VuClERn~bb{lOPiK z8q}7HcKypMJ^8zOPD_zM&gGe7=`3X2x90BE(PV7J`+Sc#AVAdKh_DT|wYgciRcVMt zK-hibS^lzt11d61%UVb%mY_4{zM+f(y3zZV9bxiP$IdmVV=f zFFJ68iLp@RRTI)J?Pc>_wY^tKA0d?Yoa=f;Ly`93*oRmmt0JqsQf~5FTiKJY&vQ#E z+b=W5mNr3}1r@SUU2-=PR2r+_&N+!|zUvYIQH2ufgIOs{Vb_)2|E2tR+*3IO?quk% zYLWR4m1Lp)xYni7(!)bLkBL7JLOtL$P+M?7ZL>lI?{2yId@roC$N?T@zNo2y)~>;gyKr93&XIbfIE*5;F_^R22&KN9kioME97 zIn!{0U%QvoqeU;i?`i5yZQAd)1!X@`jqeYfdNfPJEzk5ZY%fP!L%Hcu^}%Poh$}Wa zfmwUHXm=-Mg7+2AH6P%Umsk7wFy<;Mrf4{9@?7&L3}6Qt3n#-YQC1gF#;_IbcL@ZH z0g`cl5(l+(bP8yLy|DURIR?%5z2jP_c|55>-GN5OY~acZv*W+Dt-m3Y#~*-AVVLPL zuc~HM%+-O4006cscUZ}dgGfhJxdJ;P?X1`#dBVPMS>Tcz$a4n^0jLF>2 z7H&Ta4lYjy6t7}Gxc9Q+pl98wniZW#>H`B8qGd9uV>^!{gMe+6mcyqsFbkw-fS_!~ zdSQywEklh)ja`q!zN>DZ{qd(*2Z{W~13Yuq8UxAmL~S zT}ki!ru?*97i}_t@R*NK)brw%f{a;QM$K)t3cct%B=3~<&V6I!;Xw>U%c+GAp}ij8 
zX!pp=ZP${Se1&lK$en*pgJjNxo!5%ov}3aW9yaw5=YoH>9qgZNcWMX}r=_ZM__c2+CAQ3VA@WIHt7bv_bucE-J-3ziuF;)QmMny)g%x91*v9a|1g{HC1e z_EY7M|E}#0H+uzu?fDk+WRlG}%5{4h|tmm9Qkh`EZXI0&3xfBmHoq6A{g$iAq_ zx37umfMDET{L6g^4V6YOutLRF1(%Nh-dNWzi-_Bc-l008{i1Q9Y_zv=U03e z^N0keL5JX8rv?4R9EU$mWecj~UYmK@h`l+rTW3&ox;s8H6XxWi4SC@Bf9n5>wc#W$fSglY5;s z7&|5N18k60TV5O6Z3Q#eMY|`QNY1J9>vTeU-s?;yav`WYpUG|mtG76d!n;kx%H!-9 z>%#$Bfv|YikFQxQ9r(LFhLYfEI*+MZQ_Q9-W_wPG74qmla-5uuMF||=xm{$KqvGfq z%Nz5V~pskRb3hxt`vxX zMOMsG2kJW-uv&DATnoXNpBa&&2T)i~6IOb*BLBk$?#z*|3;q+aJ-y*cW})SX$e)mk zvf3-Q&M(d&-H(g*m9XWF%GAf%vEQ4Q-UHa}T_V0*lVTJReOOQhUCyP%U%VmIAT;S9 zNO>7A*JhkqqZ-okn(ltFnzW^DOD2&?*`E;rV>QsN)58b6ZVo{Hrj(sfzG(>@Ta@pk zivQ+j@yisl`%85+e-_;33$$?R(D$oKZE|$=^m^c1-QAfO;2lqp5t7kFTr9XeNDeqf zttW+W$Spom^`h2UrH9k74My31|2)x=Xub5QgR-Uf4QVyRE4NIEBD0rEuJZK)rUxNM zJ?2jjSpaFpgg7Dm_HHew3*e6ZBf@-5Vhfaz>9Tzaks*0KA@Jz}T*kahOCF#CPxi!s znLIXc1wqYw|3{QsEq-)j@VF2yha@KkZ%!FV+--|9GC~+|R^eaFDiqfD-jkB_+wx1c zj!2-zAqU_cO*GT9#gDI9!ZOH&mcu2+)HY*W2QQu+hG37;1KC zoj#A?7=vt0F_-{3$E1IWh+AtYJ2@t7(pJX4TIc!~2?pS~$delY#kE0i7jJ*f{S?0G zDA&vJ=ttgvdcpPCV3($VWv`S&PCppGeji9LcdShG6Y}$WM9wVNv)Q7es(_wf_zU>e zwACQI5ZI>NY6|W81awHro%CO9N&xVQ5c?1xw{L|4_~=gOe7WkROt6!WK0`!^;%w7t zIU3E7fXm_om2r=?vf&LExI?4&qTiPYulo#pddlmbJ@KYw_isdn!gSyt+yBikJOj!F z+W|!+f3Y>jD=Yw4DFA>QJr%vI{+=(m$IXVF?C!-cVHQ6j_NLw<5EDY)a?z>3+XTb# zdOG><2SbNJl28B3@-7)javp~Fgy=hSG&>OhiSg59{yXCa#(@9ARkuGx_sqJ%dr0dU!!xP literal 0 HcmV?d00001 diff --git a/docs/vo/conesearch/index.rst b/docs/vo/conesearch/index.rst new file mode 100644 index 0000000..4666035 --- /dev/null +++ b/docs/vo/conesearch/index.rst @@ -0,0 +1,191 @@ +.. doctest-skip-all + +.. _astropy_conesearch: + +VO Simple Cone Search +===================== + +Astropy offers Simple Cone Search Version 1.03 as defined in IVOA +Recommendation (February 22, 2008). Cone Search queries an +area encompassed by a given radius centered on a given RA and DEC and returns +all the objects found within the area in the given catalog. + +.. 
_vo-sec-default-scs-services: + +Default Cone Search Services +---------------------------- + +Currently, the default Cone Search services used are a subset of those found in +the STScI VAO Registry. They were hand-picked to represent commonly used +catalogs below: + +* 2MASS All-Sky +* HST Guide Star Catalog +* SDSS Data Release 7 +* SDSS-III Data Release 8 +* USNO A1 +* USNO A2 +* USNO B1 + +This subset undergoes daily validations hosted by STScI using +:ref:`vo-sec-validator-validate`. Those that pass without critical +warnings or exceptions are used by :ref:`vo-sec-client-scs` by +default. They are controlled by `astropy.vo.Conf.conesearch_dbname`: + +#. ``'conesearch_good'`` + Default. Passed validation without critical warnings and exceptions. +#. ``'conesearch_warn'`` + Has critical warnings but no exceptions. Use at your own risk. +#. ``'conesearch_exception'`` + Has some exceptions. *Never* use this. +#. ``'conesearch_error'`` + Has network connection error. *Never* use this. + +If you are a Cone Search service provider and would like to include your +service in the list above, please open a +`GitHub issue on Astropy `_. + + +Caching +------- + +Caching of downloaded contents is controlled by `astropy.utils.data`. +To use cached data, some functions in this package have a ``cache`` +keyword that can be set to ``True``. + + +Getting Started +--------------- + +This section only contains minimal examples showing how to perform +basic Cone Search. 
+ +>>> from astropy.vo.client import conesearch + +List the available Cone Search catalogs: + +>>> conesearch.list_catalogs() +[u'Guide Star Catalog 2.3 1', + u'SDSS DR7 - Sloan Digital Sky Survey Data Release 7 1', + u'SDSS DR7 - Sloan Digital Sky Survey Data Release 7 2', + u'SDSS DR7 - Sloan Digital Sky Survey Data Release 7 3', + u'SDSS DR7 - Sloan Digital Sky Survey Data Release 7 4', + u'SDSS DR8 - Sloan Digital Sky Survey Data Release 8 1', + u'SDSS DR8 - Sloan Digital Sky Survey Data Release 8 2', + u'The HST Guide Star Catalog, Version 1.1 (Lasker+ 1992) 1', + u'The HST Guide Star Catalog, Version 1.2 (Lasker+ 1996) 1', + u'The HST Guide Star Catalog, Version GSC-ACT (Lasker+ 1996-99) 1', + u'The PMM USNO-A1.0 Catalogue (Monet 1997) 1', + u'The USNO-A2.0 Catalogue (Monet+ 1998) 1', + u'Two Micron All Sky Survey (2MASS) 1', + u'Two Micron All Sky Survey (2MASS) 2', + u'USNO-A2 Catalogue 1', + u'USNO-A2.0 1'] + +Select a 2MASS catalog from the list above that is to be searched: + +>>> my_catname = 'Two Micron All Sky Survey (2MASS) 1' + +Query the selected 2MASS catalog around M31 with a 0.1-degree search radius: + +>>> from astropy.coordinates import SkyCoord +>>> from astropy import units as u +>>> c = SkyCoord.from_name('M31') +>>> c.ra, c.dec +(, ) +>>> result = conesearch.conesearch(c, 0.1 * u.degree, catalog_db=my_catname) +Trying http://wfaudata.roe.ac.uk/twomass-dsa/DirectCone?DSACAT=TWOMASS&... +Downloading ... +WARNING: W06: ... UCD has invalid character '?' in '??' [...] +WARNING: W50: ... Invalid unit string 'yyyy-mm-dd' [...] +WARNING: W50: ... Invalid unit string 'Julian days' [...] +>>> result +
+>>> result.url +u'http://wfaudata.roe.ac.uk/twomass-dsa/DirectCone?DSACAT=TWOMASS&DSATAB=twomass_psc&' + +Get the number of matches and returned column names: + +>>> result.array.size +2008 +>>> result.array.dtype.names +('cx', + 'cy', + 'cz', + 'htmID', + 'ra', + 'dec', ..., + 'coadd_key', + 'coadd') + +Extract RA and DEC of the matches: + +>>> result.array['ra'] +masked_array(data = [10.620983 10.672264 10.651166 ..., 10.805599], + mask = [False False False ..., False], + fill_value = 1e+20) +>>> result.array['dec'] +masked_array(data = [41.192303 41.19426 41.19445 ..., 41.262123], + mask = [False False False ..., False], + fill_value = 1e+20) + + +Using `astropy.vo` +------------------ + +This package has four main components across two subpackages: + +.. toctree:: + :maxdepth: 2 + + client + validator + +They are designed to be used in a work flow as illustrated below: + +.. image:: images/astropy_vo_flowchart.png + :width: 500px + :alt: VO work flow + +The one that a typical user needs is the :ref:`vo-sec-client-scs` component +(see :ref:`Cone Search Examples `). + + +See Also +-------- + +- `NVO Directory `_ + +- `Simple Cone Search Version 1.03, IVOA Recommendation (22 February 2008) `_ + +- `STScI VAO Registry `_ + +- `STScI VO Databases `_ + + +Reference/API +------------- + +.. automodapi:: astropy.vo + :no-inheritance-diagram: + +.. automodapi:: astropy.vo.client.vos_catalog + :no-inheritance-diagram: + +.. automodapi:: astropy.vo.client.conesearch + :no-inheritance-diagram: + +.. automodapi:: astropy.vo.client.async + :no-inheritance-diagram: + +.. automodapi:: astropy.vo.client.exceptions + +.. automodapi:: astropy.vo.validator + +.. automodapi:: astropy.vo.validator.validate + :no-inheritance-diagram: + +.. automodapi:: astropy.vo.validator.inspect + :no-inheritance-diagram: + +.. 
automodapi:: astropy.vo.validator.exceptions diff --git a/docs/vo/conesearch/validator.rst b/docs/vo/conesearch/validator.rst new file mode 100644 index 0000000..a4cabef --- /dev/null +++ b/docs/vo/conesearch/validator.rst @@ -0,0 +1,362 @@ +.. doctest-skip-all + +Using `astropy.vo.validator` +============================ + +VO services validator is used by STScI to support :ref:`vo-sec-client-scs`. +Currently, only Cone Search services are supported. +A typical user should not need the validator. However, this could be used by +VO service providers to validate their services. Currently, any service +to be validated has to be registered in STScI VAO Registry. + +.. _vo-sec-validator-validate: + +Validation for Simple Cone Search +--------------------------------- + +`astropy.vo.validator.validate` validates VO services. +Currently, only Cone Search validation is done using +:func:`~astropy.vo.validator.validate.check_conesearch_sites`, +which utilizes underlying `astropy.io.votable.validator` library. + +A master list of all available Cone Search services is obtained from +`astropy.vo.validator.Conf.conesearch_master_list`, which is a URL +query to STScI VAO Registry by default. However, by default, only the +ones in `astropy.vo.validator.Conf.conesearch_urls` are validated +(also see :ref:`vo-sec-default-scs-services`), while the rest are +skipped. There are also options to validate a user-defined list of +services or all of them. + +All Cone Search queries are done using RA, DEC, and SR given by +```` XML tag in the registry, and maximum verbosity. +In an uncommon case where ```` is not defined for a service, +it uses a default search for ``RA=0&DEC=0&SR=0.1``. + +The results are separated into 4 groups below. Each group +is stored as a JSON file of `~astropy.vo.client.vos_catalog.VOSDatabase`: + +#. ``conesearch_good.json`` Passed validation without critical + warnings and exceptions. 
This database residing in + `astropy.vo.Conf.vos_baseurl` is the one used by + :ref:`vo-sec-client-scs` by default. +#. ``conesearch_warn.json`` Has critical warnings but no + exceptions. Users can manually set + `astropy.vo.Conf.conesearch_dbname` to use this at their own + risk. +#. ``conesearch_exception.json`` + Has some exceptions. *Never* use this. + For informational purpose only. +#. ``conesearch_error.json`` + Has network connection error. *Never* use this. + For informational purpose only. + +HTML pages summarizing the validation results are stored in +``'results'`` sub-directory, which also contains downloaded XML +files from individual Cone Search queries. + +Warnings and Exceptions +^^^^^^^^^^^^^^^^^^^^^^^ + +A subset of `astropy.io.votable.exceptions` that is considered +non-critical is defined by +`astropy.vo.validator.Conf.noncritical_warnings`, which will not be +flagged as bad by the validator. However, this does not change the +behavior of `astropy.io.votable.Conf.pedantic`, which still needs to +be set to ``False`` for them not to be thrown out by +:func:`~astropy.vo.client.conesearch.conesearch`. Despite being +listed as non-critical, user is responsible to check whether the +results are reliable; They should not be used blindly. + +Some `units recognized by VizieR `_ +are considered invalid by Cone Search standards. As a result, +they will give the warning ``'W50'``, which is non-critical by default. + +User can also modify `astropy.vo.validator.Conf.noncritical_warnings` +to include or exclude any warnings or exceptions, as desired. +However, this should be done with caution. Adding exceptions to +non-critical list is not recommended. + +.. _vo-sec-validator-build-db: + +Building the Database from Registry +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Each Cone Search service is a `~astropy.vo.client.vos_catalog.VOSCatalog` in +a `~astropy.vo.client.vos_catalog.VOSDatabase` (see +:ref:`vo-sec-client-cat-manip` and :ref:`vo-sec-client-db-manip`). 
+ +In the master registry, there are duplicate catalog titles with +different access URLs, duplicate access URLs with different titles, +duplicate catalogs with slightly different descriptions, etc. + +A Cone Search service is really defined by its access URL +regardless of title, description, etc. By default, +:func:`~astropy.vo.client.vos_catalog.VOSDatabase.from_registry` ensures +each access URL is unique across the database. +However, for user-friendly catalog listing, its title will be +the catalog key, not the access URL. + +In the case of two different access URLs sharing the same title, +each URL will have its own database entry, with a sequence number +appended to their titles (e.g., 'Title 1' and 'Title 2'). For +consistency, even if the title does not repeat, it will still be +renamed to 'Title 1'. + +In the case of the same access URL appearing multiple times in +the registry, the validator will store the first catalog with +that access URL and throw out the rest. However, it will keep +count of the number of duplicates thrown out in the +``'duplicatesIgnored'`` dictionary key of the catalog kept in the +database. + +All the existing catalog tags will be copied over as dictionary +keys, except ``'accessURL'`` that is renamed to ``'url'`` for simplicity. +In addition, new keys from validation are added: + +* ``validate_expected`` + Expected validation result category, e.g., "good". +* ``validate_network_error`` + Indication for connection error. +* ``validate_nexceptions`` + Number of exceptions found. +* ``validate_nwarnings`` + Number of warnings found. +* ``validate_out_db_name`` + Cone Search database name this entry belongs to. +* ``validate_version`` + Version of validation software. +* ``validate_warning_types`` + List of warning codes. +* ``validate_warnings`` + Descriptions of the warnings. +* ``validate_xmllint`` + Indication of whether ``xmllint`` passed. +* ``validate_xmllint_content`` + Output from ``xmllint``. 
+ +Configurable Items +^^^^^^^^^^^^^^^^^^ + +These parameters are set via :ref:`astropy_config`: + +* `astropy.vo.validator.Conf.conesearch_master_list` + VO registry query URL that should return a VO table with all the desired + VO services. +* `astropy.vo.validator.Conf.conesearch_urls` + Subset of Cone Search access URLs to validate. +* `astropy.vo.validator.Conf.noncritical_warnings` + List of VO table parser warning codes that are considered non-critical. + +Also depends on properties in +:ref:`Simple Cone Search Configurable Items `. + +.. _vo-sec-validate-examples: + +Examples +^^^^^^^^ + +>>> from astropy.vo.validator import validate + +Validate default Cone Search sites with multiprocessing and write results +in the current directory. Reading the master registry can be slow, so the +default timeout is internally set to 60 seconds for it. However, +``astropy.utils.data.REMOTE_TIMEOUT`` should still be set to account for +accessing the individual services (at least 30 seconds is recommended). +In addition, all VO table warnings from the registry are suppressed because +we are not trying to validate the registry itself but the services it contains: + +>>> from astropy.utils import data +>>> with data.conf.set_temp('remote_timeout', 30): +... validate.check_conesearch_sites() +Downloading http://vao.stsci.edu/directory/NVORegInt.asmx/... +|===========================================| 25M/ 25M (100.00%) 00s +INFO: Only 30/11938 site(s) are validated [astropy.vo.validator.validate] +# ... +INFO: good: 14 catalog(s) [astropy.vo.validator.validate] +INFO: warn: 12 catalog(s) [astropy.vo.validator.validate] +INFO: excp: 0 catalog(s) [astropy.vo.validator.validate] +INFO: nerr: 4 catalog(s) [astropy.vo.validator.validate] +INFO: total: 30 out of 30 catalog(s) [astropy.vo.validator.validate] +INFO: check_conesearch_sites took 451.05685997 s on AVERAGE... 
+ +Validate only Cone Search access URLs hosted by ``'stsci.edu'`` without verbose +outputs (except warnings that are controlled by :py:mod:`warnings`) or +multiprocessing, and write results in ``'subset'`` sub-directory instead of the +current directory. For this example, we use ``registry_db`` from +:ref:`VO database examples `: + +>>> urls = registry_db.list_catalogs_by_url(pattern='stsci.edu') +>>> urls +['http://archive.stsci.edu/befs/search.php?', + 'http://archive.stsci.edu/copernicus/search.php?', ..., + 'http://galex.stsci.edu/gxWS/ConeSearch/gxConeSearch.aspx?', + 'http://gsss.stsci.edu/webservices/vo/ConeSearch.aspx?CAT=GSC23&'] +>>> with data.conf.set_temp('remote_timeout', 30): +... validate.check_conesearch_sites( +... destdir='./subset', verbose=False, parallel=False, url_list=urls) +INFO: check_conesearch_sites took 84.7241549492 s on AVERAGE... + +Add ``'W24'`` from `astropy.io.votable.exceptions` to the list of +non-critical warnings to be ignored and re-run default validation. +This is *not* recommended unless you know exactly what you are doing: + +>>> from astropy.vo.validator.validate import conf +>>> with conf.set_temp('noncritical_warnings', conf.noncritical_warnings + ['W24']): +... with data.conf.set_temp('remote_timeout', 30): +... validate.check_conesearch_sites() + +Validate *all* Cone Search services in the master registry +(this will take a while) and write results in ``'all'`` sub-directory: + +>>> with data.conf.set_temp('remote_timeout', 30): +... validate.check_conesearch_sites(destdir='./all', url_list=None) + +To look at the HTML pages of the validation results in the current +directory using Firefox browser (images shown are from STScI server +but your own results should look similar):: + + firefox results/index.html + +.. 
image:: images/validator_html_1.png + :width: 600px + :alt: Main HTML page of validation results + +When you click on 'All tests' from the page above, you will see all the +Cone Search services validated with a summary of validation results: + +.. image:: images/validator_html_2.png + :width: 600px + :alt: All tests HTML page + +When you click on any of the listed URLs from above, you will see +detailed validation warnings and exceptions for the selected URL: + +.. image:: images/validator_html_3.png + :width: 600px + :alt: Detailed validation warnings HTML page + +When you click on the URL on top of the page above, you will see +the actual VO Table returned by the Cone Search query: + +.. image:: images/validator_html_4.png + :width: 600px + :alt: VOTABLE XML page + + +.. _vo-sec-validator-inspect: + +Inspection of Validation Results +-------------------------------- + +`astropy.vo.validator.inspect` inspects results from +:ref:`vo-sec-validator-validate`. It reads in JSON files of +`~astropy.vo.client.vos_catalog.VOSDatabase` +residing in ``astropy.vo.Conf.vos_baseurl``, which +can be changed to point to a different location. 
+ +Configurable Items +^^^^^^^^^^^^^^^^^^ + +This parameter is set via :ref:`astropy_config`: + +* `astropy.vo.Conf.vos_baseurl` + +Examples +^^^^^^^^ + +>>> from astropy.vo.validator import inspect + +Load Cone Search validation results from +``astropy.vo.Conf.vos_baseurl`` (by default, the one used by +:ref:`vo-sec-client-scs`): + +>>> r = inspect.ConeSearchResults() +Downloading http://.../conesearch_good.json +|===========================================| 48k/ 48k (100.00%) 00s +Downloading http://.../conesearch_warn.json +|===========================================| 85k/ 85k (100.00%) 00s +Downloading http://.../conesearch_exception.json +|===========================================| 3.0k/3.0k (100.00%) 00s +Downloading http://.../conesearch_error.json +|===========================================| 4.0k/4.0k (100.00%) 00s + +Print tally. In this example, there are 13 Cone Search services that +passed validation with non-critical warnings, 14 with critical warnings, +1 with exceptions, and 2 with network error: + +>>> r.tally() +good: 13 catalog(s) +warn: 14 catalog(s) +exception: 1 catalog(s) +error: 2 catalog(s) +total: 30 catalog(s) + +Print a list of good Cone Search catalogs, each with title, access URL, +warning codes collected, and individual warnings: + +>>> r.list_cats('good') +Guide Star Catalog 2.3 1 +http://gsss.stsci.edu/webservices/vo/ConeSearch.aspx?CAT=GSC23& +W48,W50 +.../vo.xml:136:0: W50: Invalid unit string 'pixel' +.../vo.xml:155:0: W48: Unknown attribute 'nrows' on TABLEDATA +# ... +USNO-A2 Catalogue 1 +http://www.nofs.navy.mil/cgi-bin/vo_cone.cgi?CAT=USNO-A2& +W17,W42,W21 +.../vo.xml:4:0: W21: vo.table is designed for VOTable version 1.1 and 1.2... +.../vo.xml:4:0: W42: No XML namespace specified +.../vo.xml:15:15: W17: VOTABLE element contains more than one DESCRIPTION... 
List Cone Search catalogs with warnings, excluding warnings that were ignored in `astropy.vo.validator.Conf.noncritical_warnings`, and write the output to a file named ``'warn_cats.txt'`` in the current directory.
This example reads in +validation of STScI Cone Search services done in +:ref:`Validation for Simple Cone Search Examples `: + +>>> from astropy.vo import conf +>>> with conf.set_temp('vos_baseurl', './subset/'): +>>> r = inspect.ConeSearchResults() +>>> r.tally() +good: 19 catalog(s) +warn: 7 catalog(s) +exception: 2 catalog(s) +error: 0 catalog(s) +total: 28 catalog(s) +>>> r.catkeys['good'] +[u'Advanced Camera for Surveys 1', + u'Berkeley Extreme and Far-UV Spectrometer 1', + u'Copernicus Satellite 1', + u'Extreme Ultraviolet Explorer 1', ..., + u'Wisconsin Ultraviolet Photo-Polarimeter Experiment 1'] diff --git a/docs/vo/index.rst b/docs/vo/index.rst new file mode 100644 index 0000000..49579bf --- /dev/null +++ b/docs/vo/index.rst @@ -0,0 +1,38 @@ +.. _astropy_vo: + +******************************************* +Virtual Observatory Access (``astropy.vo``) +******************************************* + +.. module:: astropy.vo + +Introduction +============ + +The ``astropy.vo`` subpackage handles simple access for Virtual Observatory +(VO) services. + +Current services include: + +.. toctree:: + :maxdepth: 1 + + conesearch/index + samp/index + +Other third-party Python packages and tools related to ``astropy.vo``: + +* `PyVO `__ + provides further functionality to discover + and query VO services. Its user guide contains a + `good introduction `__ + to how the VO works. + +* `Astroquery `__ + is an Astropy affiliated package that provides simply access to specific astronomical + web services, many of which do not support the VO protocol. + +* `Simple-Cone-Search-Creator `_ + shows how to ingest a catalog into a cone search service and serve it in VO + standard format using Python + (using CSV files and `healpy `__). diff --git a/docs/vo/samp/advanced_embed_samp_hub.rst b/docs/vo/samp/advanced_embed_samp_hub.rst new file mode 100644 index 0000000..6f110ea --- /dev/null +++ b/docs/vo/samp/advanced_embed_samp_hub.rst @@ -0,0 +1,134 @@ +.. 
include:: references.txt + +.. doctest-skip-all + +Embedding a SAMP hub in a GUI +----------------------------- + +Overview +^^^^^^^^ + +If you wish to embed a SAMP hub in your Python GUI tool, you will need to start +the hub programmatically using:: + + from astropy.vo.samp import SAMPHubServer + hub = SAMPHubServer() + hub.start() + +This launches the hub in a thread and is non-blocking. If you are not +interested in connections from web SAMP clients, then you can simply use:: + + from astropy.vo.samp import SAMPHubServer + hub = SAMPHubServer(web_profile=False) + hub.start() + +and this should be all you need to do. However, if you want to keep the web +profile active, there is an additional consideration, which is that when a web +SAMP client connects, you will need to ask the user whether they accept the +connection (for security reasons). By default, the confirmation message is a +text-based message in the terminal, but if you have a GUI tool, you will +instead likely want to open a GUI dialog. + +To do this, you will need to define a class that handles the dialog, +and you should then pass an **instance** of the class to +|SAMPHubServer| (not the class itself). This class should inherit +from `astropy.vo.samp.WebProfileDialog` and add the following: + + 1) It should have a GUI timer callback that periodically calls + ``WebProfileDialog.handle_queue`` (available as + ``self.handle_queue``). + + 2) Implement a ``show_dialog`` method to display a consent dialog. + It should take the following arguments: + + - ``samp_name``: The name of the application making the request. + + - ``details``: A dictionary of details about the client + making the request. The only key in this dictionary required by + the SAMP standard is ``samp.name`` which gives the name of the + client making the request. + + - ``client``: A hostname, port pair containing the client + address. + + - ``origin``: A string containing the origin of the + request. 
+ + 3) Based on the user response, the ``show_dialog`` should call + ``WebProfileDialog.consent`` or ``WebProfileDialog.reject``. + This may, in some cases, be the result of another GUI callback. + +Example of embedding a SAMP hub in a Tk application +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The following code is a full example of a simple Tk application that watches +for web SAMP connections and opens the appropriate dialog:: + + + import Tkinter as tk + import tkMessageBox + + from astropy.vo.samp import SAMPHubServer + from astropy.vo.samp.hub import WebProfileDialog + + MESSAGE = """ + A Web application which declares to be + + Name: {name} + Origin: {origin} + + is requesting to be registered with the SAMP Hub. Pay attention + that if you permit its registration, such application will acquire + all current user privileges, like file read/write. + + Do you give your consent? + """ + + class TkWebProfileDialog(WebProfileDialog): + def __init__(self, root): + self.root = root + self.wait_for_dialog() + + def wait_for_dialog(self): + self.handle_queue() + self.root.after(100, self.wait_for_dialog) + + def show_dialog(self, samp_name, details, client, origin): + text = MESSAGE.format(name=samp_name, origin=origin) + + response = tkMessageBox.askyesno( + 'SAMP Hub', text, + default=tkMessageBox.NO) + + if response: + self.consent() + else: + self.reject() + + # Start up Tk application + root = tk.Tk() + tk.Label(root, text="Example SAMP Tk application", + font=("Helvetica", 36), justify=tk.CENTER).pack(pady=200) + root.geometry("500x500") + root.update() + + # Start up SAMP hub + h = SAMPHubServer(web_profile_dialog=TkWebProfileDialog(root)) + h.start() + + try: + # Main GUI loop + root.mainloop() + except KeyboardInterrupt: + pass + + h.stop() + +If you run the above script, a window will open saying "Example SAMP Tk +application". 
If you then go to the following page for example: + +http://astrojs.github.io/sampjs/examples/pinger.html + +and click on the Ping button, you will see the dialog open in the Tk +application. Once you click on 'CONFIRM', future 'Ping' calls will no longer +bring up the dialog. diff --git a/docs/vo/samp/example_clients.rst b/docs/vo/samp/example_clients.rst new file mode 100644 index 0000000..c1e6754 --- /dev/null +++ b/docs/vo/samp/example_clients.rst @@ -0,0 +1,127 @@ +.. include:: references.txt + +.. doctest-skip-all + +.. _vo-samp-example_clients: + + +Communication between integrated clients objects +------------------------------------------------ + +As shown in :doc:`example_table_image`, the |SAMPIntegratedClient| class can be +used to communicate with other SAMP-enabled tools such as `TOPCAT +`_, `SAO Ds9 +`_, or `Aladin Desktop +`_. + +In this section, we look at how we can set up two |SAMPIntegratedClient| +instances and communicate between them. + +First, start up a SAMP hub as described in :doc:`example_hub`. + +Next, we create two clients and connect them to the hub:: + + >>> client1 = samp.SAMPIntegratedClient(name="Client 1", description="Test Client 1", + ... metadata = {"client1.version":"0.01"}) + >>> client2 = samp.SAMPIntegratedClient(name="Client 2", description="Test Client 2", + ... metadata = {"client2.version":"0.25"}) + >>> client1.connect() + >>> client2.connect() + +We now define functions to call when receiving a notification, call or response:: + + >>> def test_receive_notification(private_key, sender_id, mtype, params, extra): + ... print("Notification:", private_key, sender_id, mtype, params, extra) + + >>> def test_receive_call(private_key, sender_id, msg_id, mtype, params, extra): + ... print("Call:", private_key, sender_id, msg_id, mtype, params, extra) + ... client1.ereply(msg_id, SAMP_STATUS_OK, result = {"txt": "printed"}) + + >>> def test_receive_response(private_key, sender_id, msg_id, response): + ... 
print("Response:", private_key, sender_id, msg_id, response) + +We subscribe client 1 to ``"samp.*"`` and ``"samp.app.*"`` and bind them to the +related functions:: + + >>> client1.bind_receive_notification("samp.app.*", test_receive_notification) + >>> client1.bind_receive_call("samp.app.*", test_receive_call) + +We now bind message tags received by client 2 to suitable functions:: + + >>> client2.bind_receive_response("my-dummy-print", test_receive_response) + >>> client2.bind_receive_response("my-dummy-print-specific", test_receive_response) + +We are now ready to test out the clients and callback functions. Client 2 +notifies all clients using the "samp.app.echo" message type via the hub:: + + >>> client2.enotify_all("samp.app.echo", txt="Hello world!") + ['cli#2'] + Notification: 0d7f4500225981c104a197c7666a8e4e cli#2 samp.app.echo {'txt': + 'Hello world!'} {'host': 'antigone.lambrate.inaf.it', 'user': 'unknown'} + +We can also find a dictionary giving the clients that would currently receive +``samp.app.echo`` messages:: + + >>> print(client2.getSubscribedClients("samp.app.echo")) + {'cli#2': {}} + +Client 2 calls all clients with the ``"samp.app.echo"`` message type using +``"my-dummy-print"`` as a message-tag:: + + >>> print(client2.call_all("my-dummy-print", + ... {"samp.mtype": "samp.app.echo", + ... "samp.params": {"txt": "Hello world!"}})) + {'cli#1': 'msg#1;;cli#hub;;cli#2;;my-dummy-print'} + Call: 8c8eb53178cb95e168ab17ec4eac2353 cli#2 + msg#1;;cli#hub;;cli#2;;my-dummy-print samp.app.echo {'txt': 'Hello world!'} + {'host': 'antigone.lambrate.inaf.it', 'user': 'unknown'} + Response: d0a28636321948ccff45edaf40888c54 cli#1 my-dummy-print + {'samp.status': 'samp.ok', 'samp.result': {'txt': 'printed'}} + +Client 2 then calls client 1 using the ``"samp.app.echo"`` message type, +tagging the message as ``"my-dummy-print-specific"``:: + + >>> try: + ... print(client2.call(client1.getPublicId(), + ... "my-dummy-print-specific", + ... 
{"samp.mtype": "samp.app.echo", + ... "samp.params": {"txt": "Hello client 1!"}})) + ... except SAMPProxyError as e: + ... print("Error ({0}): {1}".format(e.faultCode, e.faultString)) + msg#2;;cli#hub;;cli#2;;my-dummy-print-specific + Call: 8c8eb53178cb95e168ab17ec4eac2353 cli#2 + msg#2;;cli#hub;;cli#2;;my-dummy-print-specific samp.app.echo {'txt': 'Hello + Cli 1!'} {'host': 'antigone.lambrate.inaf.it', 'user': 'unknown'} + Response: d0a28636321948ccff45edaf40888c54 cli#1 my-dummy-print-specific + {'samp.status': 'samp.ok', 'samp.result': {'txt': 'printed'}} + +We can now define a function called to test synchronous calls:: + + >>> def test_receive_sync_call(private_key, sender_id, msg_id, mtype, params, extra): + ... import time + ... print("SYNC Call:", sender_id, msg_id, mtype, params, extra) + ... time.sleep(2) + ... client1.reply(msg_id, {"samp.status": SAMP_STATUS_OK, + ... "samp.result": {"txt": "printed sync"}}) + +We now bind the ``samp.test`` message type to ``test_receive_sync_call``:: + + >>> client1.bind_receive_call("samp.test", test_receive_sync_call) + >>> try: + ... # Sync call + ... print(client2.call_and_wait(client1.getPublicId(), + ... {"samp.mtype": "samp.test", + ... "samp.params": {"txt": "Hello SYNCRO client 1!"}}, + ... "10")) + ... except SAMPProxyError as e: + ... # If timeout expires than a SAMPProxyError is returned + ... print("Error ({0}): {1}".format(e.faultCode, e.faultString)) + SYNC Call: cli#2 msg#3;;cli#hub;;cli#2;;sampy::sync::call samp.test {'txt': + 'Hello SYNCRO Cli 1!'} {'host': 'antigone.lambrate.inaf.it', 'user': + 'unknown'} + {'samp.status': 'samp.ok', 'samp.result': {'txt': 'printed sync'}} + +Finally, we disconnect the clients from the hub at the end:: + + >>> client1.disconnect() + >>> client2.disconnect() diff --git a/docs/vo/samp/example_hub.rst b/docs/vo/samp/example_hub.rst new file mode 100644 index 0000000..c8d197a --- /dev/null +++ b/docs/vo/samp/example_hub.rst @@ -0,0 +1,51 @@ +.. 
include:: references.txt + +.. doctest-skip-all + +.. _vo-samp-example_hub: + +Starting and stopping a SAMP hub server +--------------------------------------- + +There are several ways you can start up a SAMP hub: + +Using an existing hub +^^^^^^^^^^^^^^^^^^^^^ + +You can start up another application that includes a hub, such as +`TOPCAT `_, +`SAO Ds9 `_, or +`Aladin Desktop `_. + +Using the command-line hub utility +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can make use of the ``samp_hub`` command-line utility, which is included in +Astropy:: + + $ samp_hub + +To get more help on available options for ``samp_hub``:: + + $ samp_hub -h + +To stop the server, you can simply press control-C. + +Starting a hub programmatically (advanced) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can start up a hub by creating a |SAMPHubServer| instance and starting it, +either from the interactive Python prompt, or from a Python script:: + + >>> from astropy.vo.samp import SAMPHubServer + >>> hub = SAMPHubServer() + >>> hub.start() + +You can then stop the hub by calling:: + + >>> hub.stop() + +However, this method is generally not recommended for average users because it +does not work correctly when web SAMP clients try and connect. Instead, this +should be reserved for developers who want to embed a SAMP hub in a GUI for +example. For more information, see :doc:`advanced_embed_samp_hub`. diff --git a/docs/vo/samp/example_table_image.rst b/docs/vo/samp/example_table_image.rst new file mode 100644 index 0000000..c51be7b --- /dev/null +++ b/docs/vo/samp/example_table_image.rst @@ -0,0 +1,274 @@ +.. include:: references.txt + +.. doctest-skip-all + +.. _vo-samp-example-table-image: + +Sending/receiving tables and images over SAMP +--------------------------------------------- + +In the following examples, we make use of: + +* `TOPCAT `_, which is a tool to + explore tabular data. +* `SAO Ds9 `_, which is an image + visualization tool, which can also overplot catalogs. 
+* `Aladin Desktop `_, which is another tool that + can visualize images and catalogs. + +TOPCAT and Aladin will run a SAMP Hub is none is found, so for the following +examples you can either start up one of these applications first, or you can +start up the `astropy.vo.samp` hub. You can start this using the following +command:: + + $ samp_hub + +Sending a table to TOPCAT and Ds9 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The easiest way to send a VO table to TOPCAT is to make use of the +|SAMPIntegratedClient| class. Once TOPCAT is open, then first instantiate a +|SAMPIntegratedClient| instance and connect to the hub:: + + >>> from astropy.vo.samp import SAMPIntegratedClient + >>> client = SAMPIntegratedClient() + >>> client.connect() + +Next, we have to set up a dictionary that contains details about the table to +send. This should include ``url``, which is the URL to the file, and ``name``, +which is a human-readable name for the table. The URL can be a local URL +(starting with ``file:///``):: + + >>> params = {} + >>> params["url"] = 'file:///Users/tom/Desktop/aj285677t3_votable.xml' + >>> params["name"] = "Robitaille et al. (2008), Table 3" + +.. note:: To construct a local URL, you can also make use of ``urlparse`` as + follows:: + + >>> import urlparse + >>> params["url"] = urlparse.urljoin('file:', os.path.abspath("aj285677t3_votable.xml")) + +Now we can set up the message itself. 
This includes the type of message (here +we use ``table.load.votable`` which indicates that a VO table should be loaded, +and the details of the table that we set above:: + + >>> message = {} + >>> message["samp.mtype"] = "table.load.votable" + >>> message["samp.params"] = params + +Finally, we can broadcast this to all clients that are listening for +``table.load.votable`` messages using +:meth:`~astropy.vo.samp.integrated_client.SAMPIntegratedClient.notify_all`:: + + >>> client.notify_all(message) + +The above message will actually be broadcast to all applications connected via +SAMP. For example, if we open `SAO Ds9 `_ in +addition to TOPCAT, and we run the above command, both applications will load +the table. We can use the +:meth:`~astropy.vo.samp.integrated_client.SAMPIntegratedClient.get_registered_clients` method to +find all the clients connected to the hub:: + + >>> client.get_registered_clients() + ['hub', 'c1', 'c2'] + +These IDs don't mean much, but we can find out more using:: + + >>> client.get_metadata('c1') + {'author.affiliation': 'Astrophysics Group, Bristol University', + 'author.email': 'm.b.taylor@bristol.ac.uk', + 'author.name': 'Mark Taylor', + 'home.page': 'http://www.starlink.ac.uk/topcat/', + 'samp.description.text': 'Tool for OPerations on Catalogues And Tables', + 'samp.documentation.url': 'http://127.0.0.1:2525/doc/sun253/index.html', + 'samp.icon.url': 'http://127.0.0.1:2525/doc/images/tc_sok.gif', + 'samp.name': 'topcat', + 'topcat.version': '4.0-1'} + +We can see that ``c1`` is the TOPCAT client. 
We can now re-send the data, but +this time only to TOPCAT, using the +:meth:`~astropy.vo.samp.integrated_client.SAMPIntegratedClient.notify` method:: + + >>> client.notify('c1', message) + +Once finished, we should make sure we disconnect from the hub:: + + >>> client.disconnect() + +Receiving a table from TOPCAT +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To receive a table from TOPCAT, we have to set up a client that listens for +messages from the hub. As before, we instantiate a |SAMPIntegratedClient| +instance and connect to the hub:: + + >>> from astropy.vo.samp import SAMPIntegratedClient + >>> client = SAMPIntegratedClient() + >>> client.connect() + +We now set up a receiver class which will handle any received message. We need +to take care to write handlers for both notifications and calls (the difference +between the two being that calls expect a reply):: + + >>> class Receiver(object): + ... def __init__(self, client): + ... self.client = client + ... self.received = False + ... def receive_call(self, private_key, sender_id, msg_id, mtype, params, extra): + ... self.params = params + ... self.received = True + ... self.client.reply(msg_id, {"samp.status": "samp.ok", "samp.result": {}}) + ... def receive_notification(self, private_key, sender_id, mtype, params, extra): + ... self.params = params + ... self.received = True + +and we instantiate it: + + >>> r = Receiver(client) + +We can now use the +:meth:`~astropy.vo.samp.integrated_client.SAMPIntegratedClient.bind_receive_call` and +:meth:`~astropy.vo.samp.integrated_client.SAMPIntegratedClient.bind_receive_notification` methods +to tell our receiver to listen to all ``table.load.votable`` messages:: + + >>> client.bind_receive_call("table.load.votable", r.receive_call) + >>> client.bind_receive_notification("table.load.votable", r.receive_notification) + +We can now check that the message has not been received yet:: + + >>> r.received + False + +Let's now broadcast the table from TOPCAT. 
After a few seconds, we can try and +check again if the message has been received:: + + >>> r.received + True + +Success! The table URL should now be available in ``r.params['url']``, so we can do:: + + >>> from astropy.table import Table + >>> t = Table.read(r.params['url']) + Downloading http://127.0.0.1:2525/dynamic/4/t12.vot [Done] + >>> t + col1 col2 col3 col4 col5 col6 col7 col8 col9 col10 + ------------------------- -------- ------- -------- -------- ----- ---- ----- ---- ----- + SSTGLMC G000.0046+01.1431 0.0046 1.1432 265.2992 -28.3321 6.67 5.04 6.89 5.22 N + SSTGLMC G000.0106-00.7315 0.0106 -0.7314 267.1274 -29.3063 7.18 6.07 nan 5.17 Y + SSTGLMC G000.0110-01.0237 0.0110 -1.0236 267.4151 -29.4564 8.32 6.30 8.34 6.32 N + ... + +As before, we should remember to disconnect from the hub once we are done:: + + >>> client.disconnect() + +The following is a full example of a script that can be used to receive and +read a table. It includes a loop that waits until the message is received, and +reads the table once it has:: + + import time + + from astropy.vo.samp import SAMPIntegratedClient + from astropy.table import Table + + # Instantiate the client and connect to the hub + client=SAMPIntegratedClient() + client.connect() + + # Set up a receiver class + class Receiver(object): + def __init__(self, client): + self.client = client + self.received = False + def receive_call(self, private_key, sender_id, msg_id, mtype, params, extra): + self.params = params + self.received = True + self.client.reply(msg_id, {"samp.status": "samp.ok", "samp.result": {}}) + def receive_notification(self, private_key, sender_id, mtype, params, extra): + self.params = params + self.received = True + + # Instantiate the receiver + r = Receiver(client) + + # Listen for any instructions to load a table + client.bind_receive_call("table.load.votable", r.receive_call) + client.bind_receive_notification("table.load.votable", r.receive_notification) + + # We now run the loop to wait for the 
message in a try/finally block so that if + # the program is interrupted e.g. by control-C, the client terminates + # gracefully. + + try: + + # We test every 0.1s to see if the hub has sent a message + while True: + time.sleep(0.1) + if r.received: + t = Table.read(r.params['url']) + break + + finally: + + client.disconnect() + + # Print out table + print t + +Sending an image to Ds9 and Aladin +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +As for tables, the easiest way to send a FITS image over SAMP is to make use of +the |SAMPIntegratedClient| class. Once Aladin or Ds9 are open, then first +instantiate a |SAMPIntegratedClient| instance and connect to the hub as before:: + + >>> from astropy.vo.samp import SAMPIntegratedClient + >>> client = SAMPIntegratedClient() + >>> client.connect() + +Next, we have to set up a dictionary that contains details about the image to +send. This should include ``url``, which is the URL to the file, and ``name``, +which is a human-readable name for the table. The URL can be a local URL +(starting with ``file:///``):: + + >>> params = {} + >>> params["url"] = 'file:///Users/tom/Desktop/MSX_E.fits' + >>> params["name"] = "MSX Band E Image of the Galactic Center" + +See `Sending a table to TOPCAT and Ds9`_ for an example of how to construct local URLs +more easily. Now we can set up the message itself. 
This includes the type of +message (here we use ``image.load.fits`` which indicates that a FITS image +should be loaded, and the details of the table that we set above:: + + >>> message = {} + >>> message["samp.mtype"] = "image.load.fits" + >>> message["samp.params"] = params + +Finally, we can broadcast this to all clients that are listening for +``table.load.votable`` messages:: + + >>> client.notify_all(message) + +As for `Sending a table to TOPCAT and Ds9`_, the +:meth:`~astropy.vo.samp.integrated_client.SAMPIntegratedClient.notify_all` +method will broadcast the image to all listening clients, and as for tables it +is possible to instead use the +:meth:`~astropy.vo.samp.integrated_client.SAMPIntegratedClient.notify` method +to send it to a specific client. + +Once finished, we should make sure we disconnect from the hub:: + + >>> client.disconnect() + +Receiving a table from Ds9 or Aladin +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Receiving images over SAMP is identical to `Receiving a table from TOPCAT`_, +with the execption that the message type should be ``image.load.fits`` instead +of ``table.load.votable``. Once the URL has been received, the FITS image can +be opened with:: + + >>> from astropy.io import fits + >>> fits.open(r.params['url']) + diff --git a/docs/vo/samp/index.rst b/docs/vo/samp/index.rst new file mode 100644 index 0000000..79717f9 --- /dev/null +++ b/docs/vo/samp/index.rst @@ -0,0 +1,67 @@ +.. include:: references.txt + +.. doctest-skip-all + +.. _vo-samp: + +*************************************************************** +SAMP (Simple Application Messaging Protocol (`astropy.vo.samp`) +*************************************************************** + +Introduction +============ + +`astropy.vo.samp` is an IVOA SAMP (Simple Application Messaging Protocol) +messaging system implementation in Python. It provides classes to easily: + +1. instantiate one or multiple Hubs; +2. interface an application or script to a running Hub; +3. 
create and manage a SAMP client. + +`astropy.vo.samp` provides also a stand-alone program ``samp_hub`` capable to +instantiate a persistent hub. + +SAMP is a protocol that is used by a number of other tools such as +`TOPCAT `_, +`SAO Ds9 `_, +and `Aladin `_, which means that it is possible to +send and receive data to and from these tools. The `astropy.vo.samp` package +also supports the 'web profile' for SAMP, which means that it can be used to +communicate with web SAMP clients. See the `sampjs +`_ library examples for more details. + +The following classes are available in `astropy.vo.samp`: + +* |SAMPHubServer|, which is used to instantiate a hub server that clients can + then connect to. +* |SAMPHubProxy|, which is used to connect to an existing hub (including hubs + started from other applications such as + `TOPCAT `_). +* |SAMPClient|, which is used to create a SAMP client +* |SAMPIntegratedClient|, which is the same as |SAMPClient| except that it has + a self-contained |SAMPHubProxy| to provide a simpler user interface. + +.. _IVOA Simple Application Messaging Protocol: http://www.ivoa.net/Documents/latest/SAMP.html + +Using `astropy.vo.samp` +======================= + +.. toctree:: + :maxdepth: 2 + + example_hub + example_table_image + example_clients + advanced_embed_samp_hub + +Reference/API +============= + +.. automodapi:: astropy.vo.samp + +Acknowledgments +=============== + +This code is adapted from the `SAMPy `__ +package written by Luigi Paioro, who has granted the Astropy project permission +to use the code under a BSD license. diff --git a/docs/vo/samp/references.txt b/docs/vo/samp/references.txt new file mode 100644 index 0000000..bfc9e62 --- /dev/null +++ b/docs/vo/samp/references.txt @@ -0,0 +1,5 @@ +.. |SAMPClient| replace:: :class:`~astropy.vo.samp.SAMPClient` +.. |SAMPIntegratedClient| replace:: :class:`~astropy.vo.samp.SAMPIntegratedClient` +.. |SAMPHubServer| replace:: :class:`~astropy.vo.samp.SAMPHubServer` +.. 
|SAMPHubProxy| replace:: :class:`~astropy.vo.samp.SAMPHubProxy` +.. |SAMPMsgReplierWrapper| replace:: :class:`~astropy.vo.samp.SAMPMsgReplierWrapper` diff --git a/docs/warnings.rst b/docs/warnings.rst new file mode 100644 index 0000000..f8dcccf --- /dev/null +++ b/docs/warnings.rst @@ -0,0 +1,55 @@ +.. _python-warnings: + +********************** +Python warnings system +********************** + +.. doctest-skip-all + +Astropy uses the Python :mod:`warnings` module to issue warning messages. The +details of using the warnings module are general to Python, and apply to any +Python software that uses this system. The user can suppress the warnings +using the python command line argument ``-W"ignore"`` when starting an +interactive python session. For example:: + + $ python -W"ignore" + +The user may also use the command line argument when running a python script as +follows:: + + $ python -W"ignore" myscript.py + +It is also possible to suppress warnings from within a python script. For +instance, the warnings issued from a single call to the +`astropy.io.fits.writeto` function may be suppressed from within a Python +script as follows:: + + >>> import warnings + >>> from astropy.io import fits + >>> warnings.filterwarnings('ignore', category=UserWarning, append=True) + >>> fits.writeto(filename, data, clobber=True) + +Astropy includes its own warning class, +`~astropy.utils.exceptions.AstropyUserWarning`, on which all warnings from +Astropy are based. So one can also ignore warnings from Astropy (while still +allowing through warnings from other libraries like Numpy) by using something +like:: + + >>> warnings.filterwarnings('ignore', category=AstropyUserWarning) + +However, warning filters may also be modified just within a certain context +using the `~warnings.catch_warnings` context manager:: + + >>> from warnings import catch_warnings + >>> with catch_warnings(): + ... warnings.filterwarnings('ignore', category=AstropyUserWarning) + ... 
fits.writeto(filename, data, clobber=True) + +Astropy also issues warnings when deprecated API features are used. If you +wish to *squelch* deprecation warnings, you can start Python with +``-Wi::DeprecationWarning``. This sets all deprecation warnings to ignored. There is +also an Astropy-specific `~astropy.utils.exceptions.AstropyDeprecationWarning` +which can be used to disable deprecation warnings from Astropy only. + +See http://docs.python.org/using/cmdline.html#cmdoption-unittest-discover-W for +more information on the -W argument. diff --git a/docs/wcs/examples/from_file.py b/docs/wcs/examples/from_file.py new file mode 100644 index 0000000..b43be74 --- /dev/null +++ b/docs/wcs/examples/from_file.py @@ -0,0 +1,43 @@ +# Load the WCS information from a fits header, and use it +# to convert pixel coordinates to world coordinates. + +from __future__ import division, print_function + +import numpy +from astropy import wcs +from astropy.io import fits +import sys + +def load_wcs_from_file(filename): + # Load the FITS hdulist using astropy.io.fits + hdulist = fits.open(filename) + + # Parse the WCS keywords in the primary HDU + w = wcs.WCS(hdulist[0].header) + + # Print out the "name" of the WCS, as defined in the FITS header + print(w.wcs.name) + + # Print out all of the settings that were parsed from the header + w.wcs.print_contents() + + # Some pixel coordinates of interest. + pixcrd = numpy.array([[0, 0], [24, 38], [45, 98]], numpy.float_) + + # Convert pixel coordinates to world coordinates + # The second argument is "origin" -- in this case we're declaring we + # have 1-based (Fortran-like) coordinates. + world = w.wcs_pix2world(pixcrd, 1) + print(world) + + # Convert the same coordinates back to pixel coordinates. + pixcrd2 = w.wcs_world2pix(world, 1) + print(pixcrd2) + + # These should be the same as the original pixel coordinates, modulo + # some floating-point error. 
+ assert numpy.max(numpy.abs(pixcrd - pixcrd2)) < 1e-6 + + +if __name__ == '__main__': + load_wcs_from_file(sys.argv[-1]) diff --git a/docs/wcs/examples/programmatic.py b/docs/wcs/examples/programmatic.py new file mode 100644 index 0000000..0772518 --- /dev/null +++ b/docs/wcs/examples/programmatic.py @@ -0,0 +1,44 @@ +# Set the WCS information manually by setting properties of the WCS +# object. + +from __future__ import division, print_function + +import numpy +from astropy import wcs +from astropy.io import fits + +# Create a new WCS object. The number of axes must be set +# from the start +w = wcs.WCS(naxis=2) + +# Set up an "Airy's zenithal" projection +# Vector properties may be set with Python lists, or Numpy arrays +w.wcs.crpix = [-234.75, 8.3393] +w.wcs.cdelt = numpy.array([-0.066667, 0.066667]) +w.wcs.crval = [0, -90] +w.wcs.ctype = ["RA---AIR", "DEC--AIR"] +w.wcs.set_pv([(2, 1, 45.0)]) + +# Some pixel coordinates of interest. +pixcrd = numpy.array([[0, 0], [24, 38], [45, 98]], numpy.float_) + +# Convert pixel coordinates to world coordinates +world = w.wcs_pix2world(pixcrd, 1) +print(world) + +# Convert the same coordinates back to pixel coordinates. +pixcrd2 = w.wcs_world2pix(world, 1) +print(pixcrd2) + +# These should be the same as the original pixel coordinates, modulo +# some floating-point error. +assert numpy.max(numpy.abs(pixcrd - pixcrd2)) < 1e-6 + +# Now, write out the WCS object as a FITS header +header = w.to_header() + +# header is an astropy.io.fits.Header object. We can use it to create a new +# PrimaryHDU and write it to a file. +hdu = fits.PrimaryHDU(header=header) +# Save to FITS file +# hdu.writeto('test.fits') diff --git a/docs/wcs/history.rst b/docs/wcs/history.rst new file mode 100644 index 0000000..8883d83 --- /dev/null +++ b/docs/wcs/history.rst @@ -0,0 +1,90 @@ +astropy.wcs History +=================== + +`astropy.wcs` began life as ``pywcs``. Earlier version numbers refer to +that package. 
+ +pywcs Version 1.11 +------------------ + +- Updated to wcslib version 4.8, which gives much more detailed error + messages. + +- Added functions get_pc() and get_cdelt(). These provide a way to + always get the canonical representation of the linear transformation + matrix, whether the header specified it in PC, CD or CROTA form. + +- Long-running process will now release the Python GIL to better + support Python multithreading. + +- The dimensions of the `~astropy.wcs.Wcsprm.cd` and + `~astropy.wcs.Wcsprm.pc` matrices were always returned as 2x2. They + now are sized according to naxis. + +- Supports Python 3.x + +- Builds on Microsoft Windows without severely patching wcslib. + +- Lots of new unit tests + +- ``pywcs`` will now run without ``pyfits``, though the SIP and distortion + lookup table functionality is unavailable. + +- Setting `~astropy.wcs.Wcsprm.cunit` will now verify that the values + are valid unit strings. + +pywcs Version 1.10 +------------------ + +- Adds a ``UnitConversion`` class, which gives access to wcslib's unit + conversion functionality. Given two convertible unit strings, pywcs + can convert arrays of values from one to the other. + +- Now uses wcslib 4.7 + +- Changes to some wcs values would not always calculate secondary values. + +pywcs Version 1.9 +----------------- + +- Support binary image arrays and pixel list format WCS by presenting + a way to call wcslib's ``wcsbth()`` + +- Updated underlying wcslib to version 4.5, which fixes the following: + + - Fixed the interpretation of VELREF when translating + AIPS-convention spectral types. Such translation is now handled + by a new special- purpose function, spcaips(). The wcsprm + struct has been augmented with an entry for velref which is + filled by wcspih() and wcsbth(). Previously, selection by + VELREF of the radio or optical velocity convention for type VELO + was not properly handled. 
+ +Bugs +```` + +- The `~astropy.wcs.Wcsprm.pc` member is now available with a default + raw `~astropy.wcs.Wcsprm` object. + +- Make properties that return arrays read-only, since modifying a + (mutable) array could result in secondary values not being + recomputed based on those changes. + +- `float` properties can now be set using `int` values + +pywcs Version 1.3a1 +------------------- + +Earlier versions of pywcs had two versions of every conversion method:: + + X(...) -- treats the origin of pixel coordinates at (0, 0) + X_fits(...) -- treats the origin of pixel coordinates at (1, 1) + +From version 1.3 onwards, there is only one method for each +conversion, with an 'origin' argument: + + - 0: places the origin at (0, 0), which is the C/Numpy convention. + + - 1: places the origin at (1, 1), which is the Fortran/FITS + convention. + diff --git a/docs/wcs/index.rst b/docs/wcs/index.rst new file mode 100644 index 0000000..99f4cc0 --- /dev/null +++ b/docs/wcs/index.rst @@ -0,0 +1,198 @@ +.. doctest-skip-all +.. _astropy-wcs: + +*************************************** +World Coordinate System (`astropy.wcs`) +*************************************** + +.. _wcslib: http://www.atnf.csiro.au/~mcalabre/WCS/ +.. _Paper IV: http://www.atnf.csiro.au/people/mcalabre/WCS/index.html +.. _SIP: http://irsa.ipac.caltech.edu/data/SPITZER/docs/files/spitzer/shupeADASS.pdf +.. _ds9: http://hea-www.harvard.edu/RD/ds9/ + +Introduction +============ + +`astropy.wcs` contains utilities for managing World Coordinate System +(WCS) transformations in FITS files. These transformations map the +pixel locations in an image to their real-world units, such as their +position on the sky sphere. + +It is at its base a wrapper around Mark Calabretta's `wcslib`_, but +also adds support for the Simple Imaging Polynomial (`SIP`_) +convention and table lookup distortions as defined in WCS `Paper IV`_. +Each of these transformations can be used independently or together in +a standard pipeline. 
+ +Getting Started +=============== + +The basic workflow is as follows: + + 1. ``from astropy import wcs`` + + 2. Call the `~astropy.wcs.WCS` constructor with an + `astropy.io.fits` header and/or hdulist object. + + 3. Optionally, if the FITS file uses any deprecated or + non-standard features, you may need to call one of the + `~astropy.wcs.wcs.WCS.fix` methods on the object. + + 4. Use one of the following transformation methods: + + - `~astropy.wcs.wcs.WCS.all_pix2world`: Perform all three + transformations from pixel to world coordinates. + + - `~astropy.wcs.wcs.WCS.wcs_pix2world`: Perform just the core + WCS transformation from pixel to world coordinates. + + - `~astropy.wcs.wcs.WCS.all_world2pix`: Perform all three + transformations from world to pixel coordinates, using an + iterative method if necessary. + + - `~astropy.wcs.wcs.WCS.wcs_world2pix`: Perform just the core + WCS transformation from world to pixel coordinates. + + - `~astropy.wcs.wcs.WCS.sip_pix2foc`: Convert from pixel to + focal plane coordinates using the `SIP`_ polynomial + coefficients. + + - `~astropy.wcs.wcs.WCS.sip_foc2pix`: Convert from focal plane + to pixel coordinates using the `SIP`_ polynomial + coefficients. + + - `~astropy.wcs.wcs.WCS.p4_pix2foc`: Convert from pixel to + focal plane coordinates using the table lookup distortion + method described in `Paper IV`_. + + - `~astropy.wcs.wcs.WCS.det2im`: Convert from detector + coordinates to image coordinates. Commonly used for narrow + column correction. + +For example, to convert pixel coordinates to world coordinates:: + + >>> from astropy.wcs import WCS + >>> w = WCS('image.fits') + >>> lon, lat = w.all_pix2world(30, 40, 0) + >>> print(lon, lat) + + +Using `astropy.wcs` +=================== + +Loading WCS information from a FITS file +---------------------------------------- + +This example loads a FITS file (supplied on the commandline) and uses +the WCS cards in its primary header to transform. + +.. 
literalinclude:: examples/from_file.py + :language: python + +Building a WCS structure programmatically +----------------------------------------- + +This example, rather than starting from a FITS header, sets WCS values +programmatically, uses those settings to transform some points, and then +saves those settings to a new FITS header. + +.. literalinclude:: examples/programmatic.py + :language: python + +Validating the WCS keywords in a FITS file +------------------------------------------ + +Astropy includes a commandline tool, ``wcslint`` to check the WCS +keywords in a FITS file:: + + > wcslint invalid.fits + HDU 1: + WCS key ' ': + - RADECSYS= 'ICRS ' / Astrometric system + RADECSYS is non-standard, use RADESYSa. + - The WCS transformation has more axes (2) than the image it is + associated with (0) + - 'celfix' made the change 'PV1_5 : Unrecognized coordinate + transformation parameter'. + + HDU 2: + WCS key ' ': + - The WCS transformation has more axes (3) than the image it is + associated with (0) + - 'celfix' made the change 'In CUNIT2 : Mismatched units type + 'length': have 'Hz', want 'm''. + - 'unitfix' made the change 'Changed units: 'HZ ' -> 'Hz''. + +Bounds checking +--------------- + +Bounds checking is enabled by default, and any computed world +coordinates outside of [-180°, 180°] for longitude and [-90°, 90°] in +latitude are marked as invalid. To disable this behavior, use +`astropy.wcs.Wcsprm.bounds_check`. + +Supported projections +===================== + +As `astropy.wcs` is based on `wcslib`_, it supports the standard +projections defined in the WCS papers. These projection codes are +specified in the second part of the ``CUNITn`` keywords (accessible +through `Wcsprm.cunit `), for example, +``RA-TAN-SIP``. 
The supported projection codes are: + +- ``AZP``: zenithal/azimuthal perspective +- ``SZP``: slant zenithal perspective +- ``TAN``: gnomonic +- ``STG``: stereographic +- ``SIN``: orthographic/synthesis +- ``ARC``: zenithal/azimuthal equidistant +- ``ZPN``: zenithal/azimuthal polynomial +- ``ZEA``: zenithal/azimuthal equal area +- ``AIR``: Airy's projection +- ``CYP``: cylindrical perspective +- ``CEA``: cylindrical equal area +- ``CAR``: plate carrée +- ``MER``: Mercator's projection +- ``COP``: conic perspective +- ``COE``: conic equal area +- ``COD``: conic equidistant +- ``COO``: conic orthomorphic +- ``SFL``: Sanson-Flamsteed ("global sinusoid") +- ``PAR``: parabolic +- ``MOL``: Mollweide's projection +- ``AIT``: Hammer-Aitoff +- ``BON``: Bonne's projection +- ``PCO``: polyconic +- ``TSC``: tangential spherical cube +- ``CSC``: COBE quadrilateralized spherical cube +- ``QSC``: quadrilateralized spherical cube +- ``HPX``: HEALPix +- ``XPH``: HEALPix polar, aka "butterfly" + +Other information +================= + +.. toctree:: + :maxdepth: 1 + + relax + history + + + +See Also +======== + +- `wcslib`_ + +Reference/API +============= + +.. automodapi:: astropy.wcs + + +Acknowledgments and Licenses +============================ + +wcslib is licenced under the `GNU Lesser General Public License +`_. diff --git a/docs/wcs/references.txt b/docs/wcs/references.txt new file mode 100644 index 0000000..fd5957f --- /dev/null +++ b/docs/wcs/references.txt @@ -0,0 +1,6 @@ +.. _wcslib: http://www.atnf.csiro.au/~mcalabre/WCS/ +.. _pyfits: http://www.stsci.edu/resources/software_hardware/pyfits +.. _Paper IV: http://www.atnf.csiro.au/people/mcalabre/WCS/index.html +.. _SIP: http://irsa.ipac.caltech.edu/data/SPITZER/docs/files/spitzer/shupeADASS.pdf +.. _ds9: http://hea-www.harvard.edu/RD/ds9/ + diff --git a/docs/wcs/relax.rst b/docs/wcs/relax.rst new file mode 100644 index 0000000..aa641c6 --- /dev/null +++ b/docs/wcs/relax.rst @@ -0,0 +1,377 @@ +.. 
include:: references.txt + +.. _relax: + +Relax constants +=============== + +The ``relax`` keyword argument controls the handling of non-standard +FITS WCS keywords. + +Note that the default value of ``relax`` is `True` for reading (to +accept all non standard keywords), and `False` for writing (to write +out only standard keywords), in accordance with `Postel's prescription +`_: + + “Be liberal in what you accept, and conservative in what you send.” + +.. _relaxread: + +Header-reading relaxation constants +----------------------------------- + +`~astropy.wcs.WCS`, `~astropy.wcs.Wcsprm` and +`~astropy.wcs.find_all_wcs` have a *relax* argument, which may be +either `True`, `False` or an `int`. + +- If `True` (default), all non-standard WCS extensions recognized by the parser + will be handled. + +- If `False`, none of the extensions (even those in the + errata) will be handled. Non-conformant keywords will be handled in + the same way as non-WCS keywords in the header, i.e. by simply + ignoring them. + +- If an `int`, it is a bit field to provide fine-grained control over + what non-standard WCS keywords to accept. The flag bits are subject + to change in future and should be set by using the constants + beginning with ``WCSHDR_`` in the `astropy.wcs` module. + + For example, to accept ``CD00i00j`` and ``PC00i00j`` use:: + + relax = astropy.wcs.WCSHDR_CD00i00j | astropy.wcs.WCSHDR_PC00i00j + + The parser always treats ``EPOCH`` as subordinate to ``EQUINOXa`` if + both are present, and ``VSOURCEa`` is always subordinate to + ``ZSOURCEa``. + + Likewise, ``VELREF`` is subordinate to the formalism of WCS Paper + III. + +The flag bits are: + +- ``WCSHDR_none``: Don't accept any extensions (not even those in the + errata). Treat non-conformant keywords in the same way as non-WCS + keywords in the header, i.e. simply ignore them. (This is + equivalent to passing `False`) + +- ``WCSHDR_all``: Accept all extensions recognized by the parser. 
(This + is equivalent to the default behavior or passing `True`). + +- ``WCSHDR_CROTAia``: Accept ``CROTAia``, ``iCROTna``, ``TCROTna`` +- ``WCSHDR_EPOCHa``: Accept ``EPOCHa``. +- ``WCSHDR_VELREFa``: Accept ``VELREFa``. + + The constructor always recognizes the AIPS-convention + keywords, ``CROTAn``, ``EPOCH``, and ``VELREF`` for the + primary representation ``(a = ' ')`` but alternates are + non-standard. + + The constructor accepts ``EPOCHa`` and ``VELREFa`` only if + ``WCSHDR_AUXIMG`` is also enabled. + +- ``WCSHDR_CD00i00j``: Accept ``CD00i00j``. +- ``WCSHDR_PC00i00j``: Accept ``PC00i00j``. +- ``WCSHDR_PROJPn``: Accept ``PROJPn``. + + These appeared in early drafts of WCS Paper I+II (before they + were split) and are equivalent to ``CDi_ja``, ``PCi_ja``, and + ``PVi_ma`` for the primary representation ``(a = ' ')``. + ``PROJPn`` is equivalent to ``PVi_ma`` with ``m`` = ``n`` <= + 9, and is associated exclusively with the latitude axis. + +- ``WCSHDR_RADECSYS``: Accept ``RADECSYS``. This appeared in early + drafts of WCS Paper I+II and was subsequently replaced by + ``RADESYSa``. The constructor accepts ``RADECSYS`` only if + ``WCSHDR_AUXIMG`` is also enabled. + +- ``WCSHDR_VSOURCE``: Accept ``VSOURCEa`` or ``VSOUna``. This appeared + in early drafts of WCS Paper III and was subsequently dropped in + favour of ``ZSOURCEa`` and ``ZSOUna``. The constructor accepts + ``VSOURCEa`` only if ``WCSHDR_AUXIMG`` is also enabled. + +- ``WCSHDR_DOBSn``: Allow ``DOBSn``, the column-specific analogue of + ``DATE-OBS``. By an oversight this was never formally defined in + the standard. + +- ``WCSHDR_LONGKEY``: Accept long forms of the alternate binary table + and pixel list WCS keywords, i.e. with "a" non- blank. 
+ Specifically:: + + jCRPXna TCRPXna : jCRPXn jCRPna TCRPXn TCRPna CRPIXja + - TPCn_ka : - ijPCna - TPn_ka PCi_ja + - TCDn_ka : - ijCDna - TCn_ka CDi_ja + iCDLTna TCDLTna : iCDLTn iCDEna TCDLTn TCDEna CDELTia + iCUNIna TCUNIna : iCUNIn iCUNna TCUNIn TCUNna CUNITia + iCTYPna TCTYPna : iCTYPn iCTYna TCTYPn TCTYna CTYPEia + iCRVLna TCRVLna : iCRVLn iCRVna TCRVLn TCRVna CRVALia + iPVn_ma TPVn_ma : - iVn_ma - TVn_ma PVi_ma + iPSn_ma TPSn_ma : - iSn_ma - TSn_ma PSi_ma + + where the primary and standard alternate forms together with the + image-header equivalent are shown rightwards of the colon. + + The long form of these keywords could be described as quasi- + standard. ``TPCn_ka``, ``iPVn_ma``, and ``TPVn_ma`` appeared by + mistake in the examples in WCS Paper II and subsequently these and + also ``TCDn_ka``, ``iPSn_ma`` and ``TPSn_ma`` were legitimized by + the errata to the WCS papers. + + Strictly speaking, the other long forms are non-standard and in fact + have never appeared in any draft of the WCS papers nor in the + errata. However, as natural extensions of the primary form they are + unlikely to be written with any other intention. Thus it should be + safe to accept them provided, of course, that the resulting keyword + does not exceed the 8-character limit. + + If ``WCSHDR_CNAMn`` is enabled then also accept:: + + iCNAMna TCNAMna : --- iCNAna --- TCNAna CNAMEia + iCRDEna TCRDEna : --- iCRDna --- TCRDna CRDERia + iCSYEna TCSYEna : --- iCSYna --- TCSYna CSYERia + + Note that ``CNAMEia``, ``CRDERia``, ``CSYERia``, and their variants + are not used by `astropy.wcs` but are stored as auxiliary information. + +- ``WCSHDR_CNAMn``: Accept ``iCNAMn``, ``iCRDEn``, ``iCSYEn``, + ``TCNAMn``, ``TCRDEn``, and ``TCSYEn``, i.e. with ``a`` blank. + While non-standard, these are the obvious analogues of ``iCTYPn``, + ``TCTYPn``, etc. 
+ +- ``WCSHDR_AUXIMG``: Allow the image-header form of an auxiliary WCS + keyword with representation-wide scope to provide a default value + for all images. This default may be overridden by the + column-specific form of the keyword. + + For example, a keyword like ``EQUINOXa`` would apply to all image + arrays in a binary table, or all pixel list columns with alternate + representation ``a`` unless overridden by ``EQUIna``. + + Specifically the keywords are:: + + LATPOLEa for LATPna + LONPOLEa for LONPna + RESTFREQ for RFRQna + RESTFRQa for RFRQna + RESTWAVa for RWAVna + + whose keyvalues are actually used by WCSLIB, and also keywords that + provide auxiliary information that is simply stored in the wcsprm + struct:: + + EPOCH - ... (No column-specific form.) + EPOCHa - ... Only if WCSHDR_EPOCHa is set. + EQUINOXa for EQUIna + RADESYSa for RADEna + RADECSYS for RADEna ... Only if WCSHDR_RADECSYS is set. + SPECSYSa for SPECna + SSYSOBSa for SOBSna + SSYSSRCa for SSRCna + VELOSYSa for VSYSna + VELANGLa for VANGna + VELREF - ... (No column-specific form.) + VELREFa - ... Only if WCSHDR_VELREFa is set. + VSOURCEa for VSOUna ... Only if WCSHDR_VSOURCE is set. + WCSNAMEa for WCSNna ... Or TWCSna (see below). + ZSOURCEa for ZSOUna + + DATE-AVG for DAVGn + DATE-OBS for DOBSn + MJD-AVG for MJDAn + MJD-OBS for MJDOBn + OBSGEO-X for OBSGXn + OBSGEO-Y for OBSGYn + OBSGEO-Z for OBSGZn + + where the image-header keywords on the left provide default values + for the column specific keywords on the right. + + Keywords in the last group, such as ``MJD-OBS``, apply to all + alternate representations, so ``MJD-OBS`` would provide a default + value for all images in the header. + + This auxiliary inheritance mechanism applies to binary table image + arrays and pixel lists alike. Most of these keywords have no + default value, the exceptions being ``LONPOLEa`` and ``LATPOLEa``, + and also ``RADESYSa`` and ``EQUINOXa`` which provide defaults for + each other. 
Thus the only potential difficulty in using + ``WCSHDR_AUXIMG`` is that of erroneously inheriting one of these four + keywords. + + Unlike ``WCSHDR_ALLIMG``, the existence of one (or all) of these + auxiliary WCS image header keywords will not by itself cause a + `~astropy.wcs.Wcsprm` object to be created for alternate + representation ``a``. This is because they do not provide + sufficient information to create a non-trivial coordinate + representation when used in conjunction with the default values of + those keywords, such as ``CTYPEia``, that are parameterized by axis + number. + +- ``WCSHDR_ALLIMG``: Allow the image-header form of *all* image header + WCS keywords to provide a default value for all image arrays in a + binary table (n.b. not pixel list). This default may be overridden + by the column-specific form of the keyword. + + For example, a keyword like ``CRPIXja`` would apply to all image + arrays in a binary table with alternate representation ``a`` + unless overridden by ``jCRPna``. + + Specifically the keywords are those listed above for ``WCSHDR_AUXIMG`` + plus:: + + WCSAXESa for WCAXna + + which defines the coordinate dimensionality, and the following + keywords which are parameterized by axis number:: + + CRPIXja for jCRPna + PCi_ja for ijPCna + CDi_ja for ijCDna + CDELTia for iCDEna + CROTAi for iCROTn + CROTAia - ... Only if WCSHDR_CROTAia is set. + CUNITia for iCUNna + CTYPEia for iCTYna + CRVALia for iCRVna + PVi_ma for iVn_ma + PSi_ma for iSn_ma + + CNAMEia for iCNAna + CRDERia for iCRDna + CSYERia for iCSYna + + where the image-header keywords on the left provide default values + for the column specific keywords on the right. + + This full inheritance mechanism only applies to binary table image + arrays, not pixel lists, because in the latter case there is no + well-defined association between coordinate axis number and column + number. 
+ + Note that ``CNAMEia``, ``CRDERia``, ``CSYERia``, and their variants + are not used by pywcs but are stored in the `~astropy.wcs.Wcsprm` + object as auxiliary information. + + Note especially that at least one `~astropy.wcs.Wcsprm` object will + be returned for each ``a`` found in one of the image header keywords + listed above: + + - If the image header keywords for ``a`` **are not** inherited by + a binary table, then the struct will not be associated with any + particular table column number and it is up to the user to + provide an association. + + - If the image header keywords for ``a`` **are** inherited by a + binary table image array, then those keywords are considered to + be "exhausted" and do not result in a separate + `~astropy.wcs.Wcsprm` object. + +.. _relaxwrite: + +Header-writing relaxation constants +----------------------------------- + +`~astropy.wcs.wcs.WCS.to_header` and `~astropy.wcs.wcs.WCS.to_header_string` +have a *relax* argument which may be either `True`, `False` or an +`int`. + +- If `True`, write all recognized extensions. + +- If `False` (default), write all extensions that are considered to be + safe and recommended, equivalent to ``WCSHDO_safe`` (described below). + +- If an `int`, it is a bit field to provide fine-grained control over + what non-standard WCS keywords to accept. The flag bits are subject + to change in future and should be set by using the constants + beginning with ``WCSHDO_`` in the `astropy.wcs` module. + +The flag bits are: + +- ``WCSHDO_none``: Don't use any extensions. + +- ``WCSHDO_all``: Write all recognized extensions, equivalent to setting + each flag bit. + +- ``WCSHDO_safe``: Write all extensions that are considered to be safe + and recommended. + +- ``WCSHDO_DOBSn``: Write ``DOBSn``, the column-specific analogue of + ``DATE-OBS`` for use in binary tables and pixel lists. 
WCS Paper + III introduced ``DATE-AVG`` and ``DAVGn`` but by an oversight + ``DOBSn`` (the obvious analogy) was never formally defined by the + standard. The alternative to using ``DOBSn`` is to write + ``DATE-OBS`` which applies to the whole table. This usage is + considered to be safe and is recommended. + +- ``WCSHDO_TPCn_ka``: WCS Paper I defined + + - ``TPn_ka`` and ``TCn_ka`` for pixel lists + + but WCS Paper II uses ``TPCn_ka`` in one example and subsequently + the errata for the WCS papers legitimized the use of + + - ``TPCn_ka`` and ``TCDn_ka`` for pixel lists + + provided that the keyword does not exceed eight characters. This + usage is considered to be safe and is recommended because of the + non-mnemonic terseness of the shorter forms. + +- ``WCSHDO_PVn_ma``: WCS Paper I defined + + - ``iVn_ma`` and ``iSn_ma`` for bintables and + - ``TVn_ma`` and ``TSn_ma`` for pixel lists + + but WCS Paper II uses ``iPVn_ma`` and ``TPVn_ma`` in the examples + and subsequently the errata for the WCS papers legitimized the use + of + + - ``iPVn_ma`` and ``iPSn_ma`` for bintables and + - ``TPVn_ma`` and ``TPSn_ma`` for pixel lists + + provided that the keyword does not exceed eight characters. This + usage is considered to be safe and is recommended because of the + non-mnemonic terseness of the shorter forms. + +- ``WCSHDO_CRPXna``: For historical reasons WCS Paper I defined + + - ``jCRPXn``, ``iCDLTn``, ``iCUNIn``, ``iCTYPn``, and ``iCRVLn`` for + bintables and + - ``TCRPXn``, ``TCDLTn``, ``TCUNIn``, ``TCTYPn``, and ``TCRVLn`` for + pixel lists + + for use without an alternate version specifier. However, because + of the eight-character keyword constraint, in order to accommodate + column numbers greater than 99 WCS Paper I also defined + + - ``jCRPna``, ``iCDEna``, ``iCUNna``, ``iCTYna`` and ``iCRVna`` for + bintables and + - ``TCRPna``, ``TCDEna``, ``TCUNna``, ``TCTYna`` and ``TCRVna`` for + pixel lists + + for use with an alternate version specifier (the ``a``). 
Like the + ``PC``, ``CD``, ``PV``, and ``PS`` keywords there is an obvious + tendency to confuse these two forms for column numbers up to 99. + It is very unlikely that any parser would reject keywords in the + first set with a non-blank alternate version specifier so this + usage is considered to be safe and is recommended. + +- ``WCSHDO_CNAMna``: WCS Papers I and III defined + + - ``iCNAna``, ``iCRDna``, and ``iCSYna`` for bintables and + - ``TCNAna``, ``TCRDna``, and ``TCSYna`` for pixel lists + + By analogy with the above, the long forms would be + + - ``iCNAMna``, ``iCRDEna``, and ``iCSYEna`` for bintables and + - ``TCNAMna``, ``TCRDEna``, and ``TCSYEna`` for pixel lists + + Note that these keywords provide auxiliary information only, none + of them are needed to compute world coordinates. This usage is + potentially unsafe and is not recommended at this time. + +- ``WCSHDO_WCSNna``: Write ``WCSNna`` instead of ``TWCSna`` for pixel + lists. While the constructor treats ``WCSNna`` and ``TWCSna`` as + equivalent, other parsers may not. Consequently, this usage is + potentially unsafe and is not recommended at this time. + +- ``WCSHDO_SIP``: Write out Simple Imaging Polynomial (SIP) keywords. diff --git a/docs/whatsnew/0.1.rst b/docs/whatsnew/0.1.rst new file mode 100644 index 0000000..c0b7f60 --- /dev/null +++ b/docs/whatsnew/0.1.rst @@ -0,0 +1,26 @@ +========================= +What's New in Astropy 0.1 +========================= + +This was the initial version of Astropy, released on June 19, 2012. It was +released primarily as a "developer preview" for developers interested in +working directly on Astropy, on affiliated packages, or on other software that +might integrate with Astropy. 
+ +Astropy 0.1 integrated several existing packages under a single ``astropy`` +package with a unified installer, including: + + * asciitable as `astropy.io.ascii` + * PyFITS as `astropy.io.fits` + * votable as ``astropy.io.vo`` + * PyWCS as `astropy.wcs` + +It also added the beginnings of the :mod:`astropy.cosmology` package, and new +common data structures for science data in the :mod:`astropy.nddata` and +:mod:`astropy.table` packages. + +It also laid much of the groundwork for Astropy's installation and +documentation frameworks, as well as tools for managing configuration and data +management. These facilities are designed to be shared by Astropy's affiliated +packages in the hopes of providing a framework on which other Astronomy-related +Python packages can build. diff --git a/docs/whatsnew/0.2.rst b/docs/whatsnew/0.2.rst new file mode 100644 index 0000000..7b78005 --- /dev/null +++ b/docs/whatsnew/0.2.rst @@ -0,0 +1,9 @@ +.. _whatsnew-0.2: + +========================= +What's New in Astropy 0.2 +========================= + +See this page in the `Astropy v0.2 documentation`__. + +__ http://docs.astropy.org/en/v0.2.5/whatsnew/0.2.html diff --git a/docs/whatsnew/0.3.rst b/docs/whatsnew/0.3.rst new file mode 100644 index 0000000..9f44112 --- /dev/null +++ b/docs/whatsnew/0.3.rst @@ -0,0 +1,9 @@ +.. _whatsnew-0.3: + +========================== +What's New in Astropy 0.3? +========================== + +See this page in the `Astropy v0.3 documentation`__. + +__ http://docs.astropy.org/en/v0.3.2/whatsnew/0.3.html diff --git a/docs/whatsnew/0.4.rst b/docs/whatsnew/0.4.rst new file mode 100644 index 0000000..155c884 --- /dev/null +++ b/docs/whatsnew/0.4.rst @@ -0,0 +1,225 @@ +.. doctest-skip-all + +.. _whatsnew-0.4: + +========================== +What's New in Astropy 0.4? +========================== + +Overview +-------- + +Astropy 0.4 is a major release that adds new functionality since the +0.3.x series of releases. 
A new sub-package is included (see `SAMP`_), +a major overhaul of the :ref:`Coordinates ` +sub-package has been completed (see `Coordinates`_), +and many new features and improvements have been implemented for the +existing sub-packages. In addition to usability improvements, we have +made a number of changes in the infrastructure for setting up/installing +the package (see `astropy-helpers package`_), as well as reworking the +configuration system (see `Configuration`_). + +In addition to these major changes, a large number of smaller +improvements have occurred. Since v0.3, by the numbers: + +* 819 issues have been closed +* 511 pull requests have been merged +* 57 distinct people have contributed code + + +Coordinates +----------- + +The :ref:`astropy-coordinates` sub-package has been largely re-designed based +on broad community discussion and experience with v0.2 and v0.3. The key +motivation was to implement coordinates within an extensible framework that +cleanly separates the distinct aspects of data representation, coordinate +frame representation and transformation, and user interface. This is described +in the `APE5 `_ +document. Details of the new usage are given in the :ref:`astropy-coordinates` +section of the documentation. + +*An important point is that this sub-package is now considered stable and we do +not expect any further major interface changes.* + +For most users the major change is that the recommended user interface to +coordinate functionality is the `~astropy.coordinates.SkyCoord` class +instead of classes like `~astropy.coordinates.ICRS` or +`~astropy.coordinates.Galactic` (which are now +called "frame" classes). 
For example:: + + >>> from astropy import units as u + >>> from astropy.coordinates import SkyCoord + >>> coordinate = SkyCoord(123.4*u.deg, 56.7*u.deg, frame='icrs') + +The frame classes can still be used to create coordinate objects as before, but +they are now more powerful because they can represent abstract coordinate +frames without underlying data. The more typical use for frame classes is now:: + + >>> from astropy.coordinates import FK4 # Or ICRS, Galactic, or similar + >>> fk4_frame = FK4(equinox='J1980.0', obstime='2011-06-12T01:12:34') + >>> coordinate.transform_to(fk4_frame) + + +At the lowest level of the framework are the representation classes which +describe how to represent a point in a frame as a tuple of quantities, for +instance as spherical, cylindrical, or cartesian coordinates. Any coordinate +object can now be created using values in a number of common representations +and be displayed using those representations. For example:: + + >>> coordinate = SkyCoord(1*u.pc, 2*u.pc, 3*u.pc, representation='cartesian') + >>> coordinate + + + >>> coordinate.representation = 'physicsspherical' + >>> coordinate + + +SAMP +---- + +The :ref:`vo-samp` sub-package is a new sub-package (adapted from the `SAMPy +package `_) that contains an +implementation of the `Simple Application Messaging Protocol (SAMP) +`_ standard that allows communication +with any SAMP-enabled application (such as `TOPCAT +`_, `SAO Ds9 +`_, and `Aladin +`_). 
This sub-package includes both classes for a +hub and a client, as well as an *integrated client* which automatically +connects to any running SAMP hub and acts as a client:: + + >>> from astropy.vo.samp import SAMPIntegratedClient + >>> client = SAMPIntegratedClient() + >>> client.connect() + +We can then use the client to communicate with other clients:: + + >>> client.get_registered_clients() + ['hub', 'c1', 'c2'] + >>> client.get_metadata('c1') + {'author.affiliation': 'Astrophysics Group, Bristol University', + 'author.email': 'm.b.taylor@bristol.ac.uk', + 'author.name': 'Mark Taylor', + 'home.page': 'http://www.starlink.ac.uk/topcat/', + 'samp.description.text': 'Tool for OPerations on Catalogues And Tables', + 'samp.documentation.url': 'http://127.0.0.1:2525/doc/sun253/index.html', + 'samp.icon.url': 'http://127.0.0.1:2525/doc/images/tc_sok.gif', + 'samp.name': 'topcat', + 'topcat.version': '4.0-1'} + +and we can then send for example tables and images over SAMP to other +applications (see :ref:`vo-samp` for examples of how to do this). + +Quantity +-------- +The `~astropy.units.Quantity` class has seen a series of optimizations +and is now substantially faster. Additionally, the `~astropy.time`, +`~astropy.coordinates`, and `~astropy.table` subpackages integrate +better with `~astropy.units.Quantity`, with further improvements on the +way for `~astropy.table`. See :doc:`/units/quantity` and the other +subpackage documentation sections for more details. + +Inspecting FITS headers from the command line +--------------------------------------------- + +The :ref:`astropy-io-fits` sub-package now provides a command line script for +inspecting the header(s) of a FITS file. With Astropy 0.4 installed, run +``fitsheader file.fits`` in your terminal to print the header information to +the screen in a human-readable format. Run ``fitsheader --help`` to see the +full usage documentation. 
+ +Reading and writing HTML tables +------------------------------- + +The :ref:`io-ascii` sub-package now provides the capability to read a table +within an HTML file or web URL into an astropy `~astropy.table.Table` object. +This requires the `BeautifulSoup4 +`_ package to be installed. +Conversely a `~astropy.table.Table` object can now be written out as an HTML +table. + +Documentation URL changes +------------------------- + +Starting in v0.4, the astropy documentation (and any package that uses +``astropy-helpers``) will show the full name of functions and classes +prefixed by the intended user-facing location. This is in contrast to +previous versions, which pointed to the actual implementation module, +rather than the intended public API location. + +This will affect URLs pointing to specific documentation pages. For +example, this URL points to the v0.3 location of the +`astropy.cosmology.luminosity_distance` function: + +* http://docs.astropy.org/en/v0.3/api/astropy.cosmology.funcs.luminosity_distance.html + +while the appropriate URL for v0.4 and later is: + +* http://docs.astropy.org/en/v0.4/api/astropy.cosmology.luminosity_distance.html + +astropy-helpers package +----------------------- + +We have now extracted our set-up and documentation utilities into a separate +package, `astropy-helpers `_. In +practice, this does not change anything from a user point of view, but it is +a big internal change that will allow any other packages to benefit from the +set-up utilities developed for the core package without having to first install +astropy. + +Configuration +------------- + +The configuration framework has been re-factored based on the design +described in +`APE3 `_. +If you have previously edited the astropy configuration file (typically +located at ``~/.astropy/config/astropy.cfg``) then you should read over +:ref:`config-0-4-transition` in order to understand how to update it +to the new mechanism. 
+ +Deprecation and backward-incompatible changes +--------------------------------------------- + +- ``Quantity`` comparisons with ``==`` or ``!=`` now always return ``True`` + or ``False``, even if units do not match (for which case a ``UnitsError`` + used to be raised). [#2328] + +- The functional interface for `astropy.cosmology` (e.g. + ``cosmology.H(z=0.5)``) is now deprecated in favor of the + object-oriented approach (``WMAP9.H(z=0.5)``). [#2343] + +- The `astropy.coordinates` sub-package has undergone major changes for + implementing the + `APE5 `_ plan + for the package. A compatibility layer has been added that will allow + common use cases of pre-v0.4 coordinates to work, but this layer will be + removed in the next major version. Hence, any use of the coordinates + package should be adapted to the new framework. Additionally, the + compatibility layer cannot be used for convenience functions (like the + ``match_catalog_*()`` functions), as these have been moved to + `~astropy.coordinates.SkyCoord`. From this point on, major changes to the + coordinates classes are not expected. [#2422] + +- The configuration framework has been re-designed to the scheme of + `APE3 `_. + The previous framework based on `~astropy.config.ConfigurationItem` is + deprecated, and will be removed in a future release. Affiliated + packages should update to the new configuration system, and any users + who have customized their configuration file should migrate to the new + configuration approach. Until they do, warnings will appear prompting + them to do so. + +Full change log +--------------- + +To see a detailed list of all changes in version 0.4 and prior, please see the +:ref:`changelog`. + +Note on future versions +----------------------- + +While the current release supports Python 2.6, 2.7, and 3.1 to 3.4, the next +release (1.0) will drop support for Python 3.1 and 3.2. 
+ diff --git a/docs/whatsnew/index.rst b/docs/whatsnew/index.rst new file mode 100644 index 0000000..8c3310b --- /dev/null +++ b/docs/whatsnew/index.rst @@ -0,0 +1,11 @@ +===================== +Major Release History +===================== + +.. toctree:: + :maxdepth: 1 + + 0.4 + 0.3 + 0.2 + 0.1 diff --git a/ez_setup.py b/ez_setup.py new file mode 100644 index 0000000..9dc2c87 --- /dev/null +++ b/ez_setup.py @@ -0,0 +1,382 @@ +#!python +"""Bootstrap setuptools installation + +If you want to use setuptools in your package's setup.py, just include this +file in the same directory with it, and add this to the top of your setup.py:: + + from ez_setup import use_setuptools + use_setuptools() + +If you want to require a specific version of setuptools, set a download +mirror, or use an alternate download directory, you can do so by supplying +the appropriate options to ``use_setuptools()``. + +This file can also be run as a script to install or upgrade setuptools. +""" +import os +import shutil +import sys +import tempfile +import tarfile +import optparse +import subprocess +import platform + +from distutils import log + +try: + from site import USER_SITE +except ImportError: + USER_SITE = None + +DEFAULT_VERSION = "1.4.2" +DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" + +def _python_cmd(*args): + args = (sys.executable,) + args + return subprocess.call(args) == 0 + +def _check_call_py24(cmd, *args, **kwargs): + res = subprocess.call(cmd, *args, **kwargs) + class CalledProcessError(Exception): + pass + if not res == 0: + msg = "Command '%s' return non-zero exit status %d" % (cmd, res) + raise CalledProcessError(msg) +vars(subprocess).setdefault('check_call', _check_call_py24) + +def _install(tarball, install_args=()): + # extracting the tarball + tmpdir = tempfile.mkdtemp() + log.warn('Extracting in %s', tmpdir) + old_wd = os.getcwd() + try: + os.chdir(tmpdir) + tar = tarfile.open(tarball) + _extractall(tar) + tar.close() + + # going in the 
directory + subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) + os.chdir(subdir) + log.warn('Now working in %s', subdir) + + # installing + log.warn('Installing Setuptools') + if not _python_cmd('setup.py', 'install', *install_args): + log.warn('Something went wrong during the installation.') + log.warn('See the error message above.') + # exitcode will be 2 + return 2 + finally: + os.chdir(old_wd) + shutil.rmtree(tmpdir) + + +def _build_egg(egg, tarball, to_dir): + # extracting the tarball + tmpdir = tempfile.mkdtemp() + log.warn('Extracting in %s', tmpdir) + old_wd = os.getcwd() + try: + os.chdir(tmpdir) + tar = tarfile.open(tarball) + _extractall(tar) + tar.close() + + # going in the directory + subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) + os.chdir(subdir) + log.warn('Now working in %s', subdir) + + # building an egg + log.warn('Building a Setuptools egg in %s', to_dir) + _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) + + finally: + os.chdir(old_wd) + shutil.rmtree(tmpdir) + # returning the result + log.warn(egg) + if not os.path.exists(egg): + raise IOError('Could not build the egg.') + + +def _do_download(version, download_base, to_dir, download_delay): + egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' + % (version, sys.version_info[0], sys.version_info[1])) + if not os.path.exists(egg): + tarball = download_setuptools(version, download_base, + to_dir, download_delay) + _build_egg(egg, tarball, to_dir) + sys.path.insert(0, egg) + + # Remove previously-imported pkg_resources if present (see + # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). 
+ if 'pkg_resources' in sys.modules: + del sys.modules['pkg_resources'] + + import setuptools + setuptools.bootstrap_install_from = egg + + +def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, + to_dir=os.curdir, download_delay=15): + # making sure we use the absolute path + to_dir = os.path.abspath(to_dir) + was_imported = 'pkg_resources' in sys.modules or \ + 'setuptools' in sys.modules + try: + import pkg_resources + except ImportError: + return _do_download(version, download_base, to_dir, download_delay) + try: + pkg_resources.require("setuptools>=" + version) + return + except pkg_resources.VersionConflict: + e = sys.exc_info()[1] + if was_imported: + sys.stderr.write( + "The required version of setuptools (>=%s) is not available,\n" + "and can't be installed while this script is running. Please\n" + "install a more recent version first, using\n" + "'easy_install -U setuptools'." + "\n\n(Currently using %r)\n" % (version, e.args[0])) + sys.exit(2) + else: + del pkg_resources, sys.modules['pkg_resources'] # reload ok + return _do_download(version, download_base, to_dir, + download_delay) + except pkg_resources.DistributionNotFound: + return _do_download(version, download_base, to_dir, + download_delay) + +def _clean_check(cmd, target): + """ + Run the command to download target. If the command fails, clean up before + re-raising the error. + """ + try: + subprocess.check_call(cmd) + except subprocess.CalledProcessError: + if os.access(target, os.F_OK): + os.unlink(target) + raise + +def download_file_powershell(url, target): + """ + Download the file at url to target using Powershell (which will validate + trust). Raise an exception if the command cannot complete. 
+ """ + target = os.path.abspath(target) + cmd = [ + 'powershell', + '-Command', + "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), + ] + _clean_check(cmd, target) + +def has_powershell(): + if platform.system() != 'Windows': + return False + cmd = ['powershell', '-Command', 'echo test'] + devnull = open(os.path.devnull, 'wb') + try: + try: + subprocess.check_call(cmd, stdout=devnull, stderr=devnull) + except: + return False + finally: + devnull.close() + return True + +download_file_powershell.viable = has_powershell + +def download_file_curl(url, target): + cmd = ['curl', url, '--silent', '--output', target] + _clean_check(cmd, target) + +def has_curl(): + cmd = ['curl', '--version'] + devnull = open(os.path.devnull, 'wb') + try: + try: + subprocess.check_call(cmd, stdout=devnull, stderr=devnull) + except: + return False + finally: + devnull.close() + return True + +download_file_curl.viable = has_curl + +def download_file_wget(url, target): + cmd = ['wget', url, '--quiet', '--output-document', target] + _clean_check(cmd, target) + +def has_wget(): + cmd = ['wget', '--version'] + devnull = open(os.path.devnull, 'wb') + try: + try: + subprocess.check_call(cmd, stdout=devnull, stderr=devnull) + except: + return False + finally: + devnull.close() + return True + +download_file_wget.viable = has_wget + +def download_file_insecure(url, target): + """ + Use Python to download the file, even though it cannot authenticate the + connection. + """ + try: + from urllib.request import urlopen + except ImportError: + from urllib2 import urlopen + src = dst = None + try: + src = urlopen(url) + # Read/write all in one block, so we don't create a corrupt file + # if the download is interrupted. 
+ data = src.read() + dst = open(target, "wb") + dst.write(data) + finally: + if src: + src.close() + if dst: + dst.close() + +download_file_insecure.viable = lambda: True + +def get_best_downloader(): + downloaders = [ + download_file_powershell, + download_file_curl, + download_file_wget, + download_file_insecure, + ] + + for dl in downloaders: + if dl.viable(): + return dl + +def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, + to_dir=os.curdir, delay=15, + downloader_factory=get_best_downloader): + """Download setuptools from a specified location and return its filename + + `version` should be a valid setuptools version number that is available + as an egg for download under the `download_base` URL (which should end + with a '/'). `to_dir` is the directory where the egg will be downloaded. + `delay` is the number of seconds to pause before an actual download + attempt. + + ``downloader_factory`` should be a function taking no arguments and + returning a function for downloading a URL to a target. + """ + # making sure we use the absolute path + to_dir = os.path.abspath(to_dir) + tgz_name = "setuptools-%s.tar.gz" % version + url = download_base + tgz_name + saveto = os.path.join(to_dir, tgz_name) + if not os.path.exists(saveto): # Avoid repeated downloads + log.warn("Downloading %s", url) + downloader = downloader_factory() + downloader(url, saveto) + return os.path.realpath(saveto) + + +def _extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). 
+ """ + import copy + import operator + from tarfile import ExtractError + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. + directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 448 # decimal for oct 0700 + self.extract(tarinfo, path) + + # Reverse sort directories. + if sys.version_info < (2, 4): + def sorter(dir1, dir2): + return cmp(dir1.name, dir2.name) + directories.sort(sorter) + directories.reverse() + else: + directories.sort(key=operator.attrgetter('name'), reverse=True) + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError: + e = sys.exc_info()[1] + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + +def _build_install_args(options): + """ + Build the arguments to 'python setup.py install' on the setuptools package + """ + install_args = [] + if options.user_install: + if sys.version_info < (2, 6): + log.warn("--user requires Python 2.6 or later") + raise SystemExit(1) + install_args.append('--user') + return install_args + +def _parse_args(): + """ + Parse the command line for options + """ + parser = optparse.OptionParser() + parser.add_option( + '--user', dest='user_install', action='store_true', default=False, + help='install in user site package (requires Python 2.6 or later)') + parser.add_option( + '--download-base', dest='download_base', metavar="URL", + default=DEFAULT_URL, + help='alternative URL from where to download the setuptools package') + parser.add_option( + '--insecure', dest='downloader_factory', action='store_const', + const=lambda: download_file_insecure, default=get_best_downloader, + help='Use internal, non-validating downloader' + ) + options, args = parser.parse_args() + # 
positional arguments are ignored + return options + +def main(version=DEFAULT_VERSION): + """Install or upgrade setuptools and EasyInstall""" + options = _parse_args() + tarball = download_setuptools(download_base=options.download_base, + downloader_factory=options.downloader_factory) + return _install(tarball, _build_install_args(options)) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/licenses/CONFIGOBJ_LICENSE.rst b/licenses/CONFIGOBJ_LICENSE.rst new file mode 100644 index 0000000..bdb6748 --- /dev/null +++ b/licenses/CONFIGOBJ_LICENSE.rst @@ -0,0 +1,32 @@ +Copyright (c) 2003-2010, Michael Foord +All rights reserved. +E-mail : fuzzyman AT voidspace DOT org DOT uk + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Michael Foord nor the name of Voidspace + may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/licenses/DATATABLES_LICENSE.rst b/licenses/DATATABLES_LICENSE.rst new file mode 100644 index 0000000..477c4c3 --- /dev/null +++ b/licenses/DATATABLES_LICENSE.rst @@ -0,0 +1,29 @@ +Copyright (c) 2008-2013, Allan Jardine +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +Redistributions of source code must retain the above copyright notice, +this list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +Neither the name of Allan Jardine nor SpryMedia may be used to endorse +or promote products derived from this software without specific prior +written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS "AS IS" AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/licenses/ERFA.rst b/licenses/ERFA.rst new file mode 100644 index 0000000..ea44c48 --- /dev/null +++ b/licenses/ERFA.rst @@ -0,0 +1,53 @@ +Copyright (C) 2013, NumFOCUS Foundation. +All rights reserved. + +This library is derived, with permission, from the International +Astronomical Union's "Standards of Fundamental Astronomy" library, +available from http://www.iausofa.org. + +The ERFA version is intended to retain identical +functionality to the SOFA library, but made distinct through +different function and file names, as set out in the SOFA license +conditions. The SOFA original has a role as a reference standard +for the IAU and IERS, and consequently redistribution is permitted only +in its unaltered state. The ERFA version is not subject to this +restriction and therefore can be included in distributions which do not +support the concept of "read only" software. + +Although the intent is to replicate the SOFA API (other than replacement of +prefix names) and results (with the exception of bugs; any that are +discovered will be fixed), SOFA is not responsible for any errors found +in this version of the library. + +If you wish to acknowledge the SOFA heritage, please acknowledge that +you are using a library derived from SOFA, rather than SOFA itself. 
+ + +TERMS AND CONDITIONS + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1 Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2 Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3 Neither the name of the Standards Of Fundamental Astronomy Board, the + International Astronomical Union nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/licenses/EXPAT_LICENSE.rst b/licenses/EXPAT_LICENSE.rst new file mode 100644 index 0000000..dcb4506 --- /dev/null +++ b/licenses/EXPAT_LICENSE.rst @@ -0,0 +1,22 @@ +Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd + and Clark Cooper +Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Expat maintainers. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/licenses/FUTURES_LICENSE.rst b/licenses/FUTURES_LICENSE.rst new file mode 100644 index 0000000..c430db0 --- /dev/null +++ b/licenses/FUTURES_LICENSE.rst @@ -0,0 +1,21 @@ +Copyright 2009 Brian Quinlan. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY BRIAN QUINLAN "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT +HALL THE FREEBSD PROJECT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/licenses/JQUERY_LICENSE.rst b/licenses/JQUERY_LICENSE.rst new file mode 100644 index 0000000..cdd31b5 --- /dev/null +++ b/licenses/JQUERY_LICENSE.rst @@ -0,0 +1,21 @@ +Copyright 2014 jQuery Foundation and other contributors +http://jquery.com/ + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/licenses/LICENSE.rst b/licenses/LICENSE.rst new file mode 100644 index 0000000..da9a136 --- /dev/null +++ b/licenses/LICENSE.rst @@ -0,0 +1,26 @@ +Copyright (c) 2011-2014, Astropy Developers + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. +* Neither the name of the Astropy Team nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/licenses/PLY_LICENSE.rst b/licenses/PLY_LICENSE.rst new file mode 100644 index 0000000..0902238 --- /dev/null +++ b/licenses/PLY_LICENSE.rst @@ -0,0 +1,30 @@ +PLY (Python Lex-Yacc) Version 3.4 + +Copyright (C) 2001-2011, +David M. Beazley (Dabeaz LLC) +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of the David Beazley or Dabeaz LLC may be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/licenses/PYFITS.rst b/licenses/PYFITS.rst new file mode 100644 index 0000000..4161e9f --- /dev/null +++ b/licenses/PYFITS.rst @@ -0,0 +1,29 @@ +Copyright (C) 2014 Association of Universities for Research in Astronomy (AURA) + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. 
Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + 3. The name of AURA and its representatives may not be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. + diff --git a/licenses/PYTEST_LICENSE.rst b/licenses/PYTEST_LICENSE.rst new file mode 100644 index 0000000..ff33b8f --- /dev/null +++ b/licenses/PYTEST_LICENSE.rst @@ -0,0 +1,18 @@ + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. diff --git a/licenses/README.rst b/licenses/README.rst new file mode 100644 index 0000000..e8c3e95 --- /dev/null +++ b/licenses/README.rst @@ -0,0 +1,5 @@ +Licenses +======== + +This directory holds license and credit information for astropy, works astropy is derived from, and/or datasets. + diff --git a/licenses/SIX_LICENSE.rst b/licenses/SIX_LICENSE.rst new file mode 100644 index 0000000..b673027 --- /dev/null +++ b/licenses/SIX_LICENSE.rst @@ -0,0 +1,18 @@ +Copyright (c) 2010-2013 Benjamin Peterson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/licenses/SPHINXEXT_LICENSES.rst b/licenses/SPHINXEXT_LICENSES.rst new file mode 100644 index 0000000..248b4fb --- /dev/null +++ b/licenses/SPHINXEXT_LICENSES.rst @@ -0,0 +1,80 @@ +This file details liceses for some of the files in astropy/sphinx/ext +that are adapted from other projects: + + +========= +License 1 +========= + + The files + - numpydoc.py + - docscrape.py + - docscrape_sphinx.py + - phantom_import.py + have the following license: + +Copyright (C) 2008 Stefan van der Walt , Pauli Virtanen + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+ +========= +License 2 +========= + + The files + - compiler_unparse.py + - comment_eater.py + - traitsdoc.py + have the following license: + +This software is OSI Certified Open Source Software. +OSI Certified is a certification mark of the Open Source Initiative. + +Copyright (c) 2006, Enthought, Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Enthought, Inc. nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + diff --git a/licenses/SYMPY.rst b/licenses/SYMPY.rst new file mode 100644 index 0000000..8cd32b8 --- /dev/null +++ b/licenses/SYMPY.rst @@ -0,0 +1,28 @@ +Copyright (c) 2006-2014 SymPy Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + a. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + b. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + c. Neither the name of SymPy nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. diff --git a/licenses/WCSLIB_LICENSE.rst b/licenses/WCSLIB_LICENSE.rst new file mode 100644 index 0000000..cca7fc2 --- /dev/null +++ b/licenses/WCSLIB_LICENSE.rst @@ -0,0 +1,165 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. 
+ Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. 
+ + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. 
+ + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. 
+ + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. 
diff --git a/scripts/README.rst b/scripts/README.rst new file mode 100644 index 0000000..dc9cf3b --- /dev/null +++ b/scripts/README.rst @@ -0,0 +1,5 @@ +Scripts +======= + +This directory contains command-line scripts used by the astropy package. + diff --git a/scripts/fitscheck b/scripts/fitscheck new file mode 100755 index 0000000..514b2e8 --- /dev/null +++ b/scripts/fitscheck @@ -0,0 +1,8 @@ +#! /usr/bin/env python + +import astropy.io.fits.scripts.fitscheck +import sys + + +if __name__ == '__main__': + sys.exit(astropy.io.fits.scripts.fitscheck.main()) diff --git a/scripts/fitsdiff b/scripts/fitsdiff new file mode 100755 index 0000000..b848131 --- /dev/null +++ b/scripts/fitsdiff @@ -0,0 +1,8 @@ +#! /usr/bin/env python + +import astropy.io.fits.scripts.fitsdiff +import sys + + +if __name__ == '__main__': + sys.exit(astropy.io.fits.scripts.fitsdiff.main()) diff --git a/scripts/fitsheader b/scripts/fitsheader new file mode 100755 index 0000000..0da3300 --- /dev/null +++ b/scripts/fitsheader @@ -0,0 +1,5 @@ +#!/usr/bin/env python + +import astropy.io.fits.scripts.fitsheader + +astropy.io.fits.scripts.fitsheader.main() diff --git a/scripts/samp_hub b/scripts/samp_hub new file mode 100755 index 0000000..5fde941 --- /dev/null +++ b/scripts/samp_hub @@ -0,0 +1,4 @@ +#!/usr/bin/env python +from astropy.vo.samp.hub_script import hub_script +hub_script() + diff --git a/scripts/volint b/scripts/volint new file mode 100755 index 0000000..899600e --- /dev/null +++ b/scripts/volint @@ -0,0 +1,5 @@ +#!/usr/bin/env python + +import astropy.io.votable.volint + +astropy.io.votable.volint.main() diff --git a/scripts/wcslint b/scripts/wcslint new file mode 100755 index 0000000..f1fa44e --- /dev/null +++ b/scripts/wcslint @@ -0,0 +1,5 @@ +#!/usr/bin/env python + +import astropy.wcs.wcslint + +astropy.wcs.wcslint.main() diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..ec3c62d --- /dev/null +++ b/setup.cfg @@ -0,0 +1,25 @@ +[build_sphinx] +source-dir = docs 
+build-dir = docs/_build +all_files = 1 + +[upload_docs] +upload-dir = docs/_build/html +show-response = 1 + +[pytest] +minversion = 2.3.3 +norecursedirs = ".tox" "build" "docs[\/]_build" "astropy[\/]extern" "astropy[\/]utils[\/]compat" +doctest_plus = enabled +doctest_norecursedirs = "astropy[\/]sphinx" + +[bdist_dmg] +background = static/dmg_background.png +# Note: The SVG source file for the DMG background image is located in the +# repository at https://github.com/astropy/astropy-logo + +[bdist_wininst] +bitmap = static/wininst_background.bmp + +[ah_bootstrap] +auto_use = True diff --git a/setup.py b/setup.py new file mode 100755 index 0000000..c73d7ff --- /dev/null +++ b/setup.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python +# Licensed under a 3-clause BSD style license - see LICENSE.rst + +import glob +import os +import sys + +import ah_bootstrap +from setuptools import setup + +#A dirty hack to get around some early import/configurations ambiguities +if sys.version_info[0] >= 3: + import builtins +else: + import __builtin__ as builtins +builtins._ASTROPY_SETUP_ = True + +import astropy +from astropy_helpers.setup_helpers import ( + register_commands, adjust_compiler, get_package_info, get_debug_option, + is_distutils_display_option) +from astropy_helpers.git_helpers import get_git_devstr +from astropy_helpers.version_helpers import generate_version_py + +NAME = 'astropy' + +# VERSION should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386) +VERSION = '0.4.2' + +# Indicates if this version is a release version +RELEASE = 'dev' not in VERSION + +if not RELEASE: + VERSION += get_git_devstr(False) + +DOWNLOAD_BASE_URL = 'http://pypi.python.org/packages/source/a/astropy' + +# Populate the dict of setup command overrides; this should be done before +# invoking any other functionality from distutils since it can potentially +# modify distutils' behavior. 
+cmdclassd = register_commands(NAME, VERSION, RELEASE) + +# Adjust the compiler in case the default on this platform is to use a +# broken one. +adjust_compiler(NAME) + +# Freeze build information in version.py +generate_version_py(NAME, VERSION, RELEASE, get_debug_option(NAME)) + +# Treat everything in scripts except README.rst as a script to be installed +scripts = [fname for fname in glob.glob(os.path.join('scripts', '*')) + if os.path.basename(fname) != 'README.rst'] + +# Get configuration information from all of the various subpackages. +# See the docstring for setup_helpers.update_package_files for more +# details. +package_info = get_package_info() + +# Add the project-global data +package_info['package_data'].setdefault('astropy', []).append('data/*') + +# Currently the only entry points installed by Astropy are hooks to +# zest.releaser for doing Astropy's releases +entry_points = {} +for hook in [('prereleaser', 'middle'), ('releaser', 'middle'), + ('postreleaser', 'before'), ('postreleaser', 'middle')]: + hook_ep = 'zest.releaser.' + '.'.join(hook) + hook_name = 'astropy.release.' 
+ '.'.join(hook) + hook_func = 'astropy.utils.release:' + '_'.join(hook) + entry_points[hook_ep] = ['%s = %s' % (hook_name, hook_func)] + + +setup_requires = ['numpy>=' + astropy.__minimum_numpy_version__] +install_requires = ['numpy>=' + astropy.__minimum_numpy_version__] +# Avoid installing setup_requires dependencies if the user just +# queries for information +if is_distutils_display_option(): + setup_requires = [] + + +setup(name=NAME, + version=VERSION, + description='Community-developed python astronomy tools', + scripts=scripts, + requires=['numpy'], # scipy not required, but strongly recommended + setup_requires=setup_requires, + install_requires=install_requires, + provides=[NAME], + author='The Astropy Developers', + author_email='astropy.team@gmail.com', + license='BSD', + url='http://astropy.org', + long_description=astropy.__doc__, + download_url='%s/astropy-%s.tar.gz' % (DOWNLOAD_BASE_URL, VERSION), + classifiers=[ + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: BSD License', + 'Operating System :: OS Independent', + 'Programming Language :: C', + 'Programming Language :: Cython', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: Implementation :: CPython', + 'Topic :: Scientific/Engineering :: Astronomy', + 'Topic :: Scientific/Engineering :: Physics' + ], + cmdclass=cmdclassd, + zip_safe=False, + use_2to3=True, + entry_points=entry_points, + **package_info +) diff --git a/static/wininst_background.bmp b/static/wininst_background.bmp new file mode 100644 index 0000000000000000000000000000000000000000..0e4d253098640a09324b69f7056aee1ef8eb8bd2 GIT binary patch literal 158742 zcmeI5349gR^~WCws732WZEdZ#+Pc)G)mCd=s`l^F)}^i};sPq7h=_nJ3dp{SY=Q^^ z0sO zb{cWKt%dizgRK@U2j1swE3<5PZLuOFE=#ag@IGu?VGg!ZuuOO# zwk!Ir@LB^%2u4PNuMxWI@LZ4vw} zZ22$;TWDGdye^4LMcYtj3*fbA^VgTe@iq_Mo0eA+H&?JZ@IGwoU=B9h#%96aX00u= 
z6nM>Ba&Ac+Z%Odpv^6Dhi4ogSOu~HGF~+mSUh{n6Yo4#Q8hjydUW_^Ng`5d&G1ff! z0^S_?Le>J{3t8j&N^|22IWyTp&Pw@`VQfAy=g1ebChc`+vPg{<*>r3ic>YbIa# z+QOH>n45;d|9HkMd?9ZdjEQ`qo=Iz%AI2)0hB+`6)-X=TME%0|JYV?Q)GtlGP}4-l zEPMfPynfm7g^ZQzm%tbLN(Uc_I>z&*qhUIJ(o4habWHSk%LNTXJu`izriP)< zLoH+R7n`eU7{>j6V||e? ze6Jeoiy9WzF-6AAI3MGE(5tFg-|8#*I1Kr6WXwgwg7A~J8s;&c6d5!5@)L&%e$wJ2 zHT|SGmW=X~8nM2pVK}Z7V=>n_3}P{HEU6Rgi*cA+te?Nzo-cDe>B5++hC!?^<|=~F zux#MSkuexc%EV!qvk-NRA4|%_VVJ)#b?i)Q*jW_oGrk-(jPsMsH}Z2=#x%x~LF6lB z7>nXd&@dUsy!Z+t)-TJ4mE|l{V|~k5Qgb{h#$i#hzC*qu%vVIi92s-dF!&$O*cr)J zRH9g4982PRGv`{4`HG-oeUmQ-KPl$*dF^7adA{(qg)s>Y3nE`p?y-KvM+On=i+r8U z`HBMMikW7tzfRCF9mXn5tnW6Kg#X2&ZWKhmLOIsA$?3-;*0=B_sbR|bimF&YP8?5K z>P9v>SBsxKi))j}mxVDm4Lb{CeLmO9UVG+Tg;>9$<}1#I+GG@8Q5vS7*SExBRkg|3 zX;=l$S1`UT8WzQveGdD*}u4t7?;GpIBK=jO!7L zm4^AOP4ax{Xqa-W@0PE?8YR?+Dn-7csx}$!7ZdriFyMu{?iuTQ zT^gA9>Eqe3(NVR#lti`owJdT&rr6WxcK&_mt)*c)p}H>@3VzRHj(J zEDo!$#k-h~mpj!^$z%uc}R2@0~mwVtwRGhOt=W zE2?UfYWa#FVtw&`BVYG^OVlQv_$rrJ|6Hj}dh}Ay%r=sk&C4?q_?(4nzrIT!Vq#9o z%NgWU+0s`ETB$2ftlt%<5)T&rr6 zrO!&SsehX}F`JyL9bZxRe%ml6#`%JR`-U4-qv>G&?`3!k2EW&z>*Pd5W=w@pWNcdTJ(&F?BS|?Y>D_ zzA8$-qB1=r#pH{tO-AuGl;ogxF|&*%ZQ?MCpLEGruxGkn*?=BqjJ-X~$U?q!7;}pC z8DBaYhH?Mogw!SdhmKy;zH`q_AGU0>qiy>xTYB~XD(}aXC5zYXI2j+Ve@AMQOIB}7 znUc6Djak;FLo=Q76*)yG;>S!$%IG&_^qLM``)>TOWxE|M+H}~~vD@bbL%*G}DlNA- z*{N4wd`3!kQAymS#3hUS4IR6>Yu_Px{fB**GkWr@Wy=bV%w*5vnGm-qjoF+PMYH%G zYWCWzmwM5bL0uDZUh*k>bWz zRIJa9C6gCrFL?L;W_zA`>M81E^^j016g^9QteP%=riWz0>(AlAQyi@|WagOZH}QjAIFD|o(SHEizEb@QHm z_PG-A{5idU>z()ZFUTyI^Xlsjj<97u`K<3-g7XK_3O$?96Q`4c~ruKfk`$>vo;{ZcN*7 zJb`g$`bmiOMZW&tmK53L6hy`__t1Jg$zY6$8fF*kf4{UOrTVj!@Y{L>^wH!?$XDpZ z`XXOp#v(OIyPVjDou}d(yxHg=e-7Vyuj$^&@e9(It=*BbX3No;3zp~49XWnR=DSVG z^zFIlpSN7wqrZ<|&e&oa*0uM*4g7YWd+zzuUEpuyrp;QGv?zD}j5#Y7j`}`f`G+mr z?%emJvKf4zyA?_xsTd*+RFICIb%%Z zi~sKL0oIz0CYfK&HWoA=Lo(4vR^#}<*X<{frA~~M<-|(wo2>f<$>q0?uWP?dPtIOj z5^pl**2CkbVUgP8=E(Sw=gckVn!IpTDu13D16%8NoQfC6lkB-OC@^V@IqUb8)@XLiqYosh6}5o#vC{FmR($YhMU@m1q_Y5-R&?fUgQ{y>)E 
z*pG2?*{k$e${J&46k|4i@|Rp}U+YDBnr)-?cq4=Hg>yf(N#sj~v0>L5pIx-TJS-aa82%r}dA%g#8<48Uq_UZRz8Dj(I&-EpFQG5yagY=0r7O{1Z z-@5(CTK4`gEyxaOzC3D^vV5^vzu({yYx(bDYU08rQGD6tTwibS=3#!ljL9Yex0FXW@Iw9*xZh>j|I&e54y%Q4Cf+W7<nHo25Wh&z5 z7SGq@r1Zt?|2UsMJ2MsevSW;2&cc@iW9SRnI`!(messm=lIUj!3%%6HTjJ8qv)Ot$ z|D;R50yn;%?L*cxw(K;F=@`akj1Qa-pKHu#jG25zG%OkX=LMWU#Jm^gE3lt}ZR54U zWHn>~v~DO;O#c#-be=){Pz61>$^z}H4|C^VwVJ`9| zGR9(kC%)z`FPQWB;E}nYhnBed-KNcVi@&4g#+S$#`#d;DzA!E`H7vpy%Y)(knP;Ch z$ItIKZ@rU^Cz*D!xt)eR54jn(za}4kyp!h(c(M4$W~0bTo-aGbP`@w_#@N95J71G5 z#+OCIfUyrom9dDmN32I8Ur+V2&xJA0bTll?m!gJENm!8nKVdvnx9&?Pn?ru}iyO#fCk}Vb&lP!&o23lkA-09P<@Ue1#c{_{ZGsr{ibL&RE!^-_ShtoBTHT*Asr4 zpU(4zIu^y(oHG5A;mg7p_w4gD$eZ!|Zr8a_p%Y)onaJ21Z@y!GjyD@M+2_Vr&mT#e z$d?^sajT3Zo-aNI6ZyKaDc#MMz3yGQm*rdsCL5{j?>HZvQkDy|(=eHQg~*qzpBy=U zdZx%2f6nps%k^G4jye5dqkqU6G~B9RJYS-Qt=(}t{`uNpZXUdpw5z z!^Y;=)r~~Henh?;YLomnnSNRL;xud)#Pg716Mz`9~A@<=b}b zxyh3+&QCUYtIW^8_QqR>!q0bwu_Nl3$QR@uGGzFgoomcwT;Tj^h^55-3e4eO@ftnK zmiu!Hx0#_#Xrzc(B^U~QoHfUnmkq-HIM8cRZq=Ej&TU&t80-`4HBZRYv1Xjp_X zzSj8O2Q7=mx{;foo%RF z@yoOtN0!^wjsDb(6tm^TT$mJNTwb5?rK4dumdq|ZkjNNgT=8=@o*Xs78s9Gg-)iy& zelm4s;T--ue(jBhhZ7<>Sbi)S#h8%S=i@LZzU(xN-#3mUy!dL;tko{IPTiO4omf_I zEJ0+9_m5#82^to+XoIDu@L8|H>&@p=+vpe8gxvTF$NDgSY&Vf)FeY%`KaniNaU~jK z>>q8(R<;c0{AaH-5|A&Eu@6TZX08KsVN)ah`HU@`%l_V^3STT=f&P#6H#BR}dMD!x z=R8~Zn(@`|%Wrbo_kr`Q!h?y(7h`M+)QkB4Pf1*C){pF-k+NXr=Goo)4qiWW^bf0k zNLZMbS$KG+7>7+sw&uSu{^Dzts9VhVedCy6-pZm`Zj51!#;?0I8Cy_(84$kv-mc;XXEm^ZIh5!GY zW$Wki^&?Tk`V6$z5f19rO6k(P8afD$iv-j$TprYaXi_5Ix+iDars9&kwV56n>U7h7-I|PJ!g=` zJYS-Q@%eqM4cPrYNLa@_#*>bGx#Yy=En7RE&rLLZtI@%%f_+JhF?(;LGGe?X zx7z64_xG{-H_sT~zta+W2{BJ$#~9|r`2L^rU3!1Ho?jkw4O3^YO0|pi2ale%k{`R^bvb6*f+bOW z69@%5O=tGf3cwBe10 zZ%5@C#zSGyjxX#*( ze!I_lY--k`jhSb{<*^60UB@1q&<2f|nB~M*Q^?2j+vi`8{vl-<<7>sHlkuo?{Qt36 zz@2;6VGdPoXM zZ8w2rK(AII#z*Y`IB)U|S%oztG{)8_A@+Z=C)vPaGM+D9!;qybUN0MKVlK>%FE0(V z<4eX*78IY3|D$Tp8(8tJPG0eI0%O^r#&&xtD4f{W?$N0kelDvb7AGR#ZXR+m+=6G%NgnntK-5ZFwuvgR+@%+B@a=jDSr=Q1q#UgbhQODNpJRSe_m?^8? 
z@W`>^7!NhgWD9c@k$i<;zEZPyq{J`GonIG@S^ON$TD0B4$6;aCAn!V2VqylL*Amym z_o#J~Q!-OoywB?w^OGW9Q=m5*V)LZCTa#^kFCoth)O+@zpA3&Bg&rP=+vD*bZMOVx z8_@%dEjBkbOkfP39l+MZdD{ubGCRK9G|Y)F84ct65VJS#Pn!7StYw3t4%F+5A$eVU z_0R7+@auI$M^4PfUVV(i`0*rPn=EClY%GcQgh2csmk2qp(UTIE_Z>7Mw+n7>(1^8P zkDanI6~>Nyyl-I)@)b+h?McFTf6SERW&HO%{>ORg3$wSUprz;SOJe$k^Hlg-YF4Rb zZ#^B4J%}r}oQ$6szbFmw2^lziVphVUwe$Fx-_$X#HW|MtZ{7e{7W;QPbnl#j$+%*3kuP4yreu_)VEua%SVDG5BJyR&n5bbGm!Xb@`I6Ky z+1ezF^WE|lj4vlY$!g!MZe)q`!N2l+IpxHr3i*i{b2C%L9v+@CmaAZMzRwk3^K98^ z7~fyPUfad`7RCa)Z_@0>=0lpS_le z^;w^OR8AlAVdZ(>q%B{dZ%&Lk)xJ&6Al`ChjMFg2mYENO|FQZv&lY=ar(rDK=WCNp z$HKV^%!hg8#8?bw#~9Nter_^mlXEp0i|CgfUv_?y=gThEH~DhOSMYw)jxj!8!Ctex zE9#eyhVeZ-Ov^++smd2>nHOV@d`0R;JX`Fwr-pI0Nfw8hd|`eJIAUzExyhJE-N=zK z&@7%U84Z(ROw3nU_>%OK{JkLII3M~hcnynU%pvE>GbYCQEU)kCCn26^Y(@D<_}?@; z|A_jfpke4MP5r{xZW`vmn3!{Qt4%U3^WqCRQ)SHbkCEJk$d`pNHx1+bBx;suJYz-3 z6mK^9Fx6N;iZ7eGktNo5(=e`whmXP7Yo0HWu@%rmjd?IOwhhN$9()C%VRrrcB4f-q zvbj^f!sJXt!@}c9V98O#F#Z>G%=D3wu_Pbs^L=8jaTqYi7(>2f7&HB(Lys8CSMWN< z$NA+M>myfhHF|G9&iVScQydm%477^H`B55H9Rl ztj~0dpSx)os~fG^UJ|!z)6oPAUkZNG;v;SNvWzFoBi4_^VPb7EtYJbS@ z@#Ray&cfQH$QXZLuAPqzYZ%rixxA~$SXFH@y1&B0mk^6t;`|DfuLze1%I7Pt0QdGf+W>Y&d`&=FRD=KroVtZu#$aBW$!esc0xrUWTzM`r&X}R~CkNM44Xdh6T63xDO7-j=XvH>z_rn$?XSA{UhUykMninu$XID zdBpmW9x=PTtC$n>%DD#7hZwVbg<>C~8Rxt7Qiu8C7!&!@VN8tmF+U-isAFO*#`o@6 zdJrRgS$izF`nO9TqS#*{_Umis#41|8q5{SGJCH4oFXcF_sy69)Pusa%n>6|I!@)c)GZE|O1{K#`A=3H&ZlNHTRo-46_ zMXOCh?w;@6vGnT0|Bx?7#@sYa$XCepiJ3Vuhi9aSIsK~Iq_r-2HufRzJY>(8Z$DWc z`HJ$0^#iO;ialZ#s+U@`Hp%#M)UX)G`eI!&oUag`S65Y=wCY$z?5}XTZ!*kRtTYT_ zG|aJzrs^j}zADpwlVaT{i2e$EW*vJDBAZ*DVc~Y~cfj{e@_e0f4Xdh6R{Gkc$(J7u z6JmW!-6)`ZMO3U0v6fd4Pb{A4s?)G;X2+N96tQdiRZCXp{Y#`t_Id+n7`j;(MMmuYASXT$_aY)2xHXY5wkmq+shV zQm}qADagtr8#A)W#>_QjL(T@WcHL%@vvCJmSx{_*e`CHPJk#}TtxXmmE*(GeeChbf zGuBHjS)0t>L}pG*APYY0M^@BtV&vWbU!&;S+l&Jb{?{m>lGFPw2|fQ8M#*n(HV)nS zkg@ZLSB#B~TN|r;3^A5XN;c-f{kHM!H*>{jSV;NFigw?mP2I?^h9O^ejKy5T0XF z=ZQ35ufGd?d2EmWfAz}^a_ygu11~i;^1e$j=6mrK#PbmO=T@-Sp8XX<{o7LehI_w* 
z{Jlw&FB?A@h=v7Ro6O%u;y)ijmfrJkV|$HX7)IzaU`oMSb&%()-rK;J!nVa_e|eK} zF3fiE^P^3VS@_`fTFD^0uP_XeGlDz9B9Pg44`tM&KibeuJQX&4VYV{9Eh z)mX?F6Zxu4_f77O#P>XBj$=s&4U0v-VtF1(dbx?Q;exA;)1hC$_gqKc{`QDLSFe@V zEnITZQtE4;GRJ$}-Nydby^PG2n~ivpF|mh7)G&N+>X@XTHVUVYjV z^$9K9QpStQt7FG0z2**oZ0lSXzrE8q*ln1x)QPXNG1hnD%Qe=Q%vZ?PCb3VaStlds z{A(e``W1YyKJ0gQ8T7|Q`8dmtyH1~jcIh62&+7uOt3Tdn>>c=>vG|O|VKK~CFurs& z%rVwis7)?iOOkK-m$4@VV>>_g%i9dPY$c^9PS_D6|Fogb3bVlhsoykdA_0;i*apo#ym3r%G-^jpBC-!Jo{;l7)zv% zfb7+64BF-6Sb|>n;v0<2_F3NlzwnR7sTRGBmB^UL7rrYg2BLu* zpVWx;`PyXr0c2G*9G5}PQ^#=Kj$e>47k=qlok!_?FX$bI<=A~*D}JssT(|bk#;G17 zjdY$dQNzkR)@OVLpirynhM)m6<-a@`Ktkw+%ojlWjtaTi67af4nnt#+g4xMAS zT}Sg8v#=f623&XKr4~jWa>O_j8RKJo_WDf4`fhwhX_!W=KQ@jmuJK3cqqz>gQ%xH+ z**{we^9a4}LH%RUJ@!D?bGbJEQP(KSC^X`u7?bpqtRBhF`LU$Tc+##m$@8V7VQ%?~ z?2RPpioY4hLf6Chs%foy`eW~T=!@L!5Ac5ndjk6V4Ec6uOIU{S`_yDgFZqk}_MP^7Hvj$lM~r=QvyCJR zW0D#ci&%eeWc~( zsWB@M4fC0=u;a_GhQ%);^K0D-wdxyzFAe*}KcLT4GryI;nbKQ;O84??enQrVZh`Mw!)pD7&;pH^>&pqyC$F(Tw{@!DM(E>Ehx#Fj(vy(ih}DE+ z;}vTtZ38*qOa6}6NbKycSIBl4AI3ArcTiEo8dGRG1>o3YCvupl^7@=F? 
z9BEj~zUtaFJDt)Rf7aWd-7)_DaYDNeqjbxTSP|vCT^#d%{w<|f|DVR^k-WYheUB6} zzGUh~LF6m;p>{EojP*qg!+gbK?~yH`+u_`Z23ipEuqz%Q%D!g}2l574Ivh z@^fE)Qt*+;mkeWK9LC4`jk=SyA&8%J>_)gpTBWzS1Y(wdH*wjQB;VEVAhgW@64Ur9 z2o6uceZC*|fzK=Xxn!2B^DsF&ceRm(K2pX{iW;U8>qqgW80*75Lh)BTOpb-%`_i$W z7N~s-{l-B&uemR_-gQqBx)8>LXVVl9TK0>FU9Ky|>-*07WJk<2jH^vDzGO7a&QJFJ zjw}z|37=cT{ss3^s7!kfP_x#7up9uZXXXCUTdTk%UXZrZOzS#P&cOkj5 z8m1fT^L)uLCi=YO&`ril{kqEIE_(?wgVy`{pFp1ivDqp;Yt%A6|+FgW%{sx~_(XM?;O}o~?b8l*3 zj!1DoZ24R25{l!3SWJgN57%s@v;cDW#rr5d4lz@#r@0xFUhofp*Bf4U@I4$@0nfkC zX3Qh!x<3-%^PI`%D?a;%tPJ6G#OyA3EomPQQse8%kL0&49>bN75n8xCm~VV_0j2%M zQ`#7E`VWDxy6Qjn$5rDV=}p5oCm(p1Y!Bh*)3D_V&$F4m3}Sl5u;ToJ`w2~7?T;Pt=4^{WZDvAb}Te(|1lCL!x;KS-nQ*0 z@fSWwPKV%}soM1l(B<956^(QZE;Tvvm2Ce3Ewg!&vAXwZewpW&FoV_&NOuW>J- z^*a)}5^Capr;fw@AK&e-kzKkP=9aH;;>#=6A2pLK38DYjuuq1``^F`pJ(hjgapv&1 z-jK`CoxX$G<0X$caP6=^+iO5#*+)JgGzn^5ex^p?uXXp&)@w7|SP`^_h56FZu=<_J zh7eqXI+nFT{!D%}P;Vc1LvOXtv=4eDUg`k*)VOZ^u2a7gq3DZsCd@lO-P+%A*ZB`Q zx@@C;tREgr!Z;K26wIPxF`HQ5oKXzQ>*a=GSg`A@9zHz|;3eAE-?D zo!nSoZ(lmc=eiDeJ-rDrmCp1PjCCEk=eF;(Ov66<`3f=CSK=#Dn;e@$(n9})??c0i z_sg9z^o>`;I8${WSAjo0q0keE{eurRQ{Rv3ap^AKW|%r-7>nKVy8iWY(c|P~D*P5n z5E_PjDKQ3lidMr&P6#wy$zY8C^Z&}9E7Z9V<^>e37vw%wrUp3fQryRxUcUcil}}W< zY!c=SWj>dKM!edStd)(!+-j4yd;xQOKA*j2dHokUlZ_#)c$nYf) zKOIyUf2HTD??d&Nf>^#9oagZxdA$F$QPcqLd|&o7Ip(Ww6sb)zzEm{qk&nsN5Y#?Y ztuEv$WvAwl<5u1NYl`v8)1RteBdXU_P|MeR6mn#sNh(t=)@(6{r>FyL`Om&HGW_=N zIP!%tzeB8l*ZYv0z&>$BYxj*jUr*w9uekkQ3VpCSg_KsqbIT@^K;$$HYsL&*~MaFPAn=P1{%imC97dKG$i{&_}wVl$VB-wb5le0?f=<@@mzkf~G zr{eX=_@2H=QM)H1RRI=RmuXvHr#Ef8` z$;orM7Jj$prnjTdk=JMB|3C0Yev96(Z-(ydEm4%=aWzqbXU>miPc)pY0>rT^{VeH`p~ z#QEjG{(kWDvQxX!`ulqH)5v|fk>^r7`bnGGH{(l1!+!n(IT^zENYU_|%1m=1URK&R zaQ?}+@_aqj369<2+PEC*%OjvSNA_Bf`8-a`;Q6=_b;aRWy#D?%Tqm+qI@HX)w&(Q! 
zOJ5|%F{Te|nBud~Eqn>Fm?h5V@)eiA47F{n1uNPXxc^0FS^>|aQQh{(qvX%^qwOQd zE?ggfhp|~PjJM9@v>I|$wV#9I5spRX{jZ>AiZPwcgxAYeuj=gwkL9amVJ@2wcNkV=a5}y zIOwJ7^~9E63}Y49sqvT5pPOyRYyNBxvccp_LBpc>5;V*%CwA2vG-DU8*dH?@-Y0K zXn9XVXR2RQcnw~!Fm65xIlGG?zEr(-artUb6S8nU_nbOO4YT9R%TGSso@@(Y4oA_( zD?AI}qL)V*^+Sw?&tB(U>}Ox zXUq4CGtw*?=D-+VH$uMb7!&jQe60WK0O-BPK4L{10@tYQbW1b!?e77Yo|EgU$-Qu8pv7Kv5Ui^7RLhwRU{D#rjTsxoOxZRIw5dCsAC=F`miP%c8u{gN%k6b zjPWI-VJsFKKAWV6u(wpvUKt>N9una>x#spIIqvsUMI%P-c&0Cf&n)>_`Q@&Hv4QL~ za*p0UaDP9qPfjqtER11p42*Hb=3%~0l=?@;86W36Y1rH~WM1e6;7if!Ku?5h-trj4 zBNxDZ8Y=s^{(bp;knH#2cezv;Gao!Ax0O7o7ns+S!nIo+z6aUbYw{|+eX!$-TWQQ< zjLGCH4telp#~Az`Onj|+P=kb+TGcWjcPcwQ3N;_q?Z>Z>-Fobo!*TE(9}~I^dUs?d zT)x)JYS$FI{xVvHOnvG>N_LT zmaoWIQuLE1UmRnikF?XUmj;r85atjR?Ex4c%TBAIpG$T7)!_PH7sI-=%&{X|^#+xl zn@=Xnf94l)45@fc;qsH0$Q)VeG8etBw-4M8e6DpoUlt9s$yXp>CS#FYg-opfc|1I~ z5Vc&pdIidBLbl-AvmCR^@rdiXes=EK?w=t4iEqMl%B`#N z`r)o*v&ff)F(KBs@TH((85_y$5ZvqUXc(J2*mTJ}KAy1=b3wx#w&V2oi(wp5SS)|O zgIW?}?7GO7aJ*kAeD`-@?Oj-3=6#1B`4_w6oS`u0 zfOBfQ4ZH2~f9tPsZ^O+n=HSzl<+|F^O1z%U&|GW<&Vh4jhjt+LJI&Qkhb*|59-uZ271AL;p>Hb!_~O1|&l- z0`{JITw4u>%YV*E@QgXH?K%BFW24;JEXLj^k7IWG|IgncCrrL1H0($?{zA3{z9cp5 zo-Sl(=nXi}idGHofyt5d8|=)zHBx;)?6_|E1oE7c-vQg7I6nnCim}uk9pta4<%Qx_ zoZt7re4pkuR4CaSoQpLA&3)i{wHuLBJYV>lkHIW)7-$&s<;a+XhJ7|2eoquw(XrPQ zu5J8QFowPt@~1lcV(UEI8}74*=ZeWrm~TKv8hiuKVa3>6;j{j|i@bK>@w^1_BD)?` z=dwL)3(yZJY5~9e`EQZaUVQP49XaX37pGxPvHqHE@C^Jn;TiZ4XQ)~YsLRS;cZ`SK zhar}3qrNZk*!~FP%eNs<_7mI-w+U)1d*EK2B6xQFI_RrFo)WVueHHkxg+5T@v*9}9 z;2v(-Y1cuh4T<}bWci}T@*Gcr+>pvOA})LW+vIc(jvobc(lExCj)t+k{#{++c@%ID zs*XJeJ>jZT9z56i@(=X(r*n)t>-@79)SN;4WT&p^8#T}2e;~e;oxYj}`_a5!aJ|c# zl4Ev!ImP;6z8o}6Dklc{ilOt!vJl=cDr4AZE?5F+h3b?F_voJw*R8m%itzsGR)p?? 
zI9hf>4u1pd=pK7N7;DH*pH6exj|AV>v?2$QFFVHUG>qrVQNuVtX_v2n-(QHovIX2z zg!-;wzlYqzF^H{HreW0s;_VuTZ zWH;k0iZPpGB%Se|rTmZn^{ec%1gD!Mn~i!qyqR zm_ItX*Z0GigRQSRhwECprw(&8HBj??oRfQcl5LKB*^DKRB3Ha=<|}e{z;D<#fm|3K zzlK$ZoN}CopTutv$CVAo=SeMUVn3F{Hf8?)@(B6sG7fUE zGTU@p=CJ`}6VF$ah6#+>@ujO_j|?D1A*?-UTI~-B&4lxE=)acf6h?HQ|+!WL?ho()f#UCXOZfSReVaW2`K%&-L$cahP4c zA|;p1uhs-|6%d2y+OHvRum$o2x>GUaFIr87TorQW<2Yp2h0jVqhIkok@H*3I=-D#& zqj6212m9ZuaIS8Je4Oqz#MbK+Z)B`SzC0LXu|DI=!kA{x6=VNL27!jb=k{Z_clTe< z*$Lyf4=2O31CcYI$78qt9X$!14Y^9)X&KzJcs`Czd>mVS$lu9MJ0QPivh3r!o3_fB zW5-vNpNz!%wtPim{W1*`YDX@y{^E^f&Uwv1!i9l zy{ixWUaQYzy0|5w?@WN-Z`tg$H@Uwf`&WOee{S%cR)f#7TH!Y>zk_E3TfPeouHkwJ z=T?y~JH|vmi5kZ9Wzn!G#_av1UB2R}ugRtm=09}}IRI-sQU3ar`DQUkKXEzKdd3m@ zoA#yq2ww-0{fVzHfIiQgyAk^2FhYmJxDc*4-xJm_@9*!uZ}C{f_s=TC^?137J^>YAnfNbuAC~_Ll-m`ez%7`80`}v{=;(- z!1b0|Ab&(B!5?4!ng9J(Yfi|VJR^BcB=%C_D^?mdbP?1g@m`ex?c<+_@8>wne>?t{ zEZhq{MR?B#e-3S7dD`DYe_d^QK{JAEaFf{;~yV*)GVZR7oBg5O4)c+(=#WIR zLfC&21FH@C&<0|gJ#g<$MVK<6m*EDiSNr&^&wN|v1HXmdit2Db*$Bnof5)-!t$qD5 z&h+5&4&+!aY8Nv%KN;!WVfhN4FBuKf$yeaB>Fyc?J&3L0^P<7|3feA)d{+A;cqY8$ zvuc855%a(^)=>H+)Do-v{CpQcuhJICjTmK!(S?QTNBblE-iX#Ywz)3qk0SE|UC%d@+sYY93%vo=w$nqt|`q0~A@sV~KX7Q7n`3j8vr{$1^H9mp7 zE5uG`!Y+n!`LmF>9u7IvEs#eIVnSxWhdjyuLH@iNeh%Gh#b&+VLHv$6U%wMFAJlO@ z@D(XyY_YkQpX4c1NrY2W0xYR2jl-oLj2PXzONS{FL1{oLN5Y+jMcR<9egZmkDn>;Q!S`##JJ=`PbN4s!cxNiMj7s55wbDpu(lP_e=VLXY9x$zZ5tZ$cdMGaeC zL}u0L0r5WeF`VPp3jD9%spITmuhrWIk>W60s9%~IhJNz+>C*U%aVCx@F}4@Y)G;5i zJ~B3I1DRjDE974Be*1IOt^$926z(6um``__194(?j0;uIp~H1uy#pZ=R~qvzd;w#; ze`K${{G`ZNl%I6tOEM0#>#tzXc=|q@EUwiBo)wHS4{vzB&H)>{%9`s^Wel-Qgnpu$ zor61Hk9|uvtplFWER1QyVK8ororZbTCK+Rc)5)^xohb=*f}EgV`wX^mme_qGRcW$c z9<}c$=+&;-#p$yP*Dik@3$HKeN>0zuHVKMcTf^9%#km5SMS3K(ZNIY_6(dd~MR?%TF8@ z#h83AHPbMh_gqLa&+7&{hS%ZQVw%^(yYV6&`tJ>36z!vAf7jI&zo65B}V-SHz`O!xLcy|w#7O0OB{ z@0z@#FFlsF$w)%J!i*{M<&+as;VX!IgJr(K(=d3arcfDNIAA0HS+@sxm7CbBH z3&rywaa}$%j%>{XR_yrlWK1Go0r$3BVz8KMm}I_!afXa-IY4GUGJzI_dcwJ_B-RAt zaMUfFkfC7^iy>nSF~-{rs8v5Y5$e@3`rLPqraN+X8u7>$GhV;KeEE#^?f42a<}+Ur 
zM6Azr3^{8%pJdnQON~%3;Os25pY@}39Q19lX*=|G-~V0d=Xo`*l&`(;45jXHAKNuU zWBWO8`-bj@`e4HPVqgm`!kEaHFAW2IL%!@76Z86F-6)7y->Ejq^o!>U8JoJ6EUYt_ z9t-t>bFXT>VVxLTbvUN+&|~u>=+3N7l%{Qi99J==`THr|al{&DrEaD4S6^C>>6h@n z*5}s0#p0O%@HO3+Swv=T0Itw@zJm6Xj4vI=#JXgxVtr(b*_vXK{I{`mSEw(14`;#@ zIX)rLGdme_?KO}i#cc<%{7ob1-X)t!(uQIZX3OM@(=a>6*cg({En`WD-&j48pJQzj z;s9w43nJDJ|JEcIhsoqC7+Y++j)-bnBWOBq2Y{7V_oHSWtd4%$EXVqMy{(Fpe=*Ke-?H zLdI6^Bq{ezq&q?o*Hm=-=cH1)_CS2)daNCJIr}_=EWcz3%HW|ej#}_iTb~j0SdIsHe-TqgT3li`_?$d(;nj($=>!!RDR=$Mcbli^DzCl-s^q!(kz*#gMP zK0JjMRT}``V+FEQsOR9+WNz$_tL7kTG)kel>vxfO#ul5KJ`#0ILc^3}eH*@H7&{aB z3guW|&@ki+Z79dy_wTl@WM~ZP6&lfTVzxxn3b`44U^8~s(G#I{D85;!4 z1#Gooyf^~JltqUrt%J40fVYp^t23M)`*;D#0*>MtTj6=cH`?;$pkZdt!i6!Gr(ko7 zkK}5Tj4umgk~!BP@)c6GNsg^3ABnlKh;A{yn1(UNkhA1MGVg!!wD5wj=;;vVHG?)8 zc4M&26~NU;3rKceF^Mk(y+X6_-Unx=Bae`Rio*V z5ayxE+cY`9Vgx<*;e46{TqSJUTgnyVONKGiM@BSE$4`oVh;|yL%2yEiilA$gGJX;@ zj4_7u-G{+<=90zF&7>Qz7zH&atoK!Lt2vAs_fMtU24s@-{5>Qdxnfo*Fs7to4t_F- z+N8)=5E@nYRza*99LEh4=)M*UN%o5E zBxy5n#Eh}U=DrxyjP*qg6Y~}D+v(x>p65)=xt=YxNms^zBRjr$#!%DHW^X2QJ1ixu z{u56(UpQnDhwh$L^>2YpIl@J!oMi!jExVskIPHfqHB zqK2vFE1>@$xOa&^nD|B^8pnTlbT=tr4aeTWs!^F@7A$ zUYoI)n}%t|`c8ZW;U~+pHp$q+xffq{8piX5uXi1?+D>?n+)dsMp{DuLFyM{FUi{pPF)zN(RIG2|3mD_ak?gfNp5)_v8NPzZ zR|Hv`40Gl%p7heNVmChtEZH#@=F3IH!aAm_VK}DL98WUs;^(H0xyAY}ahO<}gy)jU zYgi?#P1^ZL-B{n$FR}^D5IoFuS`fmLd z0rXO{aim?W&$Go|dogC0ui!OI$4|yA){oS^S$)#0c4YDuK&;RBl3~m%4hzR(73{uA zo!TTaCh`@PuPBdL-^NGAT*G|kD^&Q3xrUWTzQRXdU(~Q5@)f>f{R&u{6H^ReWXJicCO_sT=@zp))#xk$|YYBquOK;`HIRM>%((f!tp)NnI$Jy73)XEVyYVE z80*6rlGTo)#*!62UmqZ8C~4A=bByB?HP=RMjS}y@-{$HVMBi5{~bA&iGt~OwKjtdHt%|WbFD7 zxj0{^m)h&TNn5@I4U=KaD-NrwO`e;*)X0}DW0AfJrezjCSsppp^2k?I)h2yEBgKg? zU$K7V`4v^Q$>@8I%y?h757Dhh4F1RWP_x&5d&Ky>u6Y2tZ%)q?cB^)l+`9Z`4VD%OB@!EhDGHos%n#!rZx$W zY>e|{tSZ(IKf40!l2x@y>oXBeAL-CbZSuwW$tWKwXjowR3K$2Vb}