Initial Public Release
This commit is contained in:
commit
1af302dbe1
16 changed files with 1721 additions and 0 deletions
8
LICENSE
Normal file
8
LICENSE
Normal file
|
@ -0,0 +1,8 @@
|
|||
The MIT License (MIT)
|
||||
Copyright (c) 2013 Matthew R. Dillon
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
20
Makefile
Executable file
20
Makefile
Executable file
|
@ -0,0 +1,20 @@
|
|||
init:
|
||||
pip install -r requirements.txt --use-mirrors
|
||||
|
||||
test2:
|
||||
nosetests-2.7 tests
|
||||
|
||||
test3:
|
||||
nosetests-3.3 tests
|
||||
|
||||
simple_test2:
|
||||
nosetests-2.7 tests/test_backend_simple.py
|
||||
|
||||
advanced_test2:
|
||||
nosetests-2.7 tests/test_backend_advanced.py
|
||||
|
||||
simple_test3:
|
||||
nosetests-3.3 tests/test_backend_simple.py
|
||||
|
||||
advanced_test3:
|
||||
nosetests-3.3 tests/test_backend_advanced.py
|
43
README
Normal file
43
README
Normal file
|
@ -0,0 +1,43 @@
|
|||
SNAPExtract
|
||||
===========
|
||||
|
||||
Extract air temperatures from SNAP datasets
|
||||
|
||||
What is it?
|
||||
-----------
|
||||
|
||||
SNAPExtract is a simple tool to assist with extracting point-data from SNAP datasets
|
||||
|
||||
Prerequisites
|
||||
-------------
|
||||
|
||||
- numpy (1.7.1)
|
||||
- GDAL (1.10.0)
|
||||
- nose (1.3.0, optional, for tests)
|
||||
- sphinx (1.2b1, optional, for docs)
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
1) Clone the repo:
|
||||
|
||||
git clone https://github.com/thermokarst/snapextract
|
||||
|
||||
2) Get the data from http://snap.uaf.edu
|
||||
|
||||
3) Run the tests:
|
||||
|
||||
$ make test2
|
||||
OR
|
||||
$ make test3
|
||||
|
||||
4) If everything passes, you are ready to plug this into some larger project. For
|
||||
an example, check out [SNAPIndices](http://www.github.com/thermokarst/snapindices).
|
||||
|
||||
|
||||
Contact
|
||||
-------
|
||||
|
||||
Do you have an idea for a feature request? Find a Bug?
|
||||
Reach me at [matthewrdillon@gmail.com](mailto:matthewrdillon@gmail.com)
|
153
docs/Makefile
Executable file
153
docs/Makefile
Executable file
|
@ -0,0 +1,153 @@
|
|||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
|
||||
clean:
|
||||
-rm -rf $(BUILDDIR)/*
|
||||
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/SNAPIndices.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/SNAPIndices.qhc"
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/SNAPIndices"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/SNAPIndices"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
242
docs/conf.py
Executable file
242
docs/conf.py
Executable file
|
@ -0,0 +1,242 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# SNAPIndices documentation build configuration file, created by
|
||||
# sphinx-quickstart on Thu Mar 28 16:33:50 2013.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys, os
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
sys.path.insert(0, os.path.abspath('../'))
|
||||
|
||||
# -- General configuration -----------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.viewcode']
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'SNAPIndices'
|
||||
copyright = u'2013, Matthew Ryan Dillon'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '0.1.0'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '0.1.0'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = ['_build']
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
|
||||
# -- Options for HTML output ---------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'default'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'SNAPIndicesdoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output --------------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass [howto/manual]).
|
||||
latex_documents = [
|
||||
('index', 'SNAPIndices.tex', u'SNAPIndices Documentation and Source',
|
||||
u'Matthew Ryan Dillon', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output --------------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'snapindices', u'SNAPIndices Documentation',
|
||||
[u'Matthew Ryan Dillon'], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output ------------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'SNAPIndices', u'SNAPIndices Documentation',
|
||||
u'Matthew Ryan Dillon', 'SNAPIndices', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
64
docs/index.rst
Executable file
64
docs/index.rst
Executable file
|
@ -0,0 +1,64 @@
|
|||
************************************
|
||||
SNAPExtract Documentation and Source
|
||||
************************************
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 4
|
||||
|
||||
.. default-domain:: python
|
||||
.. automodule:: snapextract
|
||||
|
||||
SNAPExtract is a Python project, currently supported under version
|
||||
2.7.x and 3.3.x.
|
||||
|
||||
Requirements: gdal, numpy
|
||||
Option: nose (for testing), sphinx (for docs)
|
||||
|
||||
Module: snapextract.backend
|
||||
---------------------------
|
||||
|
||||
Automatic API Documentation.
|
||||
|
||||
.. automodule:: snapextract.backend
|
||||
:members: SNAPDataSet, GeoRefData
|
||||
|
||||
Source: backend.py
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. literalinclude:: ../snapextract/_backend.py
|
||||
|
||||
Tests
|
||||
-----
|
||||
|
||||
Backend - Simple
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: tests.test_backend_simple
|
||||
:members:
|
||||
|
||||
Source: test_backend_simple.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. literalinclude:: ../tests/test_backend_simple.py
|
||||
|
||||
Backend - Advanced
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: tests.test_backend_advanced
|
||||
:members:
|
||||
|
||||
Source: test_backend_advanced.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. literalinclude:: ../tests/test_backend_advanced.py
|
||||
|
||||
Processing - Simple
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: tests.test_processing_simple
|
||||
:members:
|
||||
|
||||
Source: test_processing_simple.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. literalinclude:: ../tests/test_processing_simple.py
|
9
requirements.txt
Normal file
9
requirements.txt
Normal file
|
@ -0,0 +1,9 @@
|
|||
GDAL==1.10.0
|
||||
Jinja2==2.7.1
|
||||
MarkupSafe==0.18
|
||||
Pygments==1.6
|
||||
Sphinx==1.2b1
|
||||
docutils==0.11
|
||||
nose==1.3.0
|
||||
numpy==1.7.1
|
||||
wsgiref==0.1.2
|
22
setup.py
Executable file
22
setup.py
Executable file
|
@ -0,0 +1,22 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
|
||||
with open('README') as f:
|
||||
readme = f.read()
|
||||
|
||||
with open('LICENSE') as f:
|
||||
license = f.read()
|
||||
|
||||
setup(
|
||||
name='SNAPExtract',
|
||||
version='0.1.0',
|
||||
description='Extract climate data from SNAP datasets',
|
||||
long_description=readme,
|
||||
author='Matthew Ryan Dillon',
|
||||
author_email='matthewrdillon@gmail.com',
|
||||
url='https://github.com/thermokarst/snapextract',
|
||||
license=license,
|
||||
packages=find_packages(exclude=('tests', 'docs'))
|
||||
)
|
29
snapextract/__init__.py
Executable file
29
snapextract/__init__.py
Executable file
|
@ -0,0 +1,29 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Climate indices for geotechnical engineering consideration are
|
||||
extremely important for designing engineered structures on permanently
|
||||
frozen and seasonally frozen soils, particularly in Alaska. Relevant
|
||||
climate indices typically include (over a given timespan) a site's
|
||||
average air temperature, the average air freezing and air thawing
|
||||
indices, and the design air freezing and thawing indices (the average
|
||||
of the three coldest or three warmest years, respectively).
|
||||
|
||||
The University of Alaska Fairbanks (UAF) Scenarios Network for Alaska
|
||||
and Arctic Planning (http://snap.uaf.edu) has prepared and maintains
|
||||
a geographically gridded dataset representing calculated climate
|
||||
parameters across Alaska. SNAP provides an estimate of historical
|
||||
climate conditions in regions of Alaska that do not have consistent
|
||||
climate records, as well as providing scientifically defined
|
||||
peer-reviewed climatic projections. Previous methods utilized in
|
||||
engineering practice for projecting climate indices involved
|
||||
procedures with no scientific basis (linear extrapolation), and
|
||||
provided little confidence in the accuracy of the results. As
|
||||
distributed, the SNAP datasets are extremely large and cumbersome, and
|
||||
represent a significant hurdle for users to process site-specific
|
||||
data.
|
||||
|
||||
.. moduleauthor:: Matthew Dillon <mrdillon@alaska.edu>
|
||||
"""
|
||||
|
||||
from ._backend import SNAPDataSet, GeoRefData, mkdir_p, wgs84_to_ne, ne_to_wgs
|
279
snapextract/_backend.py
Executable file
279
snapextract/_backend.py
Executable file
|
@ -0,0 +1,279 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
.. :module:: backend
|
||||
:platform: Unix
|
||||
:synopsis: This represents the main engine for extracting from the
|
||||
raw datasets.
|
||||
|
||||
.. moduleauthor:: Matthew Dillon <mrdillon@alaska.edu>
|
||||
"""
|
||||
|
||||
import zipfile
|
||||
import gdal
|
||||
import numpy
|
||||
import itertools
|
||||
import os
|
||||
import errno
|
||||
from osgeo import osr
|
||||
import sqlite3
|
||||
import shutil
|
||||
|
||||
|
||||
# Classes
|
||||
class SNAPDataSet:
|
||||
"""
|
||||
Tools to work with a SNAP Dataset.
|
||||
|
||||
:param filename: A ZIP dataset from SNAP
|
||||
"""
|
||||
def __init__(self, filename):
|
||||
self.filename = filename
|
||||
self.zip_data = self.load_dataset()
|
||||
self.name_list = sorted(self.zip_data.namelist())
|
||||
# At this point, all SNAP datasets are zipped in a directory
|
||||
self.zip_dir = self.name_list[0]
|
||||
# Prefix in the form xyz_mean_C_abc_model_ (month_year.tif following)
|
||||
self.prefix = self.name_list[1][len(self.zip_dir):-11]
|
||||
|
||||
# Assume some info about dataset from the filename
|
||||
components = filename.replace('.', '_').split('_')[:-1]
|
||||
|
||||
if 'historical' in components:
|
||||
# HISTORICAL DATA
|
||||
self.model = 'CRU'
|
||||
self.scenario = components[components.index('CRU')+1]
|
||||
else:
|
||||
# PROJECTION DATA
|
||||
for comp in components:
|
||||
if comp == 'AK':
|
||||
startmarker = components.index(comp) + 2
|
||||
if comp.startswith('sres'):
|
||||
self.scenario = comp.replace('sres', '').upper()
|
||||
endmarker = components.index(comp)
|
||||
self.model = "_".join(components[startmarker:endmarker])
|
||||
|
||||
|
||||
def load_dataset(self):
|
||||
"""
|
||||
Import ZIP dataset.
|
||||
|
||||
:returns: A reference to a zipfile
|
||||
"""
|
||||
return zipfile.ZipFile(self.filename,"r")
|
||||
|
||||
|
||||
def dump_raw_temperatures(self, communities, extracted_temps, out):
|
||||
"""
|
||||
Given a set of extracted temperatures, generate csv output of data.
|
||||
|
||||
:param communities: Python list of community names
|
||||
:param extracted_temps: Numpy array with extracted temps
|
||||
:param out: path to output directory
|
||||
"""
|
||||
min_year = numpy.min(extracted_temps['year'])
|
||||
max_year = numpy.max(extracted_temps['year'])
|
||||
time_years = max_year - min_year + 1
|
||||
i = 0
|
||||
for community in communities:
|
||||
community = community.decode('utf-8')
|
||||
community = community.replace(" ", "_")
|
||||
outdir = ''.join([out, '/', community])
|
||||
mkdir_p(outdir)
|
||||
outfile = ''.join([outdir, '/', community, '_',
|
||||
self.model, '_', self.scenario, '_',
|
||||
str(min_year), '_',
|
||||
str(max_year),'.txt'])
|
||||
header = ' '.join([community.replace("_", " "), ',', str(min_year),
|
||||
'-', str(max_year), '\nAverage Monthly' \
|
||||
' Air Temperature (deg C)\nYear, ' \
|
||||
'Jan, Feb, Mar, Apr, May, Jun, Jul, ' \
|
||||
'Aug, Sep, Oct, Nov, Dec'])
|
||||
temp_data = extracted_temps[i, :]['temperature'].reshape(time_years,
|
||||
12)
|
||||
file_data = numpy.zeros((time_years, 13))
|
||||
file_data[:, 1:] = temp_data
|
||||
file_data[:, 0] = numpy.arange(min_year, max_year+1)
|
||||
numpy.savetxt(outfile, file_data, fmt=('%d', '%7.1f', '%7.1f',
|
||||
'%7.1f', '%7.1f', '%7.1f',
|
||||
'%7.1f', '%7.1f', '%7.1f',
|
||||
'%7.1f', '%7.1f', '%7.1f',
|
||||
'%7.1f'),
|
||||
delimiter=',', header=header)
|
||||
i += 1
|
||||
|
||||
|
||||
class GeoRefData(SNAPDataSet):
|
||||
"""
|
||||
Use GDAL to work with the SNAP datasets.
|
||||
|
||||
:param SNAPDataSet: A SNAPDataSet object
|
||||
"""
|
||||
def __init__(self, filename):
|
||||
SNAPDataSet.__init__(self, filename)
|
||||
test_tiff = self.read_geotiff_as_gdal(1, int(self.filename[-13:-9]))
|
||||
self.cols = test_tiff.RasterXSize
|
||||
self.rows = test_tiff.RasterYSize
|
||||
self.bands = test_tiff.RasterCount
|
||||
geotransform = test_tiff.GetGeoTransform()
|
||||
self.origin_x = geotransform[0]
|
||||
self.origin_y = geotransform[3]
|
||||
self.pixel_width = geotransform[1]
|
||||
self.pixel_height = geotransform[5]
|
||||
# Close the file
|
||||
test_tiff = None
|
||||
|
||||
|
||||
def read_geotiff_as_gdal(self, month, year):
|
||||
"""
|
||||
Read GeoTIFF Data in from ZIP dataset.
|
||||
|
||||
:param month: desired month (1- or 2-digit integer)
|
||||
:param year: desired year (4-digit integer)
|
||||
:returns: A GDAL data object
|
||||
"""
|
||||
# A bit clunky, but here we assemble a SNAP-style geotiff filename
|
||||
tiff = ''.join(['/vsizip/', self.filename, '/', self.zip_dir,
|
||||
self.prefix, str(month).zfill(2), '_',str(year),
|
||||
'.tif'])
|
||||
gdal_data = gdal.Open(tiff)
|
||||
return gdal_data
|
||||
|
||||
|
||||
def read_geotiff_as_array(self, month, year):
|
||||
"""
|
||||
Read GeoTIFF Data in from ZIP dataset.
|
||||
|
||||
:param month: desired month (1- or 2-digit integer)
|
||||
:param year: desired year (4-digit integer)
|
||||
:returns: A Numpy array
|
||||
"""
|
||||
gdal_data = self.read_geotiff_as_gdal(month, year)
|
||||
temp_band = gdal_data.GetRasterBand(1)
|
||||
temp_data = temp_band.ReadAsArray(0, 0, self.cols, self.rows)
|
||||
temp_band = None
|
||||
gdal_data = None
|
||||
return temp_data
|
||||
|
||||
|
||||
def ne_to_indices(self, northing, easting):
|
||||
"""
|
||||
Convert Northings and Eastings (NAD 83 Alaska Albers Equal Area
|
||||
Conic) to X,Y array indices.
|
||||
|
||||
:param northing: position northing (in meters)
|
||||
:param easting: position easting (in meters)
|
||||
:returns: array indices that correspond to location
|
||||
"""
|
||||
x_ind = (easting - self.origin_x)/self.pixel_width
|
||||
y_ind = (northing - self.origin_y)/self.pixel_height
|
||||
x_ind = x_ind.astype(numpy.int, copy=False)
|
||||
y_ind = y_ind.astype(numpy.int, copy=False)
|
||||
return (x_ind, y_ind)
|
||||
|
||||
|
||||
def indices_to_ne(self, x_ind, y_ind):
|
||||
"""
|
||||
Convert index values to Northings and Eastings (NAD 83 Alaska Albers
|
||||
Equal Area Conic).
|
||||
|
||||
:param x_ind: array x-index
|
||||
:param y_ind: array y-index
|
||||
:returns: position northings and eastings (in meters) corresponding to
|
||||
location
|
||||
"""
|
||||
northing = self.origin_y + (y_ind * self.pixel_height)
|
||||
easting = self.origin_x + (x_ind * self.pixel_width)
|
||||
return (northing, easting)
|
||||
|
||||
|
||||
def extract_points(self, northing, easting, start_year, end_year):
|
||||
"""
|
||||
Extract points from range of years between start and end at the
|
||||
specified points (Jan->Dec). Point locations should be numpy arrays.
|
||||
|
||||
:param northing: position northing (in meters)
|
||||
:param easting: position easting (in meters)
|
||||
:param start_year: 4-digit year for start of analysis period
|
||||
:param end_year: 4-digit year for end of analysis period, same as
|
||||
start_year if only analyzing one year
|
||||
:returns: numpy array of extracted temperatures
|
||||
"""
|
||||
x_offsets, y_offsets = self.ne_to_indices(northing, easting)
|
||||
years = list(range(start_year, end_year + 1))
|
||||
months = list(range(1, 13))
|
||||
# Record structure: (Year, Month, Temperature)
|
||||
# Each row represents a community, each column is a monthly temp.
|
||||
extracted_temps = numpy.zeros((len(x_offsets), 12*len(years)),
|
||||
dtype={'names': ['year', 'month',
|
||||
'temperature'],
|
||||
'formats':['i4', 'i4', 'f4']})
|
||||
i = 0
|
||||
for year, month in itertools.product(years, months):
|
||||
#for year in years:
|
||||
# for month in months:
|
||||
temp_data = self.read_geotiff_as_array(month, year)
|
||||
# gdal rotates for some reason, so y,x
|
||||
extracted_temps[:, i]['temperature'] = temp_data[y_offsets,
|
||||
x_offsets]
|
||||
extracted_temps[:, i]['year'] = year
|
||||
extracted_temps[:, i]['month'] = month
|
||||
i += 1
|
||||
return extracted_temps
|
||||
|
||||
|
||||
# Functions
|
||||
def mkdir_p(path):
|
||||
"""
|
||||
Function to emulate mkdir -p functionality.
|
||||
Pulled from: http://stackoverflow.com/q/600268/313548
|
||||
|
||||
:param path: path to create new directory at
|
||||
:returns: creates a path at the desired location, if one does not already
|
||||
exist
|
||||
"""
|
||||
try:
|
||||
os.makedirs(path)
|
||||
except OSError as exc:
|
||||
if exc.errno == errno.EEXIST and os.path.isdir(path):
|
||||
pass
|
||||
else:
|
||||
raise
|
||||
|
||||
|
||||
def wgs84_to_ne(latitude, longitude):
    """
    Convert WGS84 lat/long to Northings and Eastings (NAD 83 Alaska Albers
    Equal Area Conic).

    :param latitude: WGS84 latitude (in decimal degrees)
    :param longitude: WGS84 longitude (in decimal degrees)
    :returns: transformed coordinates to Alaska Albers
    """
    source = osr.SpatialReference()
    source.ImportFromEPSG(4326)   # WGS84 geographic
    target = osr.SpatialReference()
    target.ImportFromEPSG(3338)   # NAD83 / Alaska Albers
    # NOTE(review): (longitude, latitude) argument order matches the
    # pre-GDAL-3 axis convention -- confirm if the GDAL bindings are upgraded.
    transform = osr.CoordinateTransformation(source, target)
    return transform.TransformPoint(longitude, latitude)
|
||||
|
||||
|
||||
def ne_to_wgs(northing, easting):
    """
    Convert Northings and Eastings (NAD 83 Alaska Albers Equal Area Conic)
    to WGS84 lat/long.

    :param northing: AK Albers in meters
    :param easting: AK Albers in meters
    :returns: transformed coordinates in WGS84 lat long
    """
    target = osr.SpatialReference()
    target.ImportFromEPSG(4326)   # WGS84 geographic
    source = osr.SpatialReference()
    source.ImportFromEPSG(3338)   # NAD83 / Alaska Albers
    # NOTE(review): (easting, northing) argument order matches the
    # pre-GDAL-3 axis convention -- confirm if the GDAL bindings are upgraded.
    transform = osr.CoordinateTransformation(source, target)
    return transform.TransformPoint(easting, northing)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # This module is intended to be imported as a library; running it
    # directly performs no work.
    print("nothing to see here...")
|
2
tests/__init__.py
Executable file
2
tests/__init__.py
Executable file
|
@ -0,0 +1,2 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Nose tests for snapextract"""
|
60
tests/data/anc1950-2009.csv
Executable file
60
tests/data/anc1950-2009.csv
Executable file
|
@ -0,0 +1,60 @@
|
|||
7.70, 6.98, 30.20, 37.22, 47.66, 55.40, 58.64, 59.54, 49.82, 31.10, 11.30, 14.36
|
||||
8.06, 12.38, 15.08, 39.38, 49.28, 55.22, 61.52, 57.74, 48.74, 29.66, 22.28, 12.56
|
||||
6.26, 18.14, 24.98, 35.06, 44.42, 54.32, 59.36, 56.84, 47.48, 36.14, 30.92, 20.12
|
||||
9.86, 20.12, 24.08, 40.10, 49.82, 61.16, 62.60, 58.28, 48.38, 32.00, 19.94, 20.12
|
||||
12.20, 5.72, 25.16, 35.06, 50.72, 57.02, 59.90, 57.74, 49.82, 37.58, 26.96, 7.88
|
||||
21.20, 14.72, 25.70, 33.08, 46.40, 53.60, 59.90, 55.94, 47.48, 30.02, 8.60, 9.86
|
||||
7.88, 9.50, 21.74, 36.68, 46.76, 54.68, 58.82, 57.56, 47.12, 26.42, 15.80, 6.26
|
||||
16.70, 12.74, 31.28, 39.02, 50.72, 59.90, 60.44, 60.08, 50.72, 36.32, 32.18, 11.48
|
||||
21.20, 18.86, 31.28, 41.00, 50.00, 57.56, 59.00, 57.38, 47.30, 27.32, 18.68, 17.42
|
||||
12.02, 19.94, 16.52, 35.78, 49.82, 59.00, 57.92, 57.56, 48.74, 30.56, 23.00, 19.76
|
||||
19.40, 22.10, 23.54, 36.14, 51.44, 56.84, 59.54, 56.66, 47.84, 34.70, 21.02, 27.14
|
||||
22.82, 16.70, 18.14, 37.58, 49.82, 56.84, 59.72, 56.12, 49.28, 26.78, 15.44, 5.72
|
||||
15.26, 18.32, 21.92, 38.66, 46.76, 55.58, 60.44, 58.28, 46.40, 35.24, 21.02, 18.14
|
||||
21.74, 22.64, 26.24, 33.80, 49.10, 53.60, 60.62, 58.46, 52.70, 34.34, 10.94, 26.42
|
||||
16.34, 19.04, 19.04, 35.24, 43.16, 57.74, 59.00, 56.30, 49.64, 32.72, 20.48, 2.84
|
||||
12.20, 9.14, 38.12, 40.82, 47.12, 54.32, 60.26, 57.20, 53.96, 28.04, 18.32, 13.46
|
||||
11.66, 13.64, 18.68, 37.04, 46.22, 57.38, 59.72, 55.94, 47.66, 29.84, 16.52, 12.74
|
||||
9.14, 13.46, 25.52, 36.68, 48.92, 57.92, 61.16, 59.72, 49.10, 33.26, 28.04, 16.88
|
||||
13.82, 22.10, 30.38, 36.50, 50.18, 56.84, 61.88, 59.54, 47.84, 30.74, 21.02, 6.80
|
||||
5.54, 16.16, 30.38, 41.36, 50.18, 59.90, 61.16, 55.40, 49.64, 38.48, 21.92, 29.84
|
||||
10.76, 28.76, 37.22, 37.94, 50.00, 56.66, 58.82, 56.30, 46.94, 29.48, 23.36, 15.26
|
||||
3.20, 18.86, 16.16, 35.06, 44.06, 53.78, 57.74, 57.02, 47.12, 30.02, 16.70, 17.06
|
||||
7.70, 11.84, 16.88, 28.40, 46.04, 54.32, 61.52, 58.64, 45.86, 30.02, 20.48, 14.00
|
||||
4.46, 11.84, 26.60, 38.12, 46.40, 53.96, 59.72, 55.40, 46.76, 29.66, 12.20, 19.58
|
||||
8.06, 12.92, 25.16, 39.74, 50.00, 57.56, 59.36, 57.92, 50.90, 32.36, 21.56, 19.76
|
||||
12.92, 11.30, 23.90, 34.70, 48.20, 55.04, 60.62, 58.10, 50.00, 32.18, 13.10, 12.92
|
||||
18.14, 11.12, 25.88, 37.04, 47.30, 56.12, 60.98, 58.10, 48.38, 31.10, 29.66, 24.80
|
||||
33.98, 31.28, 25.88, 37.22, 48.74, 59.36, 64.04, 61.70, 51.08, 35.60, 12.92, 11.84
|
||||
22.82, 24.80, 31.28, 40.64, 50.90, 56.12, 60.62, 60.80, 51.62, 36.68, 24.80, 22.64
|
||||
23.54, 7.88, 33.44, 40.10, 51.80, 57.56, 62.42, 60.44, 52.70, 38.66, 31.82, 11.12
|
||||
15.44, 26.06, 29.30, 41.36, 48.56, 55.76, 59.54, 56.12, 47.84, 35.24, 26.78, 2.12
|
||||
33.44, 23.54, 36.32, 37.40, 52.88, 55.94, 59.54, 56.84, 48.56, 33.44, 20.84, 17.06
|
||||
7.88, 14.36, 28.22, 34.70, 46.58, 55.04, 58.28, 56.48, 48.38, 24.44, 20.12, 23.72
|
||||
17.96, 20.30, 31.10, 39.56, 51.08, 58.28, 60.80, 57.74, 46.22, 32.36, 24.08, 18.32
|
||||
20.66, 17.78, 38.30, 40.28, 51.08, 60.08, 62.06, 58.10, 50.00, 32.90, 18.50, 20.30
|
||||
32.54, 11.84, 28.58, 29.84, 47.12, 53.96, 60.44, 56.48, 48.38, 27.86, 12.38, 29.66
|
||||
27.14, 20.84, 25.88, 32.54, 48.56, 56.66, 59.72, 55.76, 49.10, 36.32, 23.54, 29.84
|
||||
25.16, 24.62, 28.94, 39.74, 49.28, 54.68, 59.36, 58.82, 48.56, 36.32, 25.70, 19.40
|
||||
19.94, 21.92, 33.44, 39.20, 50.90, 57.56, 61.16, 57.74, 48.74, 31.10, 18.86, 23.54
|
||||
5.00, 15.98, 25.52, 41.18, 48.92, 57.38, 61.70, 60.62, 51.62, 31.64, 15.98, 26.24
|
||||
17.24, 2.48, 30.74, 41.90, 52.34, 59.36, 60.98, 59.36, 50.54, 30.02, 8.60, 16.52
|
||||
17.96, 18.68, 26.06, 39.92, 49.10, 58.10, 59.72, 57.20, 51.98, 30.74, 23.72, 22.10
|
||||
22.82, 14.00, 27.14, 36.86, 48.20, 58.10, 61.70, 57.56, 43.34, 28.94, 25.70, 16.16
|
||||
17.06, 20.66, 31.10, 42.62, 53.06, 58.46, 63.32, 60.26, 49.46, 36.32, 23.90, 25.88
|
||||
23.90, 15.80, 28.04, 40.82, 49.64, 58.64, 60.98, 60.62, 49.28, 31.10, 13.82, 17.42
|
||||
17.60, 19.58, 20.48, 42.26, 51.26, 58.28, 61.16, 59.18, 54.50, 34.34, 19.94, 20.84
|
||||
7.88, 14.54, 31.28, 40.46, 52.16, 58.64, 61.70, 57.92, 47.48, 23.36, 17.60, 14.36
|
||||
17.24, 30.02, 31.82, 41.36, 50.72, 59.18, 63.32, 59.90, 51.44, 27.86, 26.78, 18.50
|
||||
17.42, 24.44, 32.36, 42.08, 49.46, 57.20, 59.90, 55.58, 49.82, 33.62, 21.74, 16.16
|
||||
14.00, 5.90, 26.60, 37.04, 48.02, 57.92, 60.80, 58.82, 49.82, 31.82, 18.50, 16.34
|
||||
16.52, 25.70, 32.36, 39.56, 48.38, 57.38, 59.18, 56.30, 47.66, 32.54, 28.76, 27.14
|
||||
29.12, 21.92, 30.74, 39.56, 47.48, 60.44, 59.72, 60.08, 50.18, 28.22, 18.32, 12.20
|
||||
25.52, 20.12, 24.44, 32.90, 50.90, 55.94, 62.06, 58.64, 50.90, 38.66, 33.80, 25.34
|
||||
23.72, 30.02, 28.94, 40.28, 50.36, 57.92, 64.22, 59.36, 49.82, 37.94, 20.66, 18.32
|
||||
11.48, 25.34, 26.42, 39.92, 53.06, 60.26, 64.04, 63.14, 46.04, 36.32, 27.68, 23.54
|
||||
21.38, 19.76, 34.16, 42.08, 53.42, 59.36, 63.68, 60.08, 52.34, 34.70, 15.80, 26.96
|
||||
12.74, 20.66, 26.06, 37.94, 51.08, 56.66, 60.62, 56.84, 50.72, 36.68, 10.94, 22.82
|
||||
18.14, 16.52, 16.52, 40.64, 49.82, 57.20, 60.62, 59.90, 51.44, 33.44, 29.66, 21.20
|
||||
15.08, 15.98, 32.36, 35.60, 48.38, 54.32, 58.28, 57.38, 49.46, 27.50, 19.94, 15.98
|
||||
15.08, 15.98, 24.26, 37.22, 51.08, 56.66, 62.06, 57.74, 50.00, 38.48, 18.86, 21.20
|
|
445
tests/data/communities_dist.csv
Executable file
445
tests/data/communities_dist.csv
Executable file
|
@ -0,0 +1,445 @@
|
|||
Name,XCOORD,YCOORD
|
||||
Adak Station,-1537921.500000,472626.470000
|
||||
Afognak,72618.935600,890268.857000
|
||||
Akhiok,-10336.001000,770992.142000
|
||||
Akiachak,-400881.240000,1236454.240000
|
||||
Akiak,-389166.520000,1235472.380000
|
||||
Akutan,-766705.740000,526440.516000
|
||||
Alakanuk,-539720.780000,1456223.520000
|
||||
Alatna,55179.925600,1849325.170000
|
||||
Aleknagik,-262135.620000,1040114.480000
|
||||
Aleut Village,73001.128800,891270.391000
|
||||
Alexander,181178.981000,1275389.400000
|
||||
Allakaket,60340.341300,1845958.230000
|
||||
Ambler,-168133.430000,1908022.590000
|
||||
Anaktuvuk Pass,94894.423900,2021892.680000
|
||||
Anchor Point,121350.655000,1089289.740000
|
||||
Anchorage,214641.356000,1250935.040000
|
||||
Anderson,232159.794000,1606305.790000
|
||||
Andreafsky,-476407.150000,1374496.500000
|
||||
Angoon,1144440.290000,1001745.650000
|
||||
Aniak,-291959.950000,1300960.020000
|
||||
Annette,1407654.190000,801380.279000
|
||||
Anvik,-316803.160000,1424144.980000
|
||||
Arctic Village,353986.164000,2041044.550000
|
||||
Atka,-1365568.700000,451321.447000
|
||||
Atmautluak,-457665.010000,1237801.650000
|
||||
Atqasuk,-129196.580000,2279559.230000
|
||||
Attu,-2121053.100000,858805.660000
|
||||
Auke Bay,1111953.350000,1094627.820000
|
||||
Ayakulik,-32895.463000,800781.882000
|
||||
Baranof,1143084.960000,952727.812000
|
||||
Barrow,-102348.390000,2368016.980000
|
||||
Bartlett Cove,1039465.170000,1081403.930000
|
||||
Beaver,295988.751000,1837150.060000
|
||||
Belkofski,-511666.170000,595176.485000
|
||||
Bell Island Hot Springs,1373997.740000,892377.765000
|
||||
Belmezok,-645708.000000,1808579.710000
|
||||
Beluga,159491.525000,1247752.750000
|
||||
Bessie Number Five Dredge Camp,-531926.430000,1665623.720000
|
||||
Bethel,-419830.420000,1225435.720000
|
||||
Bettles,101881.638000,1885031.970000
|
||||
Big Delta,395466.704000,1600823.320000
|
||||
Big Lake,221158.526000,1291240.010000
|
||||
Bill Moores,-511644.770000,1484758.110000
|
||||
Biorka,-800641.620000,497849.843000
|
||||
Birch Creek,367370.548000,1834978.020000
|
||||
Birchwood,240325.534000,1277982.110000
|
||||
Bird,250549.834000,1225132.430000
|
||||
Boswell Bay,430995.132000,1182583.730000
|
||||
Boundary,625308.707000,1627689.680000
|
||||
Brevig Mission,-578738.900000,1762939.480000
|
||||
Broadmoor,290337.859000,1664938.810000
|
||||
Buckland,-323704.800000,1797562.290000
|
||||
Candle,-360839.900000,1794350.130000
|
||||
Cantwell,251604.660000,1501107.630000
|
||||
Cape Lisburne,-493603.860000,2145729.050000
|
||||
Cape Pole,1240717.890000,851822.131000
|
||||
Cape Yakataga,638764.243000,1175375.890000
|
||||
Central,423682.566000,1764224.840000
|
||||
Chakaktolik,-504571.930000,1346970.970000
|
||||
Chakwaktolik,-520161.230000,1288038.860000
|
||||
Chalkyitsik,454541.183000,1890452.900000
|
||||
Chaniliut,-473779.370000,1485136.590000
|
||||
Charlieskin Village,612947.097000,1500429.110000
|
||||
Chatham,1124560.690000,997052.924000
|
||||
Chefornak,-565904.270000,1174110.820000
|
||||
Chena Hot Springs,373187.587000,1699311.580000
|
||||
Chenega Bay,332163.441000,1135641.770000
|
||||
Chevak,-611150.750000,1336614.010000
|
||||
Chichagof,1052892.760000,993988.034000
|
||||
Chickaloon,290717.049000,1325365.990000
|
||||
Chicken,584679.465000,1620913.950000
|
||||
Chignik,-271948.590000,707458.506000
|
||||
Chignik Lagoon,-278883.330000,714441.616000
|
||||
Chignik Lake,-293485.640000,705389.794000
|
||||
Chiniak,109292.571000,847655.201000
|
||||
Chisana,619344.999000,1399305.720000
|
||||
Chistochina,477044.830000,1432730.180000
|
||||
Chitina,505441.894000,1318279.050000
|
||||
Christian,379154.350000,1959429.770000
|
||||
Chuathbaluk,-276863.430000,1299095.090000
|
||||
Chugiak,240558.668000,1276753.670000
|
||||
Chuloonawick,-499906.060000,1473464.890000
|
||||
Circle,453439.303000,1797019.010000
|
||||
Circle Hot Springs,432802.608000,1755485.510000
|
||||
Clam Gulch,143836.512000,1140959.860000
|
||||
Clarks Point,-261626.120000,992028.379000
|
||||
Clear,233220.067000,1605174.430000
|
||||
Clover Pass,1377601.350000,839209.143000
|
||||
Coffman Cove,1304538.490000,870347.576000
|
||||
Cohoe,148382.706000,1156319.480000
|
||||
Cold Bay,-553787.730000,611214.531000
|
||||
College,293144.729000,1668929.520000
|
||||
Cooper Landing,228036.940000,1174316.420000
|
||||
Copper Center,453412.802000,1360665.560000
|
||||
Cordova,449509.033000,1200929.850000
|
||||
Council,-456570.970000,1692667.720000
|
||||
Craig,1296184.370000,812342.581000
|
||||
Crooked Creek,-215427.430000,1328105.840000
|
||||
Deadhorse,214610.089000,2256845.540000
|
||||
Deering,-394379.810000,1816766.380000
|
||||
Delta Junction,402363.204000,1588765.870000
|
||||
Dillingham,-254798.990000,1013468.980000
|
||||
Diomede,-678571.700000,1832766.290000
|
||||
Dot Lake,489943.445000,1558618.530000
|
||||
Dry Creek,464064.096000,1558991.610000
|
||||
Dutch Harbor,-820540.960000,508407.519000
|
||||
Eagle,604643.376000,1705864.300000
|
||||
Eagle River,236396.270000,1268065.250000
|
||||
Eagle Village,608839.502000,1705833.680000
|
||||
Edna Bay,1249389.700000,852637.547000
|
||||
Eek,-442034.810000,1163510.420000
|
||||
Egavik,-336922.530000,1580757.350000
|
||||
Egegik,-197707.650000,917825.789000
|
||||
Egorkovskoi,-922711.940000,492043.188000
|
||||
Eklutna,246223.652000,1283961.160000
|
||||
Ekuk,-261341.870000,987504.892000
|
||||
Ekwok,-196899.080000,1044725.840000
|
||||
Elephant Point,-329705.150000,1830335.970000
|
||||
Elfin Cove,1023035.410000,1047246.370000
|
||||
Elim,-393981.830000,1652929.850000
|
||||
Ellamar,393547.786000,1234107.470000
|
||||
Emmonak,-533499.380000,1465289.470000
|
||||
English Bay,117729.165000,1042376.340000
|
||||
Eska,268023.893000,1316903.930000
|
||||
Ester,283525.278000,1666802.040000
|
||||
Eva Creek,249149.33,1575567.71
|
||||
Evansville,109511.723000,1887063.730000
|
||||
Excursion Inlet,1067090.560000,1085356.890000
|
||||
Eyak,452316.195000,1203806.200000
|
||||
Fairbanks,297703.529000,1667062.690000
|
||||
False Pass,-602250.710000,580675.702000
|
||||
Ferry,238224.716000,1570105.360000
|
||||
Fish Village,-503746.570000,1431620.300000
|
||||
Flat,-206085.210000,1392911.750000
|
||||
Fort Glenn,-918887.980000,472119.907000
|
||||
Fort Yukon,387657.594000,1870748.040000
|
||||
Fox,301046.268000,1680847.220000
|
||||
Gakona,448518.012000,1399145.390000
|
||||
Galena,-139372.260000,1644385.450000
|
||||
Gambell,-862973.650000,1650836.650000
|
||||
Georgetown,-198216.330000,1331394.300000
|
||||
Girdwood,260783.847000,1227108.400000
|
||||
Glennallen,438714.819000,1376144.990000
|
||||
Golovin,-431545.520000,1649498.710000
|
||||
Goodnews,-431887.990000,1038207.910000
|
||||
Goodnews Bay,-432158.630000,1038452.770000
|
||||
Grayling,-307626.970000,1451369.380000
|
||||
Gulkana,444841.996000,1395224.030000
|
||||
Gustavus,1050644.210000,1080189.180000
|
||||
HAARP,454935.91793,1410212.42456
|
||||
Haines,1041662.260000,1173311.440000
|
||||
Hamilton,-493238.980000,1478527.090000
|
||||
Haycock,-334835.330000,1713294.880000
|
||||
Healy,246891.453000,1552961.450000
|
||||
Healy Lake,451073.978000,1584684.040000
|
||||
Highland Park,314902.936000,1659831.510000
|
||||
Holikachuk,-279305.220000,1449440.790000
|
||||
Hollis,1324890.310000,823128.540000
|
||||
Holy Cross,-299074.310000,1371274.210000
|
||||
Homer,137738.364000,1074855.330000
|
||||
Hoonah,1076435.470000,1052386.260000
|
||||
Hooper Bay,-637709.090000,1341775.350000
|
||||
Hope,235426.184000,1222951.240000
|
||||
Houston,220846.744000,1301574.710000
|
||||
Hughes,-11603.881000,1787897.360000
|
||||
Huslia,-110370.410000,1750889.140000
|
||||
Hydaburg,1324869.370000,790278.636000
|
||||
Hyder,1465089.710000,923621.950000
|
||||
Iditarod,-209925.470000,1403361.160000
|
||||
Igiugig,-107454.850000,1038657.290000
|
||||
Ikatan,-597615.420000,568238.247000
|
||||
Iliamna,-45759.842000,1085017.670000
|
||||
Inakpuk,-177514.970000,1064312.510000
|
||||
Indian,241809.605000,1230909.930000
|
||||
Indian River,487156.662000,1445664.050000
|
||||
Ingrihak,-420043.420000,1333973.250000
|
||||
Ivanof Bay,-343686.750000,668581.584000
|
||||
Joe Ward Camp,451872.494000,1913902.820000
|
||||
Jonesville,266628.206000,1316023.040000
|
||||
Juneau,1127916.270000,1089943.440000
|
||||
Kachemak,143174.861000,1075765.090000
|
||||
Kaguyak,14183.452800,761395.323000
|
||||
Kake,1198358.550000,956670.684000
|
||||
Kakhonak,-42712.633000,1050102.510000
|
||||
Kaktovik,399091.420000,2270535.310000
|
||||
Kalskag,-333679.470000,1300261.080000
|
||||
Kaltag,-227902.240000,1604108.650000
|
||||
Kanakanak,-259412.350000,1009647.210000
|
||||
Karluk,-27176.078000,841001.611000
|
||||
Kasaan,1338805.560000,833782.306000
|
||||
Kashega,-870204.880000,470130.411000
|
||||
Kashegelok,-207595.390000,1211483.130000
|
||||
Kasigluk,-459535.040000,1241966.650000
|
||||
Kasilof,149922.803000,1153113.880000
|
||||
Kathakne,612066.341000,1500572.710000
|
||||
Kenai,149806.666000,1177388.750000
|
||||
Kenny Lake,475366.539000,1339120.360000
|
||||
Kepangalook,-411599.590000,1230987.170000
|
||||
Ketchikan,1391079.160000,828573.040000
|
||||
Kiana,-281301.980000,1904428.080000
|
||||
Kinegnak,-440266.000000,1007283.650000
|
||||
King Cove,-529536.940000,594290.425000
|
||||
King Island,-659287.260000,1737718.680000
|
||||
King Salmon,-153781.210000,968717.225000
|
||||
Kipnuk,-557098.580000,1147655.470000
|
||||
Kivalina,-446955.050000,2009709.110000
|
||||
Kiwalik,-355139.510000,1807119.360000
|
||||
Klawock,1296725.110000,821398.159000
|
||||
Klery Creek,-278130.360000,1926888.320000
|
||||
Klukwan,1012180.700000,1183937.060000
|
||||
Knik,226784.765000,1282632.940000
|
||||
Kobuk,-126670.390000,1886035.930000
|
||||
Kodiak,94464.052000,866399.215000
|
||||
Kokhanok,-43016.718000,1050206.620000
|
||||
Kokrines,-33100.647000,1663792.060000
|
||||
Koliganek,-184003.280000,1086491.430000
|
||||
Kongiganak,-492540.200000,1141312.020000
|
||||
Kotlik,-480587.140000,1486205.880000
|
||||
Kotzebue,-377149.160000,1906743.900000
|
||||
Koyuk,-337794.110000,1681680.610000
|
||||
Koyukuk,-175240.860000,1662580.420000
|
||||
Kupreanof,1258330.760000,957016.475000
|
||||
Kustatan,122336.656000,1194521.330000
|
||||
Kvichak,-168108.380000,1000467.510000
|
||||
Kwethluk,-402337.690000,1225677.880000
|
||||
Kwigillingok,-508242.990000,1132133.900000
|
||||
Kwiguk,-532691.270000,1462953.210000
|
||||
Lake Minchumina,82787.083900,1547444.070000
|
||||
Larsen Bay,1284.179720,837346.387000
|
||||
Last Tetlin Village,571838.201000,1500619.420000
|
||||
Latouche,338090.501000,1133577.680000
|
||||
Lemeta,297076.884000,1669299.680000
|
||||
Lena Beach,1106673.780000,1094192.050000
|
||||
Levelock,-163002.530000,1016839.200000
|
||||
Libbyville,-176083.760000,979690.532000
|
||||
Lime Village,-76513.219000,1264823.970000
|
||||
Livengood,252272.228000,1739861.670000
|
||||
Loring,1382227.470000,856291.409000
|
||||
Lower Kalskag,-336729.370000,1297553.010000
|
||||
Lower Tonsina,491620.046000,1332025.270000
|
||||
Lucky Shot Landing,241330.849000,1319158.760000
|
||||
Manley Hot Springs,158728.068000,1675200.830000
|
||||
Manokotak,-289311.360000,1009208.730000
|
||||
Mansfield Village,523504.047000,1541652.830000
|
||||
Marshall,-422621.890000,1348053.470000
|
||||
Marys Igloo,-516769.640000,1730674.430000
|
||||
Matanuska,252600.828000,1293798.220000
|
||||
McCarthy,586369.842000,1321619.250000
|
||||
McGrath,-80757.386000,1443866.590000
|
||||
McKinley Park,250705.824000,1537650.650000
|
||||
Meade River,-129371.350000,2279272.380000
|
||||
Meakerville,490138.261000,1206107.400000
|
||||
Medfra,-35960.063000,1459886.670000
|
||||
Mekoryuk,-665638.200000,1216921.550000
|
||||
Mendeltna,390029.514000,1362544.740000
|
||||
Mentasta Lake,515193.201000,1478634.130000
|
||||
Metlakatla,1403361.240000,807743.191000
|
||||
Meyers Chuck,1340206.260000,858006.353000
|
||||
Minto,218998.347000,1696173.840000
|
||||
Montana,204540.424000,1350938.670000
|
||||
Moose Pass,253523.866000,1175671.160000
|
||||
Morzhovoi,-594783.020000,585788.482000
|
||||
Moses Point,-382088.730000,1660736.280000
|
||||
Mountain Village,-504331.890000,1382831.880000
|
||||
Myers Chuck,1340298.780000,858143.701000
|
||||
Nabesna,564520.110000,1424380.750000
|
||||
Nakeen,-174232.420000,997318.892000
|
||||
Naknek,-173927.630000,974053.411000
|
||||
Nanwalek,118112.424000,1041907.340000
|
||||
Napaimiut,-248419.720000,1293499.840000
|
||||
Napakiak,-431676.830000,1216086.700000
|
||||
Napamiute,-247502.740000,1293244.480000
|
||||
Napaskiak,-421484.200000,1216147.640000
|
||||
Nash Harbor,-710224.670000,1204527.310000
|
||||
Nelchina,387913.546000,1362226.820000
|
||||
Nelson Lagoon,-447824.190000,689951.534000
|
||||
Nenana,234871.143000,1631085.700000
|
||||
New Knockhock,-564009.790000,1396786.650000
|
||||
New Stuyahok,-187079.990000,1055766.430000
|
||||
Newhalen,-50296.161000,1081318.560000
|
||||
Newtok,-571557.270000,1263564.420000
|
||||
Nightmute,-584925.860000,1213236.710000
|
||||
Nikiski,145268.887000,1186729.940000
|
||||
Nikolaevsk,132886.513000,1093522.670000
|
||||
Nikolai,-18943.145000,1449308.960000
|
||||
Nikolski,-994394.330000,436725.364000
|
||||
Nilikluguk,-604764.020000,1235876.180000
|
||||
Ninilchik,129361.286000,1120310.270000
|
||||
Noatak,-383179.910000,1982832.580000
|
||||
Nome,-544971.690000,1662325.060000
|
||||
Nondalton,-47053.709000,1109638.450000
|
||||
Noorvik,-309527.490000,1892041.470000
|
||||
North Pole,315996.379000,1659134.110000
|
||||
Northway,607059.788000,1498762.680000
|
||||
Northway Indian Village,605980.108000,1501030.250000
|
||||
Northway Junction,613137.087000,1506216.180000
|
||||
Nuiqsut,116305.348000,2251323.450000
|
||||
Nulato,-195370.130000,1645799.740000
|
||||
Nunapitchuk,-456237.760000,1241700.690000
|
||||
Nyac,-320025.070000,1239097.070000
|
||||
Ohogamiut,-415392.630000,1312250.390000
|
||||
Old Harbor,41977.437700,799917.890000
|
||||
Old Minto,228006.355000,1666785.020000
|
||||
Ophir,-126680.840000,1466342.980000
|
||||
Oscarville,-422533.550000,1218444.650000
|
||||
Osviak,-419498.550000,1003066.230000
|
||||
Ouzinkie,88843.182700,881195.198000
|
||||
Paimiut,-620536.320000,1357518.210000
|
||||
Palmer,258281.247000,1300715.820000
|
||||
Pastolik,-468719.890000,1480343.830000
|
||||
Pauloff Harbor,-563048.980000,530836.314000
|
||||
Paxson,428657.491000,1479458.460000
|
||||
Pedro Bay,-5487.761800,1088967.270000
|
||||
Pelican,1036491.210000,1023868.820000
|
||||
Pennock Island,1392614.880000,827411.365000
|
||||
Perryville,-322353.990000,668239.219000
|
||||
Petersburg,1261353.080000,957677.558000
|
||||
Petersville,165870.078000,1395365.010000
|
||||
Pilot Point,-213528.060000,845803.811000
|
||||
Pilot Station,-462994.240000,1360124.180000
|
||||
Pitkas Point,-482923.030000,1373465.640000
|
||||
Platinum,-446458.800000,1028233.640000
|
||||
Point Baker,1238228.890000,896329.697000
|
||||
Point Hope,-529220.710000,2094063.290000
|
||||
Point Lay,-354043.660000,2222365.580000
|
||||
Poorman,-75173.939000,1571416.280000
|
||||
Port Alexander,1181172.720000,866712.874000
|
||||
Port Alsworth,-17276.056000,1134979.000000
|
||||
Port Ashton,329730.294000,1133591.990000
|
||||
Port Graham,122993.642000,1041772.890000
|
||||
Port Heiden,-281761.920000,782075.151000
|
||||
Port Lions,66146.637600,874479.442000
|
||||
Port Moller,-409272.420000,684683.005000
|
||||
Port Protection,1240692.170000,892442.556000
|
||||
Portage,271761.876000,1216237.690000
|
||||
Portage Creek,-213239.020000,995330.040000
|
||||
Portlock,128615.684000,1026064.190000
|
||||
Prudhoe Bay,215806.928000,2259904.450000
|
||||
Quinhagak,-442331.230000,1110603.440000
|
||||
Rainbow,234735.298000,1232310.090000
|
||||
Rampart,177343.540000,1732437.710000
|
||||
Red Devil,-174242.630000,1313604.480000
|
||||
Red Dog,-372005.5485954737,2037279.2923727476
|
||||
Refuge Cove,1382978.600000,833378.147000
|
||||
Ruby,-70790.041000,1642785.980000
|
||||
Russian Mission,-384393.920000,1333219.000000
|
||||
Saint George,-944287.610000,843785.023000
|
||||
Saint Marys,-476314.520000,1374822.320000
|
||||
Saint Michael,-398836.570000,1525348.840000
|
||||
Saint Paul,-973759.930000,910613.341000
|
||||
Salamatof,145965.091000,1180959.600000
|
||||
Salt Chuck,1326571.610000,840121.357000
|
||||
Sanak,-570001.500000,534473.853000
|
||||
Sand Point,-411424.970000,612081.313000
|
||||
Savonoski,-165499.790000,972382.228000
|
||||
Savoonga,-805427.770000,1625359.430000
|
||||
Saxman,1394964.880000,827113.823000
|
||||
Scammon Bay,-604798.100000,1371265.400000
|
||||
Selawik,-266860.640000,1861675.770000
|
||||
Seldovia,129367.303000,1051675.210000
|
||||
Seward,252371.412000,1132619.480000
|
||||
Shageluk,-283728.600000,1424135.800000
|
||||
Shaktoolik,-346042.960000,1617280.740000
|
||||
Sheldon Point,-553926.810000,1440948.650000
|
||||
Shemya Station,-2079920.000000,810448.348000
|
||||
Shishmaref,-541068.090000,1860314.150000
|
||||
Shungnak,-137987.550000,1884415.000000
|
||||
Sitka,1115599.140000,940830.783000
|
||||
Skagway,1041919.730000,1199273.860000
|
||||
Skwentna,148534.951000,1335227.550000
|
||||
Slana,510327.800000,1453776.770000
|
||||
Slaterville,297526.602000,1668726.220000
|
||||
Sleetmute,-167049.630000,1306691.890000
|
||||
Soldotna,161063.090000,1170414.980000
|
||||
Solomon,-498112.740000,1661299.710000
|
||||
South Naknek,-173080.990000,972582.935000
|
||||
Spenard,218678.026000,1251920.770000
|
||||
Squaw Harbor,-416017.300000,601745.804000
|
||||
Stebbins,-410507.200000,1531765.880000
|
||||
Sterling,177198.901000,1176731.460000
|
||||
Stevens Village,223104.841000,1791395.870000
|
||||
Stony River,-136058.950000,1314369.800000
|
||||
Summit,243731.328000,1493515.490000
|
||||
Sunrise,247254.729000,1220542.040000
|
||||
Suntrana,252785.641000,1553470.440000
|
||||
Susitna,184716.430000,1289784.910000
|
||||
Sutton,268846.670000,1314039.750000
|
||||
Swale Bridge,114718.64380,2260712.27489
|
||||
Takotna,-104337.670000,1448116.580000
|
||||
Talkeetna,200934.644000,1378078.500000
|
||||
Tanacross,529229.992000,1533364.260000
|
||||
Tanana,90062.020800,1691520.090000
|
||||
Tanunak,-611657.570000,1229709.530000
|
||||
Tatitlek,395526.972000,1230826.480000
|
||||
Tazlina,446558.348000,1368283.470000
|
||||
Tee Harbor,1105523.730000,1095784.810000
|
||||
Telida,36585.624900,1490801.840000
|
||||
Teller,-574324.300000,1754040.060000
|
||||
Teller Mission,-578502.460000,1762735.690000
|
||||
Tenakee Springs,1099384.530000,1020736.530000
|
||||
Tetlin,567781.846000,1531353.630000
|
||||
Tetlin Junction,567259.424000,1531966.100000
|
||||
Thane,1134215.850000,1087422.830000
|
||||
Thompson Pass,442872.250000,1266131.440000
|
||||
Thorne Bay,1327899.990000,852272.734000
|
||||
Tin City,-635965.320000,1798902.480000
|
||||
Togiak,-363965.070000,1024882.760000
|
||||
Tok,547889.359000,1530921.350000
|
||||
Tokeen,1269819.890000,857820.610000
|
||||
Toksook Bay,-604434.160000,1222242.270000
|
||||
Tolovana,197915.766000,1661022.240000
|
||||
Tonsina,464585.462000,1328394.810000
|
||||
Tuluksak,-373374.120000,1255186.810000
|
||||
Tuntutuliak,-475612.340000,1181370.720000
|
||||
Tununak,-611659.180000,1229732.500000
|
||||
Twin Hills,-358011.980000,1026256.830000
|
||||
Tyonek,153739.83063,1235050.56184
|
||||
Uganik,34568.026700,853522.790000
|
||||
Ugashik,-202995.930000,839524.820000
|
||||
Ukivok,-659433.660000,1737442.240000
|
||||
Umiat,74442.855800,2156296.570000
|
||||
Umkumiute,-610238.790000,1219596.050000
|
||||
Umnak,-943787.410000,462831.010000
|
||||
Unalakleet,-332500.560000,1562316.350000
|
||||
Unalaska,-820776.410000,506472.655000
|
||||
Unga,-413708.500000,594748.185000
|
||||
Upper Kalskag,-333676.390000,1300069.800000
|
||||
Utukakarvik,-559629.410000,1377016.870000
|
||||
Uyak,720.982749,846905.381000
|
||||
Valdez,409872.010000,1262403.090000
|
||||
Venetie,331356.530000,1914056.000000
|
||||
Wainwright,-227853.550000,2304938.340000
|
||||
Wales,-645324.38185,1807986.89762
|
||||
Ward Cove,1383925.700000,833873.297000
|
||||
Wasilla,241189.832000,1297446.180000
|
||||
Whale Pass,1273632.470000,878090.102000
|
||||
White Mountain,-447244.450000,1667236.420000
|
||||
Whittier,288294.714000,1210290.910000
|
||||
Willow,208489.082000,1313925.330000
|
||||
Wiseman,167829.756000,1943993.260000
|
||||
Womens Bay,84046.882400,857214.783000
|
||||
Woody Island,97568.928100,865357.660000
|
||||
Wrangell,1307021.500000,932698.183000
|
||||
Yakutat,798126.272000,1147719.220000
|
|
60
tests/data/fbx1950-2009.csv
Executable file
60
tests/data/fbx1950-2009.csv
Executable file
|
@ -0,0 +1,60 @@
|
|||
-1.48, -24.34, 20.12, 30.74, 49.10, 59.54, 62.96, 57.92, 46.58, 22.28, -7.60, -4.18
|
||||
-23.26, -9.40, 2.48, 36.32, 50.90, 57.74, 61.34, 58.10, 46.76, 18.68, 9.68, -6.16
|
||||
-19.48, -4.54, 11.66, 29.84, 43.16, 59.00, 61.16, 54.14, 42.26, 27.14, 13.82, -1.84
|
||||
-22.54, 0.86, 10.40, 39.02, 52.88, 62.42, 63.14, 56.12, 44.96, 20.48, 0.50, -2.74
|
||||
-14.62, -18.40, 14.72, 28.22, 51.80, 59.36, 59.36, 56.84, 42.44, 27.86, 10.76, -19.84
|
||||
-3.10, -13.72, 12.92, 23.90, 47.48, 56.30, 62.42, 53.24, 43.34, 20.12, -11.38, -7.96
|
||||
-20.20, -12.28, 8.78, 33.08, 50.00, 58.28, 62.06, 55.94, 41.72, 13.64, -6.34, -25.96
|
||||
1.22, -4.54, 20.66, 34.34, 50.00, 64.76, 61.70, 59.54, 43.34, 25.70, 12.92, -14.08
|
||||
-3.10, -0.58, 18.68, 36.50, 49.82, 63.32, 63.50, 56.84, 42.80, 11.30, -0.22, -5.80
|
||||
-20.02, 4.10, -3.64, 27.86, 48.74, 62.06, 56.84, 54.50, 43.16, 20.48, 4.82, -8.14
|
||||
-4.36, 1.40, 7.16, 28.76, 54.50, 56.66, 62.96, 55.22, 41.00, 22.82, -2.74, 6.62
|
||||
-2.20, -6.34, 3.56, 27.14, 50.72, 59.36, 60.26, 55.40, 43.70, 17.60, -5.80, -22.36
|
||||
-7.78, 5.00, 10.76, 32.00, 46.22, 59.18, 64.40, 58.10, 41.18, 27.14, 2.66, -5.08
|
||||
1.04, -1.84, 10.58, 26.60, 50.72, 54.32, 61.16, 55.04, 47.48, 22.82, -12.28, 5.36
|
||||
-15.70, -1.12, 0.86, 27.68, 40.10, 60.62, 60.80, 56.48, 44.60, 24.80, 0.86, -25.06
|
||||
-18.04, -19.48, 27.50, 32.90, 44.42, 55.94, 61.34, 53.06, 47.84, 10.40, 1.94, -12.64
|
||||
-27.04, -9.22, 0.50, 28.94, 46.76, 63.86, 63.32, 57.02, 49.64, 20.48, -1.30, -18.22
|
||||
-15.34, -8.50, 12.74, 33.44, 47.12, 62.24, 60.80, 58.28, 46.22, 21.20, 7.70, -0.58
|
||||
-11.38, -6.52, 15.98, 31.10, 49.10, 60.08, 66.74, 58.64, 42.44, 18.50, 0.68, -16.24
|
||||
-26.68, -8.86, 13.10, 38.12, 50.90, 65.48, 60.26, 50.00, 48.74, 30.38, -0.58, 5.54
|
||||
-16.60, 6.26, 23.54, 33.80, 53.06, 58.46, 63.32, 57.02, 40.46, 15.08, 8.60, -8.68
|
||||
-31.54, -6.34, 2.48, 28.58, 48.74, 63.86, 62.06, 56.12, 44.24, 23.90, -1.48, -4.72
|
||||
-16.78, -12.10, 0.14, 22.46, 48.74, 59.90, 65.48, 59.00, 39.92, 23.18, 5.00, -1.48
|
||||
-18.40, -3.46, 14.36, 36.86, 51.98, 60.80, 62.96, 55.22, 46.94, 21.38, -2.74, -1.84
|
||||
-16.78, -19.66, 10.22, 36.50, 52.52, 59.18, 64.40, 59.18, 51.08, 17.78, -1.30, -10.12
|
||||
-15.88, -5.26, 15.26, 32.18, 54.68, 63.68, 69.08, 56.12, 45.50, 20.12, -9.76, -14.80
|
||||
-11.56, -15.52, 14.54, 37.58, 49.28, 60.08, 62.78, 59.18, 44.96, 20.30, 14.18, -2.56
|
||||
9.32, 6.80, 7.16, 29.84, 50.18, 60.08, 63.68, 62.60, 45.50, 22.10, -9.40, -13.54
|
||||
-0.22, 2.12, 16.70, 36.68, 51.44, 55.58, 64.40, 59.54, 46.40, 19.58, 6.80, 4.28
|
||||
-7.60, -26.86, 14.72, 32.72, 51.44, 58.10, 62.42, 60.62, 46.40, 28.76, 18.14, -9.04
|
||||
-9.76, 13.82, 19.94, 37.58, 52.16, 57.38, 62.06, 53.96, 42.62, 29.12, 9.50, -22.72
|
||||
17.78, 3.20, 29.66, 33.08, 52.88, 59.36, 57.74, 53.96, 43.70, 25.70, 10.04, -2.92
|
||||
-18.22, -5.80, 15.62, 29.30, 48.20, 59.18, 64.04, 56.66, 48.74, 14.72, 2.30, 3.38
|
||||
-11.56, 1.40, 16.16, 39.02, 51.62, 62.60, 64.94, 53.60, 40.64, 19.76, 6.62, -2.38
|
||||
-6.34, -15.34, 24.08, 32.00, 48.56, 62.06, 61.88, 53.78, 46.40, 22.10, -2.02, -2.02
|
||||
10.94, -11.38, 17.06, 22.64, 48.20, 58.28, 64.04, 56.30, 42.62, 15.26, -6.34, 9.14
|
||||
-2.20, 2.66, 8.60, 25.70, 49.10, 62.96, 64.40, 54.86, 45.86, 23.54, -1.84, 8.60
|
||||
0.32, -0.22, 15.98, 36.50, 52.16, 62.24, 64.94, 57.92, 43.70, 29.48, 4.10, -1.84
|
||||
-5.44, 1.94, 20.30, 35.42, 53.96, 63.32, 66.56, 58.46, 44.06, 14.00, -5.08, 5.54
|
||||
-21.64, 1.58, 9.32, 37.94, 49.28, 60.62, 65.48, 60.80, 48.20, 22.64, -8.86, 5.90
|
||||
-13.00, -23.44, 21.20, 39.92, 56.30, 62.06, 66.20, 59.90, 44.60, 20.48, -7.06, -5.08
|
||||
-4.72, -2.92, 14.54, 37.22, 52.70, 64.40, 61.52, 54.32, 47.66, 21.20, -1.30, -1.48
|
||||
-4.90, -10.30, 16.88, 27.86, 43.34, 60.62, 64.94, 56.66, 39.02, 13.82, 8.42, -6.34
|
||||
-4.00, 0.68, 19.94, 42.80, 55.04, 62.60, 66.56, 56.12, 43.70, 25.70, 6.44, 2.12
|
||||
-1.66, -8.14, 13.46, 36.50, 53.06, 59.00, 65.84, 59.72, 43.70, 17.60, -1.66, -7.06
|
||||
-9.40, -1.84, 6.62, 41.90, 54.86, 61.34, 64.04, 57.20, 52.34, 24.44, -4.54, -7.60
|
||||
-17.32, -5.62, 18.68, 34.88, 50.54, 60.26, 64.22, 53.60, 41.72, 9.50, -3.82, -12.10
|
||||
-16.42, 10.76, 7.52, 37.04, 50.90, 63.50, 65.66, 59.00, 49.64, 14.18, 8.96, -4.54
|
||||
-13.36, 0.50, 21.38, 40.46, 51.62, 59.72, 64.04, 53.60, 46.04, 22.46, 3.20, -4.00
|
||||
-16.96, -18.40, 10.94, 34.34, 48.56, 62.24, 62.60, 58.46, 45.14, 16.16, -6.16, -10.66
|
||||
-9.76, 5.54, 20.30, 33.98, 46.22, 61.88, 61.34, 52.34, 41.36, 19.04, 6.80, 1.76
|
||||
7.88, 5.36, 13.28, 35.60, 46.04, 62.06, 61.16, 57.74, 47.48, 19.04, -1.12, -8.86
|
||||
3.74, -2.38, 13.10, 25.34, 51.98, 59.00, 62.96, 54.86, 47.30, 28.22, 17.24, 6.44
|
||||
-3.46, 9.14, 11.66, 35.24, 48.92, 61.34, 62.24, 56.30, 41.72, 28.94, 7.16, -7.60
|
||||
-15.88, -0.58, 9.50, 36.86, 53.96, 67.64, 65.48, 62.42, 38.84, 26.24, 5.18, -2.20
|
||||
-8.86, -4.18, 23.00, 34.52, 57.20, 62.06, 63.32, 57.92, 46.40, 24.26, -6.70, 2.30
|
||||
-21.82, 3.92, 6.44, 31.82, 51.62, 59.18, 62.60, 55.04, 49.46, 28.40, -11.56, -0.58
|
||||
-6.88, -8.14, -3.28, 39.56, 52.70, 62.42, 65.48, 60.98, 46.94, 17.96, 9.86, -1.84
|
||||
-9.04, -6.70, 18.32, 32.36, 51.44, 60.80, 62.06, 55.22, 46.76, 12.56, -2.92, -6.34
|
||||
-11.38, -3.28, 9.14, 32.90, 53.42, 61.34, 67.64, 55.40, 48.20, 27.14, -2.38, -2.38
|
|
97
tests/test_backend_advanced.py
Executable file
97
tests/test_backend_advanced.py
Executable file
|
@ -0,0 +1,97 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Advanced tests for snapextract backend."""
|
||||
|
||||
import snapextract
|
||||
import nose
|
||||
from nose.tools import assert_equal
|
||||
import numpy as np
|
||||
from numpy.testing import assert_array_almost_equal
|
||||
import shutil
|
||||
import os
|
||||
|
||||
def test_extract_point_data_1c_59y():
    """
    Extract point temperatures for Anchorage across the full 1950-2009
    span of a SNAP historical dataset (1 city, all years).
    """
    zip_path = 'raw_data/tas_AK_771m_CRU_TS31_historical_1950_2009.zip'
    dataset = snapextract.GeoRefData(zip_path)
    # Anchorage: EASTING=214641.356000, NORTHING=1250935.040000
    northings = np.array([1250935.040000])
    eastings = np.array([214641.356000])
    # Reference temperatures in F: 60 years x 12 months = 720 values,
    # reshaped to one row per city.
    expected = np.loadtxt('tests/data/anc1950-2009.csv', delimiter=',')
    expected = np.reshape(expected, (1, 720))
    # Convert F -> C to match the dataset's units.
    expected = (expected - 32.0) * (5.0 / 9.0)
    result = dataset.extract_points(northings, eastings, 1950, 2009)
    assert_array_almost_equal(result['temperature'], expected)
def test_extract_point_data_2c_59y():
    """
    Extract point temperatures for Anchorage and Fairbanks across the
    full 1950-2009 span of a SNAP historical dataset (2 cities).
    """
    zip_path = 'raw_data/tas_AK_771m_CRU_TS31_historical_1950_2009.zip'
    dataset = snapextract.GeoRefData(zip_path)
    # Anchorage: 214641.356000 E, 1250935.040000 N
    # Fairbanks: 297703.529000 E, 1667062.690000 N
    northings = np.array([1250935.040000, 1667062.690000])
    eastings = np.array([214641.356000, 297703.529000])
    # Reference temperatures in F: one row per city, 720 monthly values.
    anc = np.loadtxt('tests/data/anc1950-2009.csv', delimiter=',')
    fbx = np.loadtxt('tests/data/fbx1950-2009.csv', delimiter=',')
    expected = np.vstack((anc.flatten(), fbx.flatten()))
    # Convert F -> C to match the dataset's units.
    expected = (expected - 32.0) * (5.0 / 9.0)
    result = dataset.extract_points(northings, eastings, 1950, 2009)
    assert_array_almost_equal(result['temperature'], expected, decimal=3)
def test_raw_output_all_communities():
    """
    Dumps *ALL* of the extracted points to disk, then verifies that one
    output file (with spaces encoded as underscores) exists per community.
    """
    filename = 'raw_data/tas_AK_771m_5modelAvg_sresb1_2001_2049.zip'
    dataset = snapextract.GeoRefData(filename)
    # Community list columns: name, northing, easting.
    dt = np.dtype({'names': ['community', 'northing', 'easting'],
                   'formats': ['S100', 'f8', 'f8']})
    community_file = 'tests/data/communities_dist.csv'
    communities, eastings, northings = np.loadtxt(community_file,
                                                  skiprows=1, delimiter=',',
                                                  unpack=True, dtype=dt)
    communities = communities.tolist()

    startyr = 2001
    endyr = 2001
    extracted_temps = dataset.extract_points(northings, eastings,
                                             startyr, endyr)
    path = 'output/avg_monthly_temps/'
    # Ensure the output directory's parents exist, then clear any stale
    # contents before dumping fresh output.
    snapextract.mkdir_p(path)
    shutil.rmtree(path)
    dataset.dump_raw_temperatures(communities, extracted_temps, path)
    file_list = os.listdir(path)
    communities.sort()
    file_list.sort()
    # A count mismatch must be a clean assertion failure; the original
    # manual-index loop raised IndexError (test *error*) when fewer files
    # were written than communities supplied.
    assert_equal(len(file_list), len(communities))
    # Filenames encode spaces as underscores; undo that before comparing.
    mismatches = [(name, fname.replace("_", " "))
                  for name, fname in zip(communities, file_list)
                  if name.decode('utf-8') != fname.replace("_", " ")]
    assert_equal(mismatches, [])
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    nose.main()
188
tests/test_backend_simple.py
Executable file
188
tests/test_backend_simple.py
Executable file
|
@ -0,0 +1,188 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Simple tests for snapextract backend.
|
||||
"""
|
||||
|
||||
import snapextract
|
||||
import nose
|
||||
from nose.tools import assert_equal
|
||||
import numpy as np
|
||||
from numpy.testing import assert_array_almost_equal
|
||||
import zipfile
|
||||
import os
|
||||
import shutil
|
||||
|
||||
def test_load_dataset():
    """Verify a SNAP zip archive is opened via the zipfile module."""
    ds = snapextract.SNAPDataSet(
        'raw_data/tas_AK_771m_5modelAvg_sresa1b_2050_2100.zip')
    assert_equal(isinstance(ds.zip_data, zipfile.ZipFile), True)
def test_file_prefix():
    """Zipfile member-name prefix should be parsed from the archive name."""
    ds = snapextract.SNAPDataSet(
        'raw_data/tas_AK_771m_5modelAvg_sresa1b_2050_2100.zip')
    assert_equal(ds.prefix, 'tas_mean_C_ar4_5modelAvg_sresa1b_')
def test_file_dir():
    """Internal zip directory name should be parsed from the archive name."""
    ds = snapextract.SNAPDataSet(
        'raw_data/tas_AK_771m_5modelAvg_sresa1b_2050_2100.zip')
    assert_equal(ds.zip_dir, 'tas50_100/')
def test_load_geotiff_as_array():
    """
    Check that a geotiff band is extracted from the zipfile as a numpy
    array.
    """
    filename = 'raw_data/tas_AK_771m_5modelAvg_sresa1b_2050_2100.zip'
    dataset = snapextract.GeoRefData(filename)
    temperatures = dataset.read_geotiff_as_array(1, 2051)
    # isinstance is the robust type check; slicing str(type(...)) breaks
    # whenever the type's repr format changes (and accepts subclasses
    # such as np.matrix correctly).
    assert_equal(isinstance(temperatures, np.ndarray), True)
def test_define_geotiff_params():
    """
    Test that GDAL reports the expected geotiff geometry and
    georeferencing parameters.
    """
    dataset = snapextract.GeoRefData(
        'raw_data/tas_AK_771m_5modelAvg_sresa1b_2050_2100.zip')
    observed = [dataset.cols, dataset.rows, dataset.bands,
                dataset.origin_x, dataset.origin_y,
                dataset.pixel_width, dataset.pixel_height]
    expected = [4762, 2557, 1, -2173225.118142955, 2381118.150470569,
                771.0, -770.9999999999999]
    assert_equal(observed, expected)
def test_ne_to_indices():
    """
    Northings/eastings should map to the expected raster array indices.
    """
    dataset = snapextract.GeoRefData(
        'raw_data/tas_AK_771m_5modelAvg_sresa1b_2050_2100.zip')
    # Anchorage coordinates.
    northings = np.array([1250935.040000])
    eastings = np.array([214641.356000])
    x_ind, y_ind = dataset.ne_to_indices(northings, eastings)
    assert_equal((x_ind, y_ind), (3097, 1465))
def test_indices_to_ne():
    """
    Indices -> northings/eastings -> indices must round-trip exactly.
    """
    dataset = snapextract.GeoRefData(
        'raw_data/tas_AK_771m_5modelAvg_sresa1b_2050_2100.zip')
    x_ind = np.array([3097])
    y_ind = np.array([1465])
    northings, eastings = dataset.indices_to_ne(x_ind, y_ind)
    round_x, round_y = dataset.ne_to_indices(northings, eastings)
    assert_equal((x_ind, y_ind), (round_x, round_y))
def test_extract_point_data_1c_1y():
    """
    Extract point temperatures for one city (Anchorage), one year (2009).
    """
    dataset = snapextract.GeoRefData(
        'raw_data/tas_AK_771m_CRU_TS31_historical_1950_2009.zip')
    # Anchorage: 214641.356000 E, 1250935.040000 N
    northings = np.array([1250935.040000])
    eastings = np.array([214641.356000])
    # Expected monthly temperatures in F (one row per city, one column
    # per month), converted to C to match the dataset's units.
    expected = np.array([[15.08, 15.98, 24.26, 37.22, 51.08, 56.66,
                          62.06, 57.74, 50.00, 38.48, 18.86, 21.20]])
    expected = (expected - 32.0) * (5.0 / 9.0)
    result = dataset.extract_points(northings, eastings, 2009, 2009)
    assert_array_almost_equal(result['temperature'], expected)
def test_extract_point_data_2c_1y():
    """
    Extract point temperatures for two cities (Anchorage, Fairbanks),
    one year (2009).
    """
    dataset = snapextract.GeoRefData(
        'raw_data/tas_AK_771m_CRU_TS31_historical_1950_2009.zip')
    # Anchorage: 214641.356000 E, 1250935.040000 N
    # Fairbanks: 297703.529000 E, 1667062.690000 N
    northings = np.array([1250935.040000, 1667062.690000])
    eastings = np.array([214641.356000, 297703.529000])
    # Expected monthly temperatures in F (one row per city), converted
    # to C to match the dataset's units.
    expected = np.array([[15.08, 15.98, 24.26, 37.22, 51.08, 56.66,
                          62.06, 57.74, 50.00, 38.48, 18.86, 21.20],
                         [-11.38, -3.28, 9.14, 32.90, 53.42, 61.34,
                          67.64, 55.40, 48.20, 27.14, -2.38, -2.38]])
    expected = (expected - 32.0) * (5.0 / 9.0)
    result = dataset.extract_points(northings, eastings, 2009, 2009)
    assert_array_almost_equal(result['temperature'], expected)
def test_raw_output_simple():
    """
    Dump extracted Anchorage/Fairbanks temperatures to disk and check
    that one file per community appears in the output directory.
    """
    dataset = snapextract.GeoRefData(
        'raw_data/tas_AK_771m_5modelAvg_sresb1_2001_2049.zip')
    # Anchorage: 214641.356000 E, 1250935.040000 N
    # Fairbanks: 297703.529000 E, 1667062.690000 N
    communities = np.array(['Anchorage', 'Fairbanks'], dtype='S100')
    northings = np.array([1250935.040000, 1667062.690000])
    eastings = np.array([214641.356000, 297703.529000])
    extracted = dataset.extract_points(northings, eastings, 2001, 2001)
    out_dir = 'output/avg_monthly_temps/'
    # Ensure the directory's parents exist, then clear stale contents.
    snapextract.mkdir_p(out_dir)
    shutil.rmtree(out_dir)
    dataset.dump_raw_temperatures(communities, extracted, out_dir)
    assert_equal(os.listdir(out_dir), ['Anchorage', 'Fairbanks'])
def test_raw_output_simple_from_index():
    """
    Dump extracted temperatures for a point given by raster indices
    (Nigliq Channel) and check the resulting output filename.
    """
    dataset = snapextract.GeoRefData(
        'raw_data/tas_AK_771m_5modelAvg_sresb1_2001_2049.zip')
    # Nigliq Channel is specified by array indices (2967, 156), not by
    # projected coordinates, so convert indices first.
    communities = np.array(['Nigliq Channel'], dtype='S100')
    northings, eastings = dataset.indices_to_ne(np.array([2967]),
                                                np.array([156]))
    extracted = dataset.extract_points(northings, eastings, 2001, 2001)
    out_dir = 'output/avg_monthly_temps/'
    # Ensure the directory's parents exist, then clear stale contents.
    snapextract.mkdir_p(out_dir)
    shutil.rmtree(out_dir)
    dataset.dump_raw_temperatures(communities, extracted, out_dir)
    # Spaces in community names become underscores in filenames.
    assert_equal(os.listdir(out_dir), ['Nigliq_Channel'])
def test_wgs84_to_ne():
    """
    Check conversion from WGS84 lat/lon to SNAP northing/easting.
    """
    latitude = 59.046667
    longitude = -158.508611
    easting, northing, elevation = snapextract.wgs84_to_ne(latitude,
                                                           longitude)
    # Compare with a tolerance: exact float equality on projected
    # coordinates is brittle across proj/GDAL versions and platforms.
    assert_array_almost_equal([easting, northing],
                              [-257669.0691295379, 1014443.6452589828],
                              decimal=6)
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    nose.main()
Loading…
Add table
Reference in a new issue